github.com/mh-cbon/go@v0.0.0-20160603070303-9e112a3fe4c0/src/cmd/compile/internal/ssa/rewritegeneric.go (about) 1 // autogenerated from gen/generic.rules: do not edit! 2 // generated with: cd gen; go run *.go 3 4 package ssa 5 6 import "math" 7 8 var _ = math.MinInt8 // in case not otherwise used 9 func rewriteValuegeneric(v *Value, config *Config) bool { 10 switch v.Op { 11 case OpAdd16: 12 return rewriteValuegeneric_OpAdd16(v, config) 13 case OpAdd32: 14 return rewriteValuegeneric_OpAdd32(v, config) 15 case OpAdd32F: 16 return rewriteValuegeneric_OpAdd32F(v, config) 17 case OpAdd64: 18 return rewriteValuegeneric_OpAdd64(v, config) 19 case OpAdd64F: 20 return rewriteValuegeneric_OpAdd64F(v, config) 21 case OpAdd8: 22 return rewriteValuegeneric_OpAdd8(v, config) 23 case OpAddPtr: 24 return rewriteValuegeneric_OpAddPtr(v, config) 25 case OpAnd16: 26 return rewriteValuegeneric_OpAnd16(v, config) 27 case OpAnd32: 28 return rewriteValuegeneric_OpAnd32(v, config) 29 case OpAnd64: 30 return rewriteValuegeneric_OpAnd64(v, config) 31 case OpAnd8: 32 return rewriteValuegeneric_OpAnd8(v, config) 33 case OpArg: 34 return rewriteValuegeneric_OpArg(v, config) 35 case OpArrayIndex: 36 return rewriteValuegeneric_OpArrayIndex(v, config) 37 case OpCom16: 38 return rewriteValuegeneric_OpCom16(v, config) 39 case OpCom32: 40 return rewriteValuegeneric_OpCom32(v, config) 41 case OpCom64: 42 return rewriteValuegeneric_OpCom64(v, config) 43 case OpCom8: 44 return rewriteValuegeneric_OpCom8(v, config) 45 case OpConstInterface: 46 return rewriteValuegeneric_OpConstInterface(v, config) 47 case OpConstSlice: 48 return rewriteValuegeneric_OpConstSlice(v, config) 49 case OpConstString: 50 return rewriteValuegeneric_OpConstString(v, config) 51 case OpConvert: 52 return rewriteValuegeneric_OpConvert(v, config) 53 case OpCvt32Fto64F: 54 return rewriteValuegeneric_OpCvt32Fto64F(v, config) 55 case OpCvt64Fto32F: 56 return rewriteValuegeneric_OpCvt64Fto32F(v, config) 57 case OpDiv64: 58 return 
rewriteValuegeneric_OpDiv64(v, config) 59 case OpDiv64u: 60 return rewriteValuegeneric_OpDiv64u(v, config) 61 case OpEq16: 62 return rewriteValuegeneric_OpEq16(v, config) 63 case OpEq32: 64 return rewriteValuegeneric_OpEq32(v, config) 65 case OpEq64: 66 return rewriteValuegeneric_OpEq64(v, config) 67 case OpEq8: 68 return rewriteValuegeneric_OpEq8(v, config) 69 case OpEqB: 70 return rewriteValuegeneric_OpEqB(v, config) 71 case OpEqInter: 72 return rewriteValuegeneric_OpEqInter(v, config) 73 case OpEqPtr: 74 return rewriteValuegeneric_OpEqPtr(v, config) 75 case OpEqSlice: 76 return rewriteValuegeneric_OpEqSlice(v, config) 77 case OpGeq16: 78 return rewriteValuegeneric_OpGeq16(v, config) 79 case OpGeq16U: 80 return rewriteValuegeneric_OpGeq16U(v, config) 81 case OpGeq32: 82 return rewriteValuegeneric_OpGeq32(v, config) 83 case OpGeq32U: 84 return rewriteValuegeneric_OpGeq32U(v, config) 85 case OpGeq64: 86 return rewriteValuegeneric_OpGeq64(v, config) 87 case OpGeq64U: 88 return rewriteValuegeneric_OpGeq64U(v, config) 89 case OpGeq8: 90 return rewriteValuegeneric_OpGeq8(v, config) 91 case OpGeq8U: 92 return rewriteValuegeneric_OpGeq8U(v, config) 93 case OpGreater16: 94 return rewriteValuegeneric_OpGreater16(v, config) 95 case OpGreater16U: 96 return rewriteValuegeneric_OpGreater16U(v, config) 97 case OpGreater32: 98 return rewriteValuegeneric_OpGreater32(v, config) 99 case OpGreater32U: 100 return rewriteValuegeneric_OpGreater32U(v, config) 101 case OpGreater64: 102 return rewriteValuegeneric_OpGreater64(v, config) 103 case OpGreater64U: 104 return rewriteValuegeneric_OpGreater64U(v, config) 105 case OpGreater8: 106 return rewriteValuegeneric_OpGreater8(v, config) 107 case OpGreater8U: 108 return rewriteValuegeneric_OpGreater8U(v, config) 109 case OpIsInBounds: 110 return rewriteValuegeneric_OpIsInBounds(v, config) 111 case OpIsSliceInBounds: 112 return rewriteValuegeneric_OpIsSliceInBounds(v, config) 113 case OpLeq16: 114 return rewriteValuegeneric_OpLeq16(v, config) 
115 case OpLeq16U: 116 return rewriteValuegeneric_OpLeq16U(v, config) 117 case OpLeq32: 118 return rewriteValuegeneric_OpLeq32(v, config) 119 case OpLeq32U: 120 return rewriteValuegeneric_OpLeq32U(v, config) 121 case OpLeq64: 122 return rewriteValuegeneric_OpLeq64(v, config) 123 case OpLeq64U: 124 return rewriteValuegeneric_OpLeq64U(v, config) 125 case OpLeq8: 126 return rewriteValuegeneric_OpLeq8(v, config) 127 case OpLeq8U: 128 return rewriteValuegeneric_OpLeq8U(v, config) 129 case OpLess16: 130 return rewriteValuegeneric_OpLess16(v, config) 131 case OpLess16U: 132 return rewriteValuegeneric_OpLess16U(v, config) 133 case OpLess32: 134 return rewriteValuegeneric_OpLess32(v, config) 135 case OpLess32U: 136 return rewriteValuegeneric_OpLess32U(v, config) 137 case OpLess64: 138 return rewriteValuegeneric_OpLess64(v, config) 139 case OpLess64U: 140 return rewriteValuegeneric_OpLess64U(v, config) 141 case OpLess8: 142 return rewriteValuegeneric_OpLess8(v, config) 143 case OpLess8U: 144 return rewriteValuegeneric_OpLess8U(v, config) 145 case OpLoad: 146 return rewriteValuegeneric_OpLoad(v, config) 147 case OpLsh16x16: 148 return rewriteValuegeneric_OpLsh16x16(v, config) 149 case OpLsh16x32: 150 return rewriteValuegeneric_OpLsh16x32(v, config) 151 case OpLsh16x64: 152 return rewriteValuegeneric_OpLsh16x64(v, config) 153 case OpLsh16x8: 154 return rewriteValuegeneric_OpLsh16x8(v, config) 155 case OpLsh32x16: 156 return rewriteValuegeneric_OpLsh32x16(v, config) 157 case OpLsh32x32: 158 return rewriteValuegeneric_OpLsh32x32(v, config) 159 case OpLsh32x64: 160 return rewriteValuegeneric_OpLsh32x64(v, config) 161 case OpLsh32x8: 162 return rewriteValuegeneric_OpLsh32x8(v, config) 163 case OpLsh64x16: 164 return rewriteValuegeneric_OpLsh64x16(v, config) 165 case OpLsh64x32: 166 return rewriteValuegeneric_OpLsh64x32(v, config) 167 case OpLsh64x64: 168 return rewriteValuegeneric_OpLsh64x64(v, config) 169 case OpLsh64x8: 170 return rewriteValuegeneric_OpLsh64x8(v, config) 171 
case OpLsh8x16: 172 return rewriteValuegeneric_OpLsh8x16(v, config) 173 case OpLsh8x32: 174 return rewriteValuegeneric_OpLsh8x32(v, config) 175 case OpLsh8x64: 176 return rewriteValuegeneric_OpLsh8x64(v, config) 177 case OpLsh8x8: 178 return rewriteValuegeneric_OpLsh8x8(v, config) 179 case OpMod16: 180 return rewriteValuegeneric_OpMod16(v, config) 181 case OpMod16u: 182 return rewriteValuegeneric_OpMod16u(v, config) 183 case OpMod32: 184 return rewriteValuegeneric_OpMod32(v, config) 185 case OpMod32u: 186 return rewriteValuegeneric_OpMod32u(v, config) 187 case OpMod64: 188 return rewriteValuegeneric_OpMod64(v, config) 189 case OpMod64u: 190 return rewriteValuegeneric_OpMod64u(v, config) 191 case OpMod8: 192 return rewriteValuegeneric_OpMod8(v, config) 193 case OpMod8u: 194 return rewriteValuegeneric_OpMod8u(v, config) 195 case OpMul16: 196 return rewriteValuegeneric_OpMul16(v, config) 197 case OpMul32: 198 return rewriteValuegeneric_OpMul32(v, config) 199 case OpMul32F: 200 return rewriteValuegeneric_OpMul32F(v, config) 201 case OpMul64: 202 return rewriteValuegeneric_OpMul64(v, config) 203 case OpMul64F: 204 return rewriteValuegeneric_OpMul64F(v, config) 205 case OpMul8: 206 return rewriteValuegeneric_OpMul8(v, config) 207 case OpNeg16: 208 return rewriteValuegeneric_OpNeg16(v, config) 209 case OpNeg32: 210 return rewriteValuegeneric_OpNeg32(v, config) 211 case OpNeg64: 212 return rewriteValuegeneric_OpNeg64(v, config) 213 case OpNeg8: 214 return rewriteValuegeneric_OpNeg8(v, config) 215 case OpNeq16: 216 return rewriteValuegeneric_OpNeq16(v, config) 217 case OpNeq32: 218 return rewriteValuegeneric_OpNeq32(v, config) 219 case OpNeq64: 220 return rewriteValuegeneric_OpNeq64(v, config) 221 case OpNeq8: 222 return rewriteValuegeneric_OpNeq8(v, config) 223 case OpNeqB: 224 return rewriteValuegeneric_OpNeqB(v, config) 225 case OpNeqInter: 226 return rewriteValuegeneric_OpNeqInter(v, config) 227 case OpNeqPtr: 228 return rewriteValuegeneric_OpNeqPtr(v, config) 229 case 
OpNeqSlice: 230 return rewriteValuegeneric_OpNeqSlice(v, config) 231 case OpOffPtr: 232 return rewriteValuegeneric_OpOffPtr(v, config) 233 case OpOr16: 234 return rewriteValuegeneric_OpOr16(v, config) 235 case OpOr32: 236 return rewriteValuegeneric_OpOr32(v, config) 237 case OpOr64: 238 return rewriteValuegeneric_OpOr64(v, config) 239 case OpOr8: 240 return rewriteValuegeneric_OpOr8(v, config) 241 case OpPhi: 242 return rewriteValuegeneric_OpPhi(v, config) 243 case OpPtrIndex: 244 return rewriteValuegeneric_OpPtrIndex(v, config) 245 case OpRsh16Ux16: 246 return rewriteValuegeneric_OpRsh16Ux16(v, config) 247 case OpRsh16Ux32: 248 return rewriteValuegeneric_OpRsh16Ux32(v, config) 249 case OpRsh16Ux64: 250 return rewriteValuegeneric_OpRsh16Ux64(v, config) 251 case OpRsh16Ux8: 252 return rewriteValuegeneric_OpRsh16Ux8(v, config) 253 case OpRsh16x16: 254 return rewriteValuegeneric_OpRsh16x16(v, config) 255 case OpRsh16x32: 256 return rewriteValuegeneric_OpRsh16x32(v, config) 257 case OpRsh16x64: 258 return rewriteValuegeneric_OpRsh16x64(v, config) 259 case OpRsh16x8: 260 return rewriteValuegeneric_OpRsh16x8(v, config) 261 case OpRsh32Ux16: 262 return rewriteValuegeneric_OpRsh32Ux16(v, config) 263 case OpRsh32Ux32: 264 return rewriteValuegeneric_OpRsh32Ux32(v, config) 265 case OpRsh32Ux64: 266 return rewriteValuegeneric_OpRsh32Ux64(v, config) 267 case OpRsh32Ux8: 268 return rewriteValuegeneric_OpRsh32Ux8(v, config) 269 case OpRsh32x16: 270 return rewriteValuegeneric_OpRsh32x16(v, config) 271 case OpRsh32x32: 272 return rewriteValuegeneric_OpRsh32x32(v, config) 273 case OpRsh32x64: 274 return rewriteValuegeneric_OpRsh32x64(v, config) 275 case OpRsh32x8: 276 return rewriteValuegeneric_OpRsh32x8(v, config) 277 case OpRsh64Ux16: 278 return rewriteValuegeneric_OpRsh64Ux16(v, config) 279 case OpRsh64Ux32: 280 return rewriteValuegeneric_OpRsh64Ux32(v, config) 281 case OpRsh64Ux64: 282 return rewriteValuegeneric_OpRsh64Ux64(v, config) 283 case OpRsh64Ux8: 284 return 
rewriteValuegeneric_OpRsh64Ux8(v, config) 285 case OpRsh64x16: 286 return rewriteValuegeneric_OpRsh64x16(v, config) 287 case OpRsh64x32: 288 return rewriteValuegeneric_OpRsh64x32(v, config) 289 case OpRsh64x64: 290 return rewriteValuegeneric_OpRsh64x64(v, config) 291 case OpRsh64x8: 292 return rewriteValuegeneric_OpRsh64x8(v, config) 293 case OpRsh8Ux16: 294 return rewriteValuegeneric_OpRsh8Ux16(v, config) 295 case OpRsh8Ux32: 296 return rewriteValuegeneric_OpRsh8Ux32(v, config) 297 case OpRsh8Ux64: 298 return rewriteValuegeneric_OpRsh8Ux64(v, config) 299 case OpRsh8Ux8: 300 return rewriteValuegeneric_OpRsh8Ux8(v, config) 301 case OpRsh8x16: 302 return rewriteValuegeneric_OpRsh8x16(v, config) 303 case OpRsh8x32: 304 return rewriteValuegeneric_OpRsh8x32(v, config) 305 case OpRsh8x64: 306 return rewriteValuegeneric_OpRsh8x64(v, config) 307 case OpRsh8x8: 308 return rewriteValuegeneric_OpRsh8x8(v, config) 309 case OpSliceCap: 310 return rewriteValuegeneric_OpSliceCap(v, config) 311 case OpSliceLen: 312 return rewriteValuegeneric_OpSliceLen(v, config) 313 case OpSlicePtr: 314 return rewriteValuegeneric_OpSlicePtr(v, config) 315 case OpStore: 316 return rewriteValuegeneric_OpStore(v, config) 317 case OpStringLen: 318 return rewriteValuegeneric_OpStringLen(v, config) 319 case OpStringPtr: 320 return rewriteValuegeneric_OpStringPtr(v, config) 321 case OpStructSelect: 322 return rewriteValuegeneric_OpStructSelect(v, config) 323 case OpSub16: 324 return rewriteValuegeneric_OpSub16(v, config) 325 case OpSub32: 326 return rewriteValuegeneric_OpSub32(v, config) 327 case OpSub32F: 328 return rewriteValuegeneric_OpSub32F(v, config) 329 case OpSub64: 330 return rewriteValuegeneric_OpSub64(v, config) 331 case OpSub64F: 332 return rewriteValuegeneric_OpSub64F(v, config) 333 case OpSub8: 334 return rewriteValuegeneric_OpSub8(v, config) 335 case OpTrunc16to8: 336 return rewriteValuegeneric_OpTrunc16to8(v, config) 337 case OpTrunc32to16: 338 return rewriteValuegeneric_OpTrunc32to16(v, 
config) 339 case OpTrunc32to8: 340 return rewriteValuegeneric_OpTrunc32to8(v, config) 341 case OpTrunc64to16: 342 return rewriteValuegeneric_OpTrunc64to16(v, config) 343 case OpTrunc64to32: 344 return rewriteValuegeneric_OpTrunc64to32(v, config) 345 case OpTrunc64to8: 346 return rewriteValuegeneric_OpTrunc64to8(v, config) 347 case OpXor16: 348 return rewriteValuegeneric_OpXor16(v, config) 349 case OpXor32: 350 return rewriteValuegeneric_OpXor32(v, config) 351 case OpXor64: 352 return rewriteValuegeneric_OpXor64(v, config) 353 case OpXor8: 354 return rewriteValuegeneric_OpXor8(v, config) 355 } 356 return false 357 } 358 func rewriteValuegeneric_OpAdd16(v *Value, config *Config) bool { 359 b := v.Block 360 _ = b 361 // match: (Add16 (Const16 [c]) (Const16 [d])) 362 // cond: 363 // result: (Const16 [int64(int16(c+d))]) 364 for { 365 v_0 := v.Args[0] 366 if v_0.Op != OpConst16 { 367 break 368 } 369 c := v_0.AuxInt 370 v_1 := v.Args[1] 371 if v_1.Op != OpConst16 { 372 break 373 } 374 d := v_1.AuxInt 375 v.reset(OpConst16) 376 v.AuxInt = int64(int16(c + d)) 377 return true 378 } 379 // match: (Add16 x (Const16 <t> [c])) 380 // cond: x.Op != OpConst16 381 // result: (Add16 (Const16 <t> [c]) x) 382 for { 383 x := v.Args[0] 384 v_1 := v.Args[1] 385 if v_1.Op != OpConst16 { 386 break 387 } 388 t := v_1.Type 389 c := v_1.AuxInt 390 if !(x.Op != OpConst16) { 391 break 392 } 393 v.reset(OpAdd16) 394 v0 := b.NewValue0(v.Line, OpConst16, t) 395 v0.AuxInt = c 396 v.AddArg(v0) 397 v.AddArg(x) 398 return true 399 } 400 // match: (Add16 (Const16 [0]) x) 401 // cond: 402 // result: x 403 for { 404 v_0 := v.Args[0] 405 if v_0.Op != OpConst16 { 406 break 407 } 408 if v_0.AuxInt != 0 { 409 break 410 } 411 x := v.Args[1] 412 v.reset(OpCopy) 413 v.Type = x.Type 414 v.AddArg(x) 415 return true 416 } 417 return false 418 } 419 func rewriteValuegeneric_OpAdd32(v *Value, config *Config) bool { 420 b := v.Block 421 _ = b 422 // match: (Add32 (Const32 [c]) (Const32 [d])) 423 // cond: 424 // 
result: (Const32 [int64(int32(c+d))]) 425 for { 426 v_0 := v.Args[0] 427 if v_0.Op != OpConst32 { 428 break 429 } 430 c := v_0.AuxInt 431 v_1 := v.Args[1] 432 if v_1.Op != OpConst32 { 433 break 434 } 435 d := v_1.AuxInt 436 v.reset(OpConst32) 437 v.AuxInt = int64(int32(c + d)) 438 return true 439 } 440 // match: (Add32 x (Const32 <t> [c])) 441 // cond: x.Op != OpConst32 442 // result: (Add32 (Const32 <t> [c]) x) 443 for { 444 x := v.Args[0] 445 v_1 := v.Args[1] 446 if v_1.Op != OpConst32 { 447 break 448 } 449 t := v_1.Type 450 c := v_1.AuxInt 451 if !(x.Op != OpConst32) { 452 break 453 } 454 v.reset(OpAdd32) 455 v0 := b.NewValue0(v.Line, OpConst32, t) 456 v0.AuxInt = c 457 v.AddArg(v0) 458 v.AddArg(x) 459 return true 460 } 461 // match: (Add32 (Const32 [0]) x) 462 // cond: 463 // result: x 464 for { 465 v_0 := v.Args[0] 466 if v_0.Op != OpConst32 { 467 break 468 } 469 if v_0.AuxInt != 0 { 470 break 471 } 472 x := v.Args[1] 473 v.reset(OpCopy) 474 v.Type = x.Type 475 v.AddArg(x) 476 return true 477 } 478 return false 479 } 480 func rewriteValuegeneric_OpAdd32F(v *Value, config *Config) bool { 481 b := v.Block 482 _ = b 483 // match: (Add32F (Const32F [c]) (Const32F [d])) 484 // cond: 485 // result: (Const32F [f2i(float64(i2f32(c) + i2f32(d)))]) 486 for { 487 v_0 := v.Args[0] 488 if v_0.Op != OpConst32F { 489 break 490 } 491 c := v_0.AuxInt 492 v_1 := v.Args[1] 493 if v_1.Op != OpConst32F { 494 break 495 } 496 d := v_1.AuxInt 497 v.reset(OpConst32F) 498 v.AuxInt = f2i(float64(i2f32(c) + i2f32(d))) 499 return true 500 } 501 return false 502 } 503 func rewriteValuegeneric_OpAdd64(v *Value, config *Config) bool { 504 b := v.Block 505 _ = b 506 // match: (Add64 (Const64 [c]) (Const64 [d])) 507 // cond: 508 // result: (Const64 [c+d]) 509 for { 510 v_0 := v.Args[0] 511 if v_0.Op != OpConst64 { 512 break 513 } 514 c := v_0.AuxInt 515 v_1 := v.Args[1] 516 if v_1.Op != OpConst64 { 517 break 518 } 519 d := v_1.AuxInt 520 v.reset(OpConst64) 521 v.AuxInt = c + d 522 return true 
523 } 524 // match: (Add64 x (Const64 <t> [c])) 525 // cond: x.Op != OpConst64 526 // result: (Add64 (Const64 <t> [c]) x) 527 for { 528 x := v.Args[0] 529 v_1 := v.Args[1] 530 if v_1.Op != OpConst64 { 531 break 532 } 533 t := v_1.Type 534 c := v_1.AuxInt 535 if !(x.Op != OpConst64) { 536 break 537 } 538 v.reset(OpAdd64) 539 v0 := b.NewValue0(v.Line, OpConst64, t) 540 v0.AuxInt = c 541 v.AddArg(v0) 542 v.AddArg(x) 543 return true 544 } 545 // match: (Add64 (Const64 [0]) x) 546 // cond: 547 // result: x 548 for { 549 v_0 := v.Args[0] 550 if v_0.Op != OpConst64 { 551 break 552 } 553 if v_0.AuxInt != 0 { 554 break 555 } 556 x := v.Args[1] 557 v.reset(OpCopy) 558 v.Type = x.Type 559 v.AddArg(x) 560 return true 561 } 562 return false 563 } 564 func rewriteValuegeneric_OpAdd64F(v *Value, config *Config) bool { 565 b := v.Block 566 _ = b 567 // match: (Add64F (Const64F [c]) (Const64F [d])) 568 // cond: 569 // result: (Const64F [f2i(i2f(c) + i2f(d))]) 570 for { 571 v_0 := v.Args[0] 572 if v_0.Op != OpConst64F { 573 break 574 } 575 c := v_0.AuxInt 576 v_1 := v.Args[1] 577 if v_1.Op != OpConst64F { 578 break 579 } 580 d := v_1.AuxInt 581 v.reset(OpConst64F) 582 v.AuxInt = f2i(i2f(c) + i2f(d)) 583 return true 584 } 585 return false 586 } 587 func rewriteValuegeneric_OpAdd8(v *Value, config *Config) bool { 588 b := v.Block 589 _ = b 590 // match: (Add8 (Const8 [c]) (Const8 [d])) 591 // cond: 592 // result: (Const8 [int64(int8(c+d))]) 593 for { 594 v_0 := v.Args[0] 595 if v_0.Op != OpConst8 { 596 break 597 } 598 c := v_0.AuxInt 599 v_1 := v.Args[1] 600 if v_1.Op != OpConst8 { 601 break 602 } 603 d := v_1.AuxInt 604 v.reset(OpConst8) 605 v.AuxInt = int64(int8(c + d)) 606 return true 607 } 608 // match: (Add8 x (Const8 <t> [c])) 609 // cond: x.Op != OpConst8 610 // result: (Add8 (Const8 <t> [c]) x) 611 for { 612 x := v.Args[0] 613 v_1 := v.Args[1] 614 if v_1.Op != OpConst8 { 615 break 616 } 617 t := v_1.Type 618 c := v_1.AuxInt 619 if !(x.Op != OpConst8) { 620 break 621 } 622 
v.reset(OpAdd8) 623 v0 := b.NewValue0(v.Line, OpConst8, t) 624 v0.AuxInt = c 625 v.AddArg(v0) 626 v.AddArg(x) 627 return true 628 } 629 // match: (Add8 (Const8 [0]) x) 630 // cond: 631 // result: x 632 for { 633 v_0 := v.Args[0] 634 if v_0.Op != OpConst8 { 635 break 636 } 637 if v_0.AuxInt != 0 { 638 break 639 } 640 x := v.Args[1] 641 v.reset(OpCopy) 642 v.Type = x.Type 643 v.AddArg(x) 644 return true 645 } 646 return false 647 } 648 func rewriteValuegeneric_OpAddPtr(v *Value, config *Config) bool { 649 b := v.Block 650 _ = b 651 // match: (AddPtr <t> x (Const64 [c])) 652 // cond: 653 // result: (OffPtr <t> x [c]) 654 for { 655 t := v.Type 656 x := v.Args[0] 657 v_1 := v.Args[1] 658 if v_1.Op != OpConst64 { 659 break 660 } 661 c := v_1.AuxInt 662 v.reset(OpOffPtr) 663 v.Type = t 664 v.AddArg(x) 665 v.AuxInt = c 666 return true 667 } 668 return false 669 } 670 func rewriteValuegeneric_OpAnd16(v *Value, config *Config) bool { 671 b := v.Block 672 _ = b 673 // match: (And16 x (Const16 <t> [c])) 674 // cond: x.Op != OpConst16 675 // result: (And16 (Const16 <t> [c]) x) 676 for { 677 x := v.Args[0] 678 v_1 := v.Args[1] 679 if v_1.Op != OpConst16 { 680 break 681 } 682 t := v_1.Type 683 c := v_1.AuxInt 684 if !(x.Op != OpConst16) { 685 break 686 } 687 v.reset(OpAnd16) 688 v0 := b.NewValue0(v.Line, OpConst16, t) 689 v0.AuxInt = c 690 v.AddArg(v0) 691 v.AddArg(x) 692 return true 693 } 694 // match: (And16 x x) 695 // cond: 696 // result: x 697 for { 698 x := v.Args[0] 699 if x != v.Args[1] { 700 break 701 } 702 v.reset(OpCopy) 703 v.Type = x.Type 704 v.AddArg(x) 705 return true 706 } 707 // match: (And16 (Const16 [-1]) x) 708 // cond: 709 // result: x 710 for { 711 v_0 := v.Args[0] 712 if v_0.Op != OpConst16 { 713 break 714 } 715 if v_0.AuxInt != -1 { 716 break 717 } 718 x := v.Args[1] 719 v.reset(OpCopy) 720 v.Type = x.Type 721 v.AddArg(x) 722 return true 723 } 724 // match: (And16 (Const16 [0]) _) 725 // cond: 726 // result: (Const16 [0]) 727 for { 728 v_0 := v.Args[0] 729 
if v_0.Op != OpConst16 { 730 break 731 } 732 if v_0.AuxInt != 0 { 733 break 734 } 735 v.reset(OpConst16) 736 v.AuxInt = 0 737 return true 738 } 739 // match: (And16 x (And16 x y)) 740 // cond: 741 // result: (And16 x y) 742 for { 743 x := v.Args[0] 744 v_1 := v.Args[1] 745 if v_1.Op != OpAnd16 { 746 break 747 } 748 if x != v_1.Args[0] { 749 break 750 } 751 y := v_1.Args[1] 752 v.reset(OpAnd16) 753 v.AddArg(x) 754 v.AddArg(y) 755 return true 756 } 757 // match: (And16 x (And16 y x)) 758 // cond: 759 // result: (And16 x y) 760 for { 761 x := v.Args[0] 762 v_1 := v.Args[1] 763 if v_1.Op != OpAnd16 { 764 break 765 } 766 y := v_1.Args[0] 767 if x != v_1.Args[1] { 768 break 769 } 770 v.reset(OpAnd16) 771 v.AddArg(x) 772 v.AddArg(y) 773 return true 774 } 775 // match: (And16 (And16 x y) x) 776 // cond: 777 // result: (And16 x y) 778 for { 779 v_0 := v.Args[0] 780 if v_0.Op != OpAnd16 { 781 break 782 } 783 x := v_0.Args[0] 784 y := v_0.Args[1] 785 if x != v.Args[1] { 786 break 787 } 788 v.reset(OpAnd16) 789 v.AddArg(x) 790 v.AddArg(y) 791 return true 792 } 793 // match: (And16 (And16 x y) y) 794 // cond: 795 // result: (And16 x y) 796 for { 797 v_0 := v.Args[0] 798 if v_0.Op != OpAnd16 { 799 break 800 } 801 x := v_0.Args[0] 802 y := v_0.Args[1] 803 if y != v.Args[1] { 804 break 805 } 806 v.reset(OpAnd16) 807 v.AddArg(x) 808 v.AddArg(y) 809 return true 810 } 811 return false 812 } 813 func rewriteValuegeneric_OpAnd32(v *Value, config *Config) bool { 814 b := v.Block 815 _ = b 816 // match: (And32 x (Const32 <t> [c])) 817 // cond: x.Op != OpConst32 818 // result: (And32 (Const32 <t> [c]) x) 819 for { 820 x := v.Args[0] 821 v_1 := v.Args[1] 822 if v_1.Op != OpConst32 { 823 break 824 } 825 t := v_1.Type 826 c := v_1.AuxInt 827 if !(x.Op != OpConst32) { 828 break 829 } 830 v.reset(OpAnd32) 831 v0 := b.NewValue0(v.Line, OpConst32, t) 832 v0.AuxInt = c 833 v.AddArg(v0) 834 v.AddArg(x) 835 return true 836 } 837 // match: (And32 x x) 838 // cond: 839 // result: x 840 for { 841 x := 
v.Args[0] 842 if x != v.Args[1] { 843 break 844 } 845 v.reset(OpCopy) 846 v.Type = x.Type 847 v.AddArg(x) 848 return true 849 } 850 // match: (And32 (Const32 [-1]) x) 851 // cond: 852 // result: x 853 for { 854 v_0 := v.Args[0] 855 if v_0.Op != OpConst32 { 856 break 857 } 858 if v_0.AuxInt != -1 { 859 break 860 } 861 x := v.Args[1] 862 v.reset(OpCopy) 863 v.Type = x.Type 864 v.AddArg(x) 865 return true 866 } 867 // match: (And32 (Const32 [0]) _) 868 // cond: 869 // result: (Const32 [0]) 870 for { 871 v_0 := v.Args[0] 872 if v_0.Op != OpConst32 { 873 break 874 } 875 if v_0.AuxInt != 0 { 876 break 877 } 878 v.reset(OpConst32) 879 v.AuxInt = 0 880 return true 881 } 882 // match: (And32 x (And32 x y)) 883 // cond: 884 // result: (And32 x y) 885 for { 886 x := v.Args[0] 887 v_1 := v.Args[1] 888 if v_1.Op != OpAnd32 { 889 break 890 } 891 if x != v_1.Args[0] { 892 break 893 } 894 y := v_1.Args[1] 895 v.reset(OpAnd32) 896 v.AddArg(x) 897 v.AddArg(y) 898 return true 899 } 900 // match: (And32 x (And32 y x)) 901 // cond: 902 // result: (And32 x y) 903 for { 904 x := v.Args[0] 905 v_1 := v.Args[1] 906 if v_1.Op != OpAnd32 { 907 break 908 } 909 y := v_1.Args[0] 910 if x != v_1.Args[1] { 911 break 912 } 913 v.reset(OpAnd32) 914 v.AddArg(x) 915 v.AddArg(y) 916 return true 917 } 918 // match: (And32 (And32 x y) x) 919 // cond: 920 // result: (And32 x y) 921 for { 922 v_0 := v.Args[0] 923 if v_0.Op != OpAnd32 { 924 break 925 } 926 x := v_0.Args[0] 927 y := v_0.Args[1] 928 if x != v.Args[1] { 929 break 930 } 931 v.reset(OpAnd32) 932 v.AddArg(x) 933 v.AddArg(y) 934 return true 935 } 936 // match: (And32 (And32 x y) y) 937 // cond: 938 // result: (And32 x y) 939 for { 940 v_0 := v.Args[0] 941 if v_0.Op != OpAnd32 { 942 break 943 } 944 x := v_0.Args[0] 945 y := v_0.Args[1] 946 if y != v.Args[1] { 947 break 948 } 949 v.reset(OpAnd32) 950 v.AddArg(x) 951 v.AddArg(y) 952 return true 953 } 954 return false 955 } 956 func rewriteValuegeneric_OpAnd64(v *Value, config *Config) bool { 957 b 
:= v.Block 958 _ = b 959 // match: (And64 x (Const64 <t> [c])) 960 // cond: x.Op != OpConst64 961 // result: (And64 (Const64 <t> [c]) x) 962 for { 963 x := v.Args[0] 964 v_1 := v.Args[1] 965 if v_1.Op != OpConst64 { 966 break 967 } 968 t := v_1.Type 969 c := v_1.AuxInt 970 if !(x.Op != OpConst64) { 971 break 972 } 973 v.reset(OpAnd64) 974 v0 := b.NewValue0(v.Line, OpConst64, t) 975 v0.AuxInt = c 976 v.AddArg(v0) 977 v.AddArg(x) 978 return true 979 } 980 // match: (And64 x x) 981 // cond: 982 // result: x 983 for { 984 x := v.Args[0] 985 if x != v.Args[1] { 986 break 987 } 988 v.reset(OpCopy) 989 v.Type = x.Type 990 v.AddArg(x) 991 return true 992 } 993 // match: (And64 (Const64 [-1]) x) 994 // cond: 995 // result: x 996 for { 997 v_0 := v.Args[0] 998 if v_0.Op != OpConst64 { 999 break 1000 } 1001 if v_0.AuxInt != -1 { 1002 break 1003 } 1004 x := v.Args[1] 1005 v.reset(OpCopy) 1006 v.Type = x.Type 1007 v.AddArg(x) 1008 return true 1009 } 1010 // match: (And64 (Const64 [0]) _) 1011 // cond: 1012 // result: (Const64 [0]) 1013 for { 1014 v_0 := v.Args[0] 1015 if v_0.Op != OpConst64 { 1016 break 1017 } 1018 if v_0.AuxInt != 0 { 1019 break 1020 } 1021 v.reset(OpConst64) 1022 v.AuxInt = 0 1023 return true 1024 } 1025 // match: (And64 x (And64 x y)) 1026 // cond: 1027 // result: (And64 x y) 1028 for { 1029 x := v.Args[0] 1030 v_1 := v.Args[1] 1031 if v_1.Op != OpAnd64 { 1032 break 1033 } 1034 if x != v_1.Args[0] { 1035 break 1036 } 1037 y := v_1.Args[1] 1038 v.reset(OpAnd64) 1039 v.AddArg(x) 1040 v.AddArg(y) 1041 return true 1042 } 1043 // match: (And64 x (And64 y x)) 1044 // cond: 1045 // result: (And64 x y) 1046 for { 1047 x := v.Args[0] 1048 v_1 := v.Args[1] 1049 if v_1.Op != OpAnd64 { 1050 break 1051 } 1052 y := v_1.Args[0] 1053 if x != v_1.Args[1] { 1054 break 1055 } 1056 v.reset(OpAnd64) 1057 v.AddArg(x) 1058 v.AddArg(y) 1059 return true 1060 } 1061 // match: (And64 (And64 x y) x) 1062 // cond: 1063 // result: (And64 x y) 1064 for { 1065 v_0 := v.Args[0] 1066 if 
v_0.Op != OpAnd64 { 1067 break 1068 } 1069 x := v_0.Args[0] 1070 y := v_0.Args[1] 1071 if x != v.Args[1] { 1072 break 1073 } 1074 v.reset(OpAnd64) 1075 v.AddArg(x) 1076 v.AddArg(y) 1077 return true 1078 } 1079 // match: (And64 (And64 x y) y) 1080 // cond: 1081 // result: (And64 x y) 1082 for { 1083 v_0 := v.Args[0] 1084 if v_0.Op != OpAnd64 { 1085 break 1086 } 1087 x := v_0.Args[0] 1088 y := v_0.Args[1] 1089 if y != v.Args[1] { 1090 break 1091 } 1092 v.reset(OpAnd64) 1093 v.AddArg(x) 1094 v.AddArg(y) 1095 return true 1096 } 1097 // match: (And64 <t> (Const64 [y]) x) 1098 // cond: nlz(y) + nto(y) == 64 && nto(y) >= 32 1099 // result: (Rsh64Ux64 (Lsh64x64 <t> x (Const64 <t> [nlz(y)])) (Const64 <t> [nlz(y)])) 1100 for { 1101 t := v.Type 1102 v_0 := v.Args[0] 1103 if v_0.Op != OpConst64 { 1104 break 1105 } 1106 y := v_0.AuxInt 1107 x := v.Args[1] 1108 if !(nlz(y)+nto(y) == 64 && nto(y) >= 32) { 1109 break 1110 } 1111 v.reset(OpRsh64Ux64) 1112 v0 := b.NewValue0(v.Line, OpLsh64x64, t) 1113 v0.AddArg(x) 1114 v1 := b.NewValue0(v.Line, OpConst64, t) 1115 v1.AuxInt = nlz(y) 1116 v0.AddArg(v1) 1117 v.AddArg(v0) 1118 v2 := b.NewValue0(v.Line, OpConst64, t) 1119 v2.AuxInt = nlz(y) 1120 v.AddArg(v2) 1121 return true 1122 } 1123 // match: (And64 <t> (Const64 [y]) x) 1124 // cond: nlo(y) + ntz(y) == 64 && ntz(y) >= 32 1125 // result: (Lsh64x64 (Rsh64Ux64 <t> x (Const64 <t> [ntz(y)])) (Const64 <t> [ntz(y)])) 1126 for { 1127 t := v.Type 1128 v_0 := v.Args[0] 1129 if v_0.Op != OpConst64 { 1130 break 1131 } 1132 y := v_0.AuxInt 1133 x := v.Args[1] 1134 if !(nlo(y)+ntz(y) == 64 && ntz(y) >= 32) { 1135 break 1136 } 1137 v.reset(OpLsh64x64) 1138 v0 := b.NewValue0(v.Line, OpRsh64Ux64, t) 1139 v0.AddArg(x) 1140 v1 := b.NewValue0(v.Line, OpConst64, t) 1141 v1.AuxInt = ntz(y) 1142 v0.AddArg(v1) 1143 v.AddArg(v0) 1144 v2 := b.NewValue0(v.Line, OpConst64, t) 1145 v2.AuxInt = ntz(y) 1146 v.AddArg(v2) 1147 return true 1148 } 1149 return false 1150 } 1151 func rewriteValuegeneric_OpAnd8(v *Value, 
config *Config) bool { 1152 b := v.Block 1153 _ = b 1154 // match: (And8 x (Const8 <t> [c])) 1155 // cond: x.Op != OpConst8 1156 // result: (And8 (Const8 <t> [c]) x) 1157 for { 1158 x := v.Args[0] 1159 v_1 := v.Args[1] 1160 if v_1.Op != OpConst8 { 1161 break 1162 } 1163 t := v_1.Type 1164 c := v_1.AuxInt 1165 if !(x.Op != OpConst8) { 1166 break 1167 } 1168 v.reset(OpAnd8) 1169 v0 := b.NewValue0(v.Line, OpConst8, t) 1170 v0.AuxInt = c 1171 v.AddArg(v0) 1172 v.AddArg(x) 1173 return true 1174 } 1175 // match: (And8 x x) 1176 // cond: 1177 // result: x 1178 for { 1179 x := v.Args[0] 1180 if x != v.Args[1] { 1181 break 1182 } 1183 v.reset(OpCopy) 1184 v.Type = x.Type 1185 v.AddArg(x) 1186 return true 1187 } 1188 // match: (And8 (Const8 [-1]) x) 1189 // cond: 1190 // result: x 1191 for { 1192 v_0 := v.Args[0] 1193 if v_0.Op != OpConst8 { 1194 break 1195 } 1196 if v_0.AuxInt != -1 { 1197 break 1198 } 1199 x := v.Args[1] 1200 v.reset(OpCopy) 1201 v.Type = x.Type 1202 v.AddArg(x) 1203 return true 1204 } 1205 // match: (And8 (Const8 [0]) _) 1206 // cond: 1207 // result: (Const8 [0]) 1208 for { 1209 v_0 := v.Args[0] 1210 if v_0.Op != OpConst8 { 1211 break 1212 } 1213 if v_0.AuxInt != 0 { 1214 break 1215 } 1216 v.reset(OpConst8) 1217 v.AuxInt = 0 1218 return true 1219 } 1220 // match: (And8 x (And8 x y)) 1221 // cond: 1222 // result: (And8 x y) 1223 for { 1224 x := v.Args[0] 1225 v_1 := v.Args[1] 1226 if v_1.Op != OpAnd8 { 1227 break 1228 } 1229 if x != v_1.Args[0] { 1230 break 1231 } 1232 y := v_1.Args[1] 1233 v.reset(OpAnd8) 1234 v.AddArg(x) 1235 v.AddArg(y) 1236 return true 1237 } 1238 // match: (And8 x (And8 y x)) 1239 // cond: 1240 // result: (And8 x y) 1241 for { 1242 x := v.Args[0] 1243 v_1 := v.Args[1] 1244 if v_1.Op != OpAnd8 { 1245 break 1246 } 1247 y := v_1.Args[0] 1248 if x != v_1.Args[1] { 1249 break 1250 } 1251 v.reset(OpAnd8) 1252 v.AddArg(x) 1253 v.AddArg(y) 1254 return true 1255 } 1256 // match: (And8 (And8 x y) x) 1257 // cond: 1258 // result: (And8 x y) 1259 
	// (continuation of rewriteValuegeneric_OpAnd8: And8-absorption rules)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAnd8 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if x != v.Args[1] {
			break
		}
		v.reset(OpAnd8)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (And8 (And8 x y) y)
	// cond:
	// result: (And8 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAnd8 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if y != v.Args[1] {
			break
		}
		v.reset(OpAnd8)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}

// rewriteValuegeneric_OpArg decomposes composite-typed Arg values
// (string, slice, interface, complex, and small SSA-able structs) into
// per-component Arg values at the appropriate stack offsets.
// It reports whether v was rewritten.
// NOTE: this file is autogenerated from gen/generic.rules — do not hand-edit.
func rewriteValuegeneric_OpArg(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Arg {n} [off])
	// cond: v.Type.IsString()
	// result: (StringMake (Arg <config.fe.TypeBytePtr()> {n} [off]) (Arg <config.fe.TypeInt()> {n} [off+config.PtrSize]))
	for {
		n := v.Aux
		off := v.AuxInt
		if !(v.Type.IsString()) {
			break
		}
		v.reset(OpStringMake)
		v0 := b.NewValue0(v.Line, OpArg, config.fe.TypeBytePtr())
		v0.Aux = n
		v0.AuxInt = off
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpArg, config.fe.TypeInt())
		v1.Aux = n
		v1.AuxInt = off + config.PtrSize
		v.AddArg(v1)
		return true
	}
	// match: (Arg {n} [off])
	// cond: v.Type.IsSlice()
	// result: (SliceMake (Arg <v.Type.ElemType().PtrTo()> {n} [off]) (Arg <config.fe.TypeInt()> {n} [off+config.PtrSize]) (Arg <config.fe.TypeInt()> {n} [off+2*config.PtrSize]))
	for {
		n := v.Aux
		off := v.AuxInt
		if !(v.Type.IsSlice()) {
			break
		}
		v.reset(OpSliceMake)
		v0 := b.NewValue0(v.Line, OpArg, v.Type.ElemType().PtrTo())
		v0.Aux = n
		v0.AuxInt = off
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpArg, config.fe.TypeInt())
		v1.Aux = n
		v1.AuxInt = off + config.PtrSize
		v.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpArg, config.fe.TypeInt())
		v2.Aux = n
		v2.AuxInt = off + 2*config.PtrSize
		v.AddArg(v2)
		return true
	}
	// match: (Arg {n} [off])
	// cond: v.Type.IsInterface()
	// result: (IMake (Arg <config.fe.TypeBytePtr()> {n} [off]) (Arg <config.fe.TypeBytePtr()> {n} [off+config.PtrSize]))
	for {
		n := v.Aux
		off := v.AuxInt
		if !(v.Type.IsInterface()) {
			break
		}
		v.reset(OpIMake)
		v0 := b.NewValue0(v.Line, OpArg, config.fe.TypeBytePtr())
		v0.Aux = n
		v0.AuxInt = off
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpArg, config.fe.TypeBytePtr())
		v1.Aux = n
		v1.AuxInt = off + config.PtrSize
		v.AddArg(v1)
		return true
	}
	// match: (Arg {n} [off])
	// cond: v.Type.IsComplex() && v.Type.Size() == 16
	// result: (ComplexMake (Arg <config.fe.TypeFloat64()> {n} [off]) (Arg <config.fe.TypeFloat64()> {n} [off+8]))
	for {
		n := v.Aux
		off := v.AuxInt
		if !(v.Type.IsComplex() && v.Type.Size() == 16) {
			break
		}
		v.reset(OpComplexMake)
		v0 := b.NewValue0(v.Line, OpArg, config.fe.TypeFloat64())
		v0.Aux = n
		v0.AuxInt = off
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpArg, config.fe.TypeFloat64())
		v1.Aux = n
		v1.AuxInt = off + 8
		v.AddArg(v1)
		return true
	}
	// match: (Arg {n} [off])
	// cond: v.Type.IsComplex() && v.Type.Size() == 8
	// result: (ComplexMake (Arg <config.fe.TypeFloat32()> {n} [off]) (Arg <config.fe.TypeFloat32()> {n} [off+4]))
	for {
		n := v.Aux
		off := v.AuxInt
		if !(v.Type.IsComplex() && v.Type.Size() == 8) {
			break
		}
		v.reset(OpComplexMake)
		v0 := b.NewValue0(v.Line, OpArg, config.fe.TypeFloat32())
		v0.Aux = n
		v0.AuxInt = off
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpArg, config.fe.TypeFloat32())
		v1.Aux = n
		v1.AuxInt = off + 4
		v.AddArg(v1)
		return true
	}
	// match: (Arg <t>)
	// cond: t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t)
	// result: (StructMake0)
	for {
		t := v.Type
		if !(t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t)) {
			break
		}
		v.reset(OpStructMake0)
		return true
	}
	// match: (Arg <t> {n} [off])
	// cond: t.IsStruct() && t.NumFields() == 1 && config.fe.CanSSA(t)
	// result: (StructMake1 (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)]))
	for {
		t := v.Type
		n := v.Aux
		off := v.AuxInt
		if !(t.IsStruct() && t.NumFields() == 1 && config.fe.CanSSA(t)) {
			break
		}
		v.reset(OpStructMake1)
		v0 := b.NewValue0(v.Line, OpArg, t.FieldType(0))
		v0.Aux = n
		v0.AuxInt = off + t.FieldOff(0)
		v.AddArg(v0)
		return true
	}
	// match: (Arg <t> {n} [off])
	// cond: t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t)
	// result: (StructMake2 (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)]) (Arg <t.FieldType(1)> {n} [off+t.FieldOff(1)]))
	for {
		t := v.Type
		n := v.Aux
		off := v.AuxInt
		if !(t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t)) {
			break
		}
		v.reset(OpStructMake2)
		v0 := b.NewValue0(v.Line, OpArg, t.FieldType(0))
		v0.Aux = n
		v0.AuxInt = off + t.FieldOff(0)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpArg, t.FieldType(1))
		v1.Aux = n
		v1.AuxInt = off + t.FieldOff(1)
		v.AddArg(v1)
		return true
	}
	// match: (Arg <t> {n} [off])
	// cond: t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t)
	// result: (StructMake3 (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)]) (Arg <t.FieldType(1)> {n} [off+t.FieldOff(1)]) (Arg <t.FieldType(2)> {n} [off+t.FieldOff(2)]))
	for {
		t := v.Type
		n := v.Aux
		off := v.AuxInt
		if !(t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t)) {
			break
		}
		v.reset(OpStructMake3)
		v0 := b.NewValue0(v.Line, OpArg, t.FieldType(0))
		v0.Aux = n
		v0.AuxInt = off + t.FieldOff(0)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpArg, t.FieldType(1))
		v1.Aux = n
		v1.AuxInt = off + t.FieldOff(1)
		v.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpArg, t.FieldType(2))
		v2.Aux = n
		v2.AuxInt = off + t.FieldOff(2)
		v.AddArg(v2)
		return true
	}
	// match: (Arg <t> {n} [off])
	// cond: t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t)
	// result: (StructMake4 (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)]) (Arg <t.FieldType(1)> {n} [off+t.FieldOff(1)]) (Arg <t.FieldType(2)> {n} [off+t.FieldOff(2)]) (Arg <t.FieldType(3)> {n} [off+t.FieldOff(3)]))
	for {
		t := v.Type
		n := v.Aux
		off := v.AuxInt
		if !(t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t)) {
			break
		}
		v.reset(OpStructMake4)
		v0 := b.NewValue0(v.Line, OpArg, t.FieldType(0))
		v0.Aux = n
		v0.AuxInt = off + t.FieldOff(0)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpArg, t.FieldType(1))
		v1.Aux = n
		v1.AuxInt = off + t.FieldOff(1)
		v.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpArg, t.FieldType(2))
		v2.Aux = n
		v2.AuxInt = off + t.FieldOff(2)
		v.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpArg, t.FieldType(3))
		v3.Aux = n
		v3.AuxInt = off + t.FieldOff(3)
		v.AddArg(v3)
		return true
	}
	return false
}

// rewriteValuegeneric_OpArrayIndex forwards an index-0 load of a
// 1-element array directly to a Load in the loading block.
// It reports whether v was rewritten.
func rewriteValuegeneric_OpArrayIndex(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ArrayIndex <t> [0] x:(Load ptr mem))
	// cond:
	// result: @x.Block (Load <t> ptr mem)
	for {
		t := v.Type
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		if x.Op != OpLoad {
			break
		}
		ptr := x.Args[0]
		mem := x.Args[1]
		// The replacement load is created in x's block (b is rebound),
		// then v becomes a Copy of it.
		b = x.Block
		v0 := b.NewValue0(v.Line, OpLoad, t)
		v.reset(OpCopy)
		v.AddArg(v0)
		v0.AddArg(ptr)
		v0.AddArg(mem)
		return true
	}
	return false
}

// rewriteValuegeneric_OpCom16 cancels a double bitwise complement.
// It reports whether v was rewritten.
func rewriteValuegeneric_OpCom16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Com16 (Com16 x))
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpCom16 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpCom32 cancels a double bitwise complement.
// It reports whether v was rewritten.
func rewriteValuegeneric_OpCom32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Com32 (Com32 x))
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpCom32 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpCom64 cancels a double bitwise complement.
// It reports whether v was rewritten.
func rewriteValuegeneric_OpCom64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Com64 (Com64 x))
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpCom64 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpCom8 cancels a double bitwise complement.
// It reports whether v was rewritten.
func rewriteValuegeneric_OpCom8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Com8 (Com8 x))
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpCom8 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpConstInterface expands a constant (nil)
// interface into an IMake of two nil pointers (itab, data).
// Always rewrites, so it always returns true.
func rewriteValuegeneric_OpConstInterface(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ConstInterface)
	// cond:
	// result: (IMake (ConstNil <config.fe.TypeBytePtr()>) (ConstNil <config.fe.TypeBytePtr()>))
	for {
		v.reset(OpIMake)
		v0 := b.NewValue0(v.Line, OpConstNil, config.fe.TypeBytePtr())
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpConstNil, config.fe.TypeBytePtr())
		v.AddArg(v1)
		return true
	}
}

// rewriteValuegeneric_OpConstSlice expands a constant (nil) slice into
// SliceMake(nil, 0, 0), using pointer-sized integer constants.
// It reports whether v was rewritten.
func rewriteValuegeneric_OpConstSlice(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ConstSlice)
	// cond: config.PtrSize == 4
	// result: (SliceMake (ConstNil <v.Type.ElemType().PtrTo()>) (Const32 <config.fe.TypeInt()> [0]) (Const32 <config.fe.TypeInt()> [0]))
	for {
		if !(config.PtrSize == 4) {
			break
		}
		v.reset(OpSliceMake)
		v0 := b.NewValue0(v.Line, OpConstNil, v.Type.ElemType().PtrTo())
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpConst32, config.fe.TypeInt())
		v1.AuxInt = 0
		v.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpConst32, config.fe.TypeInt())
		v2.AuxInt = 0
		v.AddArg(v2)
		return true
	}
	// match: (ConstSlice)
	// cond: config.PtrSize == 8
	// result: (SliceMake (ConstNil <v.Type.ElemType().PtrTo()>) (Const64 <config.fe.TypeInt()> [0]) (Const64 <config.fe.TypeInt()> [0]))
	for {
		if !(config.PtrSize == 8) {
			break
		}
		v.reset(OpSliceMake)
		v0 := b.NewValue0(v.Line, OpConstNil, v.Type.ElemType().PtrTo())
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpConst64, config.fe.TypeInt())
		v1.AuxInt = 0
		v.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeInt())
		v2.AuxInt = 0
		v.AddArg(v2)
		return true
	}
	return false
}

// rewriteValuegeneric_OpConstString expands a constant string into
// StringMake(ptr, len): a nil pointer for the empty string, or an Addr
// of the interned string data otherwise, per pointer size.
// It reports whether v was rewritten.
func rewriteValuegeneric_OpConstString(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ConstString {s})
	// cond: config.PtrSize == 4 && s.(string) == ""
	// result: (StringMake (ConstNil) (Const32 <config.fe.TypeInt()> [0]))
	for {
		s := v.Aux
		if !(config.PtrSize == 4 && s.(string) == "") {
			break
		}
		v.reset(OpStringMake)
		v0 := b.NewValue0(v.Line, OpConstNil, config.fe.TypeBytePtr())
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpConst32, config.fe.TypeInt())
		v1.AuxInt = 0
		v.AddArg(v1)
		return true
	}
	// match: (ConstString {s})
	// cond: config.PtrSize == 8 && s.(string) == ""
	// result: (StringMake (ConstNil) (Const64 <config.fe.TypeInt()> [0]))
	for {
		s := v.Aux
		if !(config.PtrSize == 8 && s.(string) == "") {
			break
		}
		v.reset(OpStringMake)
		v0 := b.NewValue0(v.Line, OpConstNil, config.fe.TypeBytePtr())
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpConst64, config.fe.TypeInt())
		v1.AuxInt = 0
		v.AddArg(v1)
		return true
	}
	// match: (ConstString {s})
	// cond: config.PtrSize == 4 && s.(string) != ""
	// result: (StringMake (Addr <config.fe.TypeBytePtr()> {config.fe.StringData(s.(string))} (SB)) (Const32 <config.fe.TypeInt()> [int64(len(s.(string)))]))
	for {
		s := v.Aux
		if !(config.PtrSize == 4 && s.(string) != "") {
			break
		}
		v.reset(OpStringMake)
		v0 := b.NewValue0(v.Line, OpAddr, config.fe.TypeBytePtr())
		v0.Aux = config.fe.StringData(s.(string))
		v1 := b.NewValue0(v.Line, OpSB, config.fe.TypeUintptr())
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Line, OpConst32, config.fe.TypeInt())
		v2.AuxInt = int64(len(s.(string)))
		v.AddArg(v2)
		return true
	}
	// match: (ConstString {s})
	// cond: config.PtrSize == 8 && s.(string) != ""
	// result: (StringMake (Addr <config.fe.TypeBytePtr()> {config.fe.StringData(s.(string))} (SB)) (Const64 <config.fe.TypeInt()> [int64(len(s.(string)))]))
	for {
		s := v.Aux
		if !(config.PtrSize == 8 && s.(string) != "") {
			break
		}
		v.reset(OpStringMake)
		v0 := b.NewValue0(v.Line, OpAddr, config.fe.TypeBytePtr())
		v0.Aux = config.fe.StringData(s.(string))
		v1 := b.NewValue0(v.Line, OpSB, config.fe.TypeUintptr())
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeInt())
		v2.AuxInt = int64(len(s.(string)))
		v.AddArg(v2)
		return true
	}
	return false
}

// rewriteValuegeneric_OpConvert simplifies pointer/uintptr Convert
// chains that thread the same memory value, eliminating redundant
// conversions. It reports whether v was rewritten.
func rewriteValuegeneric_OpConvert(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Convert (Add64 (Convert ptr mem) off) mem)
	// cond:
	// result: (Add64 ptr off)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAdd64 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConvert {
			break
		}
		ptr := v_0_0.Args[0]
		mem := v_0_0.Args[1]
		off := v_0.Args[1]
		if mem != v.Args[1] {
			break
		}
		v.reset(OpAdd64)
		v.AddArg(ptr)
		v.AddArg(off)
		return true
	}
	// match: (Convert (Add64 off (Convert ptr mem)) mem)
	// cond:
	// result: (Add64 ptr off)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAdd64 {
			break
		}
		off := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConvert {
			break
		}
		ptr := v_0_1.Args[0]
		mem := v_0_1.Args[1]
		if mem != v.Args[1] {
			break
		}
		v.reset(OpAdd64)
		v.AddArg(ptr)
		v.AddArg(off)
		return true
	}
	// match: (Convert (Convert ptr mem) mem)
	// cond:
	// result: ptr
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConvert {
			break
		}
		ptr := v_0.Args[0]
		mem := v_0.Args[1]
		if mem != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = ptr.Type
		v.AddArg(ptr)
		return true
	}
	return false
}

// rewriteValuegeneric_OpCvt32Fto64F constant-folds a float32-to-float64
// conversion (widening is exact, so the bits are reused directly).
// It reports whether v was rewritten.
func rewriteValuegeneric_OpCvt32Fto64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt32Fto64F (Const32F [c]))
	// cond:
	// result: (Const64F [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32F {
			break
		}
		c := v_0.AuxInt
		v.reset(OpConst64F)
		v.AuxInt = c
		return true
	}
	return false
}

// rewriteValuegeneric_OpCvt64Fto32F constant-folds a float64-to-float32
// conversion, rounding through i2f32 and re-encoding with f2i.
// It reports whether v was rewritten.
func rewriteValuegeneric_OpCvt64Fto32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt64Fto32F (Const64F [c]))
	// cond:
	// result: (Const32F [f2i(float64(i2f32(c)))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64F {
			break
		}
		c := v_0.AuxInt
		v.reset(OpConst32F)
		v.AuxInt = f2i(float64(i2f32(c)))
		return true
	}
	return false
}
// rewriteValuegeneric_OpDiv64 strength-reduces signed 64-bit division
// by a constant into a multiply-by-magic-number sequence
// (Hmul64/shift/sign-fix, per the smagic64* helpers).
// It reports whether v was rewritten.
func rewriteValuegeneric_OpDiv64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div64 <t> x (Const64 [c]))
	// cond: c > 0 && smagic64ok(c) && smagic64m(c) > 0
	// result: (Sub64 <t> (Rsh64x64 <t> (Hmul64 <t> (Const64 <t> [smagic64m(c)]) x) (Const64 <t> [smagic64s(c)])) (Rsh64x64 <t> x (Const64 <t> [63])))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(c > 0 && smagic64ok(c) && smagic64m(c) > 0) {
			break
		}
		v.reset(OpSub64)
		v.Type = t
		v0 := b.NewValue0(v.Line, OpRsh64x64, t)
		v1 := b.NewValue0(v.Line, OpHmul64, t)
		v2 := b.NewValue0(v.Line, OpConst64, t)
		v2.AuxInt = smagic64m(c)
		v1.AddArg(v2)
		v1.AddArg(x)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Line, OpConst64, t)
		v3.AuxInt = smagic64s(c)
		v0.AddArg(v3)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Line, OpRsh64x64, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Line, OpConst64, t)
		v5.AuxInt = 63
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
	// match: (Div64 <t> x (Const64 [c]))
	// cond: c > 0 && smagic64ok(c) && smagic64m(c) < 0
	// result: (Sub64 <t> (Rsh64x64 <t> (Add64 <t> (Hmul64 <t> (Const64 <t> [smagic64m(c)]) x) x) (Const64 <t> [smagic64s(c)])) (Rsh64x64 <t> x (Const64 <t> [63])))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(c > 0 && smagic64ok(c) && smagic64m(c) < 0) {
			break
		}
		v.reset(OpSub64)
		v.Type = t
		v0 := b.NewValue0(v.Line, OpRsh64x64, t)
		v1 := b.NewValue0(v.Line, OpAdd64, t)
		v2 := b.NewValue0(v.Line, OpHmul64, t)
		v3 := b.NewValue0(v.Line, OpConst64, t)
		v3.AuxInt = smagic64m(c)
		v2.AddArg(v3)
		v2.AddArg(x)
		v1.AddArg(v2)
		v1.AddArg(x)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Line, OpConst64, t)
		v4.AuxInt = smagic64s(c)
		v0.AddArg(v4)
		v.AddArg(v0)
		v5 := b.NewValue0(v.Line, OpRsh64x64, t)
		v5.AddArg(x)
		v6 := b.NewValue0(v.Line, OpConst64, t)
		v6.AuxInt = 63
		v5.AddArg(v6)
		v.AddArg(v5)
		return true
	}
	// match: (Div64 <t> x (Const64 [c]))
	// cond: c < 0 && smagic64ok(c) && smagic64m(c) > 0
	// result: (Neg64 <t> (Sub64 <t> (Rsh64x64 <t> (Hmul64 <t> (Const64 <t> [smagic64m(c)]) x) (Const64 <t> [smagic64s(c)])) (Rsh64x64 <t> x (Const64 <t> [63]))))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(c < 0 && smagic64ok(c) && smagic64m(c) > 0) {
			break
		}
		v.reset(OpNeg64)
		v.Type = t
		v0 := b.NewValue0(v.Line, OpSub64, t)
		v1 := b.NewValue0(v.Line, OpRsh64x64, t)
		v2 := b.NewValue0(v.Line, OpHmul64, t)
		v3 := b.NewValue0(v.Line, OpConst64, t)
		v3.AuxInt = smagic64m(c)
		v2.AddArg(v3)
		v2.AddArg(x)
		v1.AddArg(v2)
		v4 := b.NewValue0(v.Line, OpConst64, t)
		v4.AuxInt = smagic64s(c)
		v1.AddArg(v4)
		v0.AddArg(v1)
		v5 := b.NewValue0(v.Line, OpRsh64x64, t)
		v5.AddArg(x)
		v6 := b.NewValue0(v.Line, OpConst64, t)
		v6.AuxInt = 63
		v5.AddArg(v6)
		v0.AddArg(v5)
		v.AddArg(v0)
		return true
	}
	// match: (Div64 <t> x (Const64 [c]))
	// cond: c < 0 && smagic64ok(c) && smagic64m(c) < 0
	// result: (Neg64 <t> (Sub64 <t> (Rsh64x64 <t> (Add64 <t> (Hmul64 <t> (Const64 <t> [smagic64m(c)]) x) x) (Const64 <t> [smagic64s(c)])) (Rsh64x64 <t> x (Const64 <t> [63]))))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(c < 0 && smagic64ok(c) && smagic64m(c) < 0) {
			break
		}
		v.reset(OpNeg64)
		v.Type = t
		v0 := b.NewValue0(v.Line, OpSub64, t)
		v1 := b.NewValue0(v.Line, OpRsh64x64, t)
		v2 := b.NewValue0(v.Line, OpAdd64, t)
		v3 := b.NewValue0(v.Line, OpHmul64, t)
		v4 := b.NewValue0(v.Line, OpConst64, t)
		v4.AuxInt = smagic64m(c)
		v3.AddArg(v4)
		v3.AddArg(x)
		v2.AddArg(v3)
		v2.AddArg(x)
		v1.AddArg(v2)
		v5 := b.NewValue0(v.Line, OpConst64, t)
		v5.AuxInt = smagic64s(c)
		v1.AddArg(v5)
		v0.AddArg(v1)
		v6 := b.NewValue0(v.Line, OpRsh64x64, t)
		v6.AddArg(x)
		v7 := b.NewValue0(v.Line, OpConst64, t)
		v7.AuxInt = 63
		v6.AddArg(v7)
		v0.AddArg(v6)
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValuegeneric_OpDiv64u strength-reduces unsigned 64-bit
// division by a constant: a shift for powers of two, otherwise an
// unsigned magic-number multiply (umagic64* helpers, with or without
// the Avg64u add-fixup). It reports whether v was rewritten.
func rewriteValuegeneric_OpDiv64u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div64u <t> n (Const64 [c]))
	// cond: isPowerOfTwo(c)
	// result: (Rsh64Ux64 n (Const64 <t> [log2(c)]))
	for {
		t := v.Type
		n := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpRsh64Ux64)
		v.AddArg(n)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = log2(c)
		v.AddArg(v0)
		return true
	}
	// match: (Div64u <t> x (Const64 [c]))
	// cond: umagic64ok(c) && !umagic64a(c)
	// result: (Rsh64Ux64 (Hmul64u <t> (Const64 <t> [umagic64m(c)]) x) (Const64 <t> [umagic64s(c)]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(umagic64ok(c) && !umagic64a(c)) {
			break
		}
		v.reset(OpRsh64Ux64)
		v0 := b.NewValue0(v.Line, OpHmul64u, t)
		v1 := b.NewValue0(v.Line, OpConst64, t)
		v1.AuxInt = umagic64m(c)
		v0.AddArg(v1)
		v0.AddArg(x)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Line, OpConst64, t)
		v2.AuxInt = umagic64s(c)
		v.AddArg(v2)
		return true
	}
	// match: (Div64u <t> x (Const64 [c]))
	// cond: umagic64ok(c) && umagic64a(c)
	// result: (Rsh64Ux64 (Avg64u <t> (Hmul64u <t> x (Const64 <t> [umagic64m(c)])) x) (Const64 <t> [umagic64s(c)-1]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(umagic64ok(c) && umagic64a(c)) {
			break
		}
		v.reset(OpRsh64Ux64)
		v0 := b.NewValue0(v.Line, OpAvg64u, t)
		v1 := b.NewValue0(v.Line, OpHmul64u, t)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Line, OpConst64, t)
		v2.AuxInt = umagic64m(c)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v0.AddArg(x)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Line, OpConst64, t)
		v3.AuxInt = umagic64s(c) - 1
		v.AddArg(v3)
		return true
	}
	return false
}

// rewriteValuegeneric_OpEq16 simplifies 16-bit equality: x==x, folding
// a constant through an Add16, canonicalizing the constant to the left,
// and constant-constant folding. It reports whether v was rewritten.
func rewriteValuegeneric_OpEq16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Eq16 x x)
	// cond:
	// result: (ConstBool [1])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (Eq16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x))
	// cond:
	// result: (Eq16 (Const16 <t> [int64(int16(c-d))]) x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpAdd16 {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst16 {
			break
		}
		if v_1_0.Type != t {
			break
		}
		d := v_1_0.AuxInt
		x := v_1.Args[1]
		v.reset(OpEq16)
		v0 := b.NewValue0(v.Line, OpConst16, t)
		v0.AuxInt = int64(int16(c - d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Eq16 x (Const16 <t> [c]))
	// cond: x.Op != OpConst16
	// result: (Eq16 (Const16 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst16) {
			break
		}
		v.reset(OpEq16)
		v0 := b.NewValue0(v.Line, OpConst16, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Eq16 (Const16 [c]) (Const16 [d]))
	// cond:
	// result: (ConstBool [b2i(c == d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c == d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpEq32 simplifies 32-bit equality: x==x, folding
// a constant through an Add32, canonicalizing the constant to the left,
// and constant-constant folding. It reports whether v was rewritten.
func rewriteValuegeneric_OpEq32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Eq32 x x)
	// cond:
	// result: (ConstBool [1])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (Eq32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x))
	// cond:
	// result: (Eq32 (Const32 <t> [int64(int32(c-d))]) x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpAdd32 {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst32 {
			break
		}
		if v_1_0.Type != t {
			break
		}
		d := v_1_0.AuxInt
		x := v_1.Args[1]
		v.reset(OpEq32)
		v0 := b.NewValue0(v.Line, OpConst32, t)
		v0.AuxInt = int64(int32(c - d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Eq32 x (Const32 <t> [c]))
	// cond: x.Op != OpConst32
	// result: (Eq32 (Const32 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst32) {
			break
		}
		v.reset(OpEq32)
		v0 := b.NewValue0(v.Line, OpConst32, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Eq32 (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(c == d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c == d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpEq64 simplifies 64-bit equality: x==x, folding
// a constant through an Add64, canonicalizing the constant to the left,
// and constant-constant folding. It reports whether v was rewritten.
func rewriteValuegeneric_OpEq64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Eq64 x x)
	// cond:
	// result: (ConstBool [1])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (Eq64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x))
	// cond:
	// result: (Eq64 (Const64 <t> [c-d]) x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpAdd64 {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst64 {
			break
		}
		if v_1_0.Type != t {
			break
		}
		d := v_1_0.AuxInt
		x := v_1.Args[1]
		v.reset(OpEq64)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = c - d
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Eq64 x (Const64 <t> [c]))
	// cond: x.Op != OpConst64
	// result: (Eq64 (Const64 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst64) {
			break
		}
		v.reset(OpEq64)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Eq64 (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (ConstBool [b2i(c == d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c == d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpEq8 simplifies 8-bit equality: x==x, folding
// a constant through an Add8, canonicalizing the constant to the left,
// and constant-constant folding. It reports whether v was rewritten.
func rewriteValuegeneric_OpEq8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Eq8 x x)
	// cond:
	// result: (ConstBool [1])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (Eq8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x))
	// cond:
	// result: (Eq8 (Const8 <t> [int64(int8(c-d))]) x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpAdd8 {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst8 {
			break
		}
		if v_1_0.Type != t {
			break
		}
		d := v_1_0.AuxInt
		x := v_1.Args[1]
		v.reset(OpEq8)
		v0 := b.NewValue0(v.Line, OpConst8, t)
		v0.AuxInt = int64(int8(c - d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Eq8 x (Const8 <t> [c]))
	// cond: x.Op != OpConst8
	// result: (Eq8 (Const8 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst8) {
			break
		}
		v.reset(OpEq8)
		v0 := b.NewValue0(v.Line, OpConst8, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Eq8 (Const8 [c]) (Const8 [d]))
	// cond:
	// result: (ConstBool [b2i(c == d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c == d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpEqB simplifies boolean equality: constant
// folding, (false == x) -> !x, and (true == x) -> x.
// It reports whether v was rewritten.
func rewriteValuegeneric_OpEqB(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (EqB (ConstBool [c]) (ConstBool [d]))
	// cond:
	// result: (ConstBool [b2i(c == d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConstBool {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConstBool {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c == d)
		return true
	}
	// match: (EqB (ConstBool [0]) x)
	// cond:
	// result: (Not x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConstBool {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		x := v.Args[1]
		v.reset(OpNot)
		v.AddArg(x)
		return true
	}
	// match: (EqB (ConstBool [1]) x)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConstBool {
			break
		}
		if v_0.AuxInt != 1 {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpEqInter lowers interface equality to pointer
// equality of the itab words. Always rewrites, so it always returns true.
func rewriteValuegeneric_OpEqInter(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (EqInter x y)
	// cond:
	// result: (EqPtr (ITab x) (ITab y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpEqPtr)
		v0 := b.NewValue0(v.Line, OpITab, config.fe.TypeBytePtr())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpITab, config.fe.TypeBytePtr())
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}

// rewriteValuegeneric_OpEqPtr simplifies pointer comparison against
// nil into a negated nil check. It reports whether v was rewritten.
func rewriteValuegeneric_OpEqPtr(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (EqPtr p (ConstNil))
	// cond:
	// result: (Not (IsNonNil p))
	for {
		p := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConstNil {
			break
		}
		v.reset(OpNot)
		v0 := b.NewValue0(v.Line, OpIsNonNil, config.fe.TypeBool())
		v0.AddArg(p)
		v.AddArg(v0)
		return true
	}
	// match: (EqPtr (ConstNil) p)
	// cond:
	// result: (Not (IsNonNil p))
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConstNil {
			break
		}
		p := v.Args[1]
		v.reset(OpNot)
		v0 := b.NewValue0(v.Line, OpIsNonNil, config.fe.TypeBool())
		v0.AddArg(p)
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValuegeneric_OpEqSlice lowers slice equality to pointer
// equality of the data pointers. Always rewrites, so it always
// returns true.
func rewriteValuegeneric_OpEqSlice(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (EqSlice x y)
	// cond:
	// result: (EqPtr (SlicePtr x) (SlicePtr y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpEqPtr)
		v0 := b.NewValue0(v.Line, OpSlicePtr, config.fe.TypeBytePtr())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpSlicePtr, config.fe.TypeBytePtr())
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}

// rewriteValuegeneric_OpGeq16 constant-folds signed 16-bit >=.
// It reports whether v was rewritten.
func rewriteValuegeneric_OpGeq16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq16 (Const16 [c]) (Const16 [d]))
	// cond:
	// result: (ConstBool [b2i(c >= d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c >= d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpGeq16U constant-folds unsigned 16-bit >=.
// It reports whether v was rewritten.
func rewriteValuegeneric_OpGeq16U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq16U (Const16 [c]) (Const16 [d]))
	// cond:
	// result: (ConstBool [b2i(uint16(c) >= uint16(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint16(c) >= uint16(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpGeq32 constant-folds signed 32-bit >=.
// It reports whether v was rewritten.
func rewriteValuegeneric_OpGeq32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq32 (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(c >= d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c >= d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpGeq32U constant-folds unsigned 32-bit >=.
// It reports whether v was rewritten.
func rewriteValuegeneric_OpGeq32U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq32U (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(uint32(c) >= uint32(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint32(c) >= uint32(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpGeq64 constant-folds signed 64-bit >=.
// It reports whether v was rewritten.
func rewriteValuegeneric_OpGeq64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq64 (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (ConstBool [b2i(c >= d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c >= d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpGeq64U constant-folds unsigned 64-bit >=.
// It reports whether v was rewritten.
func rewriteValuegeneric_OpGeq64U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq64U (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (ConstBool [b2i(uint64(c) >= uint64(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint64(c) >= uint64(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpGeq8 constant-folds signed 8-bit >=.
// It reports whether v was rewritten.
func rewriteValuegeneric_OpGeq8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq8 (Const8 [c]) (Const8 [d]))
	// cond:
	// result: (ConstBool [b2i(c >= d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c >= d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpGeq8U constant-folds unsigned 8-bit >=.
// It reports whether v was rewritten.
func rewriteValuegeneric_OpGeq8U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq8U (Const8 [c]) (Const8 [d]))
	// cond:
	// result: (ConstBool [b2i(uint8(c) >= uint8(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint8(c) >= uint8(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpGreater16 constant-folds signed 16-bit >.
// It reports whether v was rewritten.
func rewriteValuegeneric_OpGreater16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater16 (Const16 [c]) (Const16 [d]))
	// cond:
	// result: (ConstBool [b2i(c > d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c > d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpGreater16U constant-folds unsigned 16-bit >.
// It reports whether v was rewritten.
func rewriteValuegeneric_OpGreater16U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater16U (Const16 [c]) (Const16 [d]))
	// cond:
	// result: (ConstBool [b2i(uint16(c) > uint16(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint16(c) > uint16(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpGreater32 constant-folds signed 32-bit >.
// It reports whether v was rewritten.
func rewriteValuegeneric_OpGreater32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater32 (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(c > d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c > d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpGreater32U constant-folds unsigned 32-bit >.
// It reports whether v was rewritten.
func rewriteValuegeneric_OpGreater32U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater32U (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(uint32(c) > uint32(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint32(c) > uint32(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpGreater64 constant-folds signed 64-bit >.
// It reports whether v was rewritten.
func rewriteValuegeneric_OpGreater64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater64 (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (ConstBool [b2i(c > d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c > d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpGreater64U constant-folds unsigned 64-bit >.
// It reports whether v was rewritten.
func rewriteValuegeneric_OpGreater64U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater64U (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (ConstBool [b2i(uint64(c) > uint64(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint64(c) > uint64(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpGreater8 constant-folds signed 8-bit >.
// It reports whether v was rewritten.
func rewriteValuegeneric_OpGreater8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater8 (Const8 [c]) (Const8 [d]))
	// cond:
	// result: (ConstBool [b2i(c > d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c > d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpGreater8U constant-folds unsigned 8-bit >.
// It reports whether v was rewritten.
func rewriteValuegeneric_OpGreater8U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater8U (Const8 [c]) (Const8 [d]))
	// cond:
	// result: (ConstBool [b2i(uint8(c) > uint8(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint8(c) > uint8(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpIsInBounds folds bounds checks that are
// statically always true, e.g. a zero-extended 8-bit index against a
// bound of at least 1<<8. It reports whether v was rewritten.
// (Definition continues beyond this view.)
func rewriteValuegeneric_OpIsInBounds(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (IsInBounds (ZeroExt8to32 _) (Const32 [c]))
	// cond: (1 << 8) <= c
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpZeroExt8to32 {
			break
		}
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		c := v_1.AuxInt
		if !((1 << 8) <= c) {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsInBounds (ZeroExt8to64 _) (Const64 [c]))
	// cond: (1 << 8) <= c
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpZeroExt8to64 {
			break
		}
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
2956 } 2957 c := v_1.AuxInt 2958 if !((1 << 8) <= c) { 2959 break 2960 } 2961 v.reset(OpConstBool) 2962 v.AuxInt = 1 2963 return true 2964 } 2965 // match: (IsInBounds (ZeroExt16to32 _) (Const32 [c])) 2966 // cond: (1 << 16) <= c 2967 // result: (ConstBool [1]) 2968 for { 2969 v_0 := v.Args[0] 2970 if v_0.Op != OpZeroExt16to32 { 2971 break 2972 } 2973 v_1 := v.Args[1] 2974 if v_1.Op != OpConst32 { 2975 break 2976 } 2977 c := v_1.AuxInt 2978 if !((1 << 16) <= c) { 2979 break 2980 } 2981 v.reset(OpConstBool) 2982 v.AuxInt = 1 2983 return true 2984 } 2985 // match: (IsInBounds (ZeroExt16to64 _) (Const64 [c])) 2986 // cond: (1 << 16) <= c 2987 // result: (ConstBool [1]) 2988 for { 2989 v_0 := v.Args[0] 2990 if v_0.Op != OpZeroExt16to64 { 2991 break 2992 } 2993 v_1 := v.Args[1] 2994 if v_1.Op != OpConst64 { 2995 break 2996 } 2997 c := v_1.AuxInt 2998 if !((1 << 16) <= c) { 2999 break 3000 } 3001 v.reset(OpConstBool) 3002 v.AuxInt = 1 3003 return true 3004 } 3005 // match: (IsInBounds x x) 3006 // cond: 3007 // result: (ConstBool [0]) 3008 for { 3009 x := v.Args[0] 3010 if x != v.Args[1] { 3011 break 3012 } 3013 v.reset(OpConstBool) 3014 v.AuxInt = 0 3015 return true 3016 } 3017 // match: (IsInBounds (And32 (Const32 [c]) _) (Const32 [d])) 3018 // cond: 0 <= c && c < d 3019 // result: (ConstBool [1]) 3020 for { 3021 v_0 := v.Args[0] 3022 if v_0.Op != OpAnd32 { 3023 break 3024 } 3025 v_0_0 := v_0.Args[0] 3026 if v_0_0.Op != OpConst32 { 3027 break 3028 } 3029 c := v_0_0.AuxInt 3030 v_1 := v.Args[1] 3031 if v_1.Op != OpConst32 { 3032 break 3033 } 3034 d := v_1.AuxInt 3035 if !(0 <= c && c < d) { 3036 break 3037 } 3038 v.reset(OpConstBool) 3039 v.AuxInt = 1 3040 return true 3041 } 3042 // match: (IsInBounds (And64 (Const64 [c]) _) (Const64 [d])) 3043 // cond: 0 <= c && c < d 3044 // result: (ConstBool [1]) 3045 for { 3046 v_0 := v.Args[0] 3047 if v_0.Op != OpAnd64 { 3048 break 3049 } 3050 v_0_0 := v_0.Args[0] 3051 if v_0_0.Op != OpConst64 { 3052 break 3053 } 3054 c := 
v_0_0.AuxInt 3055 v_1 := v.Args[1] 3056 if v_1.Op != OpConst64 { 3057 break 3058 } 3059 d := v_1.AuxInt 3060 if !(0 <= c && c < d) { 3061 break 3062 } 3063 v.reset(OpConstBool) 3064 v.AuxInt = 1 3065 return true 3066 } 3067 // match: (IsInBounds (Const32 [c]) (Const32 [d])) 3068 // cond: 3069 // result: (ConstBool [b2i(0 <= c && c < d)]) 3070 for { 3071 v_0 := v.Args[0] 3072 if v_0.Op != OpConst32 { 3073 break 3074 } 3075 c := v_0.AuxInt 3076 v_1 := v.Args[1] 3077 if v_1.Op != OpConst32 { 3078 break 3079 } 3080 d := v_1.AuxInt 3081 v.reset(OpConstBool) 3082 v.AuxInt = b2i(0 <= c && c < d) 3083 return true 3084 } 3085 // match: (IsInBounds (Const64 [c]) (Const64 [d])) 3086 // cond: 3087 // result: (ConstBool [b2i(0 <= c && c < d)]) 3088 for { 3089 v_0 := v.Args[0] 3090 if v_0.Op != OpConst64 { 3091 break 3092 } 3093 c := v_0.AuxInt 3094 v_1 := v.Args[1] 3095 if v_1.Op != OpConst64 { 3096 break 3097 } 3098 d := v_1.AuxInt 3099 v.reset(OpConstBool) 3100 v.AuxInt = b2i(0 <= c && c < d) 3101 return true 3102 } 3103 // match: (IsInBounds (Mod32u _ y) y) 3104 // cond: 3105 // result: (ConstBool [1]) 3106 for { 3107 v_0 := v.Args[0] 3108 if v_0.Op != OpMod32u { 3109 break 3110 } 3111 y := v_0.Args[1] 3112 if y != v.Args[1] { 3113 break 3114 } 3115 v.reset(OpConstBool) 3116 v.AuxInt = 1 3117 return true 3118 } 3119 // match: (IsInBounds (Mod64u _ y) y) 3120 // cond: 3121 // result: (ConstBool [1]) 3122 for { 3123 v_0 := v.Args[0] 3124 if v_0.Op != OpMod64u { 3125 break 3126 } 3127 y := v_0.Args[1] 3128 if y != v.Args[1] { 3129 break 3130 } 3131 v.reset(OpConstBool) 3132 v.AuxInt = 1 3133 return true 3134 } 3135 return false 3136 } 3137 func rewriteValuegeneric_OpIsSliceInBounds(v *Value, config *Config) bool { 3138 b := v.Block 3139 _ = b 3140 // match: (IsSliceInBounds x x) 3141 // cond: 3142 // result: (ConstBool [1]) 3143 for { 3144 x := v.Args[0] 3145 if x != v.Args[1] { 3146 break 3147 } 3148 v.reset(OpConstBool) 3149 v.AuxInt = 1 3150 return true 3151 } 3152 // match: 
(IsSliceInBounds (And32 (Const32 [c]) _) (Const32 [d])) 3153 // cond: 0 <= c && c <= d 3154 // result: (ConstBool [1]) 3155 for { 3156 v_0 := v.Args[0] 3157 if v_0.Op != OpAnd32 { 3158 break 3159 } 3160 v_0_0 := v_0.Args[0] 3161 if v_0_0.Op != OpConst32 { 3162 break 3163 } 3164 c := v_0_0.AuxInt 3165 v_1 := v.Args[1] 3166 if v_1.Op != OpConst32 { 3167 break 3168 } 3169 d := v_1.AuxInt 3170 if !(0 <= c && c <= d) { 3171 break 3172 } 3173 v.reset(OpConstBool) 3174 v.AuxInt = 1 3175 return true 3176 } 3177 // match: (IsSliceInBounds (And64 (Const64 [c]) _) (Const64 [d])) 3178 // cond: 0 <= c && c <= d 3179 // result: (ConstBool [1]) 3180 for { 3181 v_0 := v.Args[0] 3182 if v_0.Op != OpAnd64 { 3183 break 3184 } 3185 v_0_0 := v_0.Args[0] 3186 if v_0_0.Op != OpConst64 { 3187 break 3188 } 3189 c := v_0_0.AuxInt 3190 v_1 := v.Args[1] 3191 if v_1.Op != OpConst64 { 3192 break 3193 } 3194 d := v_1.AuxInt 3195 if !(0 <= c && c <= d) { 3196 break 3197 } 3198 v.reset(OpConstBool) 3199 v.AuxInt = 1 3200 return true 3201 } 3202 // match: (IsSliceInBounds (Const32 [0]) _) 3203 // cond: 3204 // result: (ConstBool [1]) 3205 for { 3206 v_0 := v.Args[0] 3207 if v_0.Op != OpConst32 { 3208 break 3209 } 3210 if v_0.AuxInt != 0 { 3211 break 3212 } 3213 v.reset(OpConstBool) 3214 v.AuxInt = 1 3215 return true 3216 } 3217 // match: (IsSliceInBounds (Const64 [0]) _) 3218 // cond: 3219 // result: (ConstBool [1]) 3220 for { 3221 v_0 := v.Args[0] 3222 if v_0.Op != OpConst64 { 3223 break 3224 } 3225 if v_0.AuxInt != 0 { 3226 break 3227 } 3228 v.reset(OpConstBool) 3229 v.AuxInt = 1 3230 return true 3231 } 3232 // match: (IsSliceInBounds (Const32 [c]) (Const32 [d])) 3233 // cond: 3234 // result: (ConstBool [b2i(0 <= c && c <= d)]) 3235 for { 3236 v_0 := v.Args[0] 3237 if v_0.Op != OpConst32 { 3238 break 3239 } 3240 c := v_0.AuxInt 3241 v_1 := v.Args[1] 3242 if v_1.Op != OpConst32 { 3243 break 3244 } 3245 d := v_1.AuxInt 3246 v.reset(OpConstBool) 3247 v.AuxInt = b2i(0 <= c && c <= d) 3248 return true 
3249 } 3250 // match: (IsSliceInBounds (Const64 [c]) (Const64 [d])) 3251 // cond: 3252 // result: (ConstBool [b2i(0 <= c && c <= d)]) 3253 for { 3254 v_0 := v.Args[0] 3255 if v_0.Op != OpConst64 { 3256 break 3257 } 3258 c := v_0.AuxInt 3259 v_1 := v.Args[1] 3260 if v_1.Op != OpConst64 { 3261 break 3262 } 3263 d := v_1.AuxInt 3264 v.reset(OpConstBool) 3265 v.AuxInt = b2i(0 <= c && c <= d) 3266 return true 3267 } 3268 // match: (IsSliceInBounds (SliceLen x) (SliceCap x)) 3269 // cond: 3270 // result: (ConstBool [1]) 3271 for { 3272 v_0 := v.Args[0] 3273 if v_0.Op != OpSliceLen { 3274 break 3275 } 3276 x := v_0.Args[0] 3277 v_1 := v.Args[1] 3278 if v_1.Op != OpSliceCap { 3279 break 3280 } 3281 if x != v_1.Args[0] { 3282 break 3283 } 3284 v.reset(OpConstBool) 3285 v.AuxInt = 1 3286 return true 3287 } 3288 return false 3289 } 3290 func rewriteValuegeneric_OpLeq16(v *Value, config *Config) bool { 3291 b := v.Block 3292 _ = b 3293 // match: (Leq16 (Const16 [c]) (Const16 [d])) 3294 // cond: 3295 // result: (ConstBool [b2i(c <= d)]) 3296 for { 3297 v_0 := v.Args[0] 3298 if v_0.Op != OpConst16 { 3299 break 3300 } 3301 c := v_0.AuxInt 3302 v_1 := v.Args[1] 3303 if v_1.Op != OpConst16 { 3304 break 3305 } 3306 d := v_1.AuxInt 3307 v.reset(OpConstBool) 3308 v.AuxInt = b2i(c <= d) 3309 return true 3310 } 3311 return false 3312 } 3313 func rewriteValuegeneric_OpLeq16U(v *Value, config *Config) bool { 3314 b := v.Block 3315 _ = b 3316 // match: (Leq16U (Const16 [c]) (Const16 [d])) 3317 // cond: 3318 // result: (ConstBool [b2i(uint16(c) <= uint16(d))]) 3319 for { 3320 v_0 := v.Args[0] 3321 if v_0.Op != OpConst16 { 3322 break 3323 } 3324 c := v_0.AuxInt 3325 v_1 := v.Args[1] 3326 if v_1.Op != OpConst16 { 3327 break 3328 } 3329 d := v_1.AuxInt 3330 v.reset(OpConstBool) 3331 v.AuxInt = b2i(uint16(c) <= uint16(d)) 3332 return true 3333 } 3334 return false 3335 } 3336 func rewriteValuegeneric_OpLeq32(v *Value, config *Config) bool { 3337 b := v.Block 3338 _ = b 3339 // match: (Leq32 
(Const32 [c]) (Const32 [d])) 3340 // cond: 3341 // result: (ConstBool [b2i(c <= d)]) 3342 for { 3343 v_0 := v.Args[0] 3344 if v_0.Op != OpConst32 { 3345 break 3346 } 3347 c := v_0.AuxInt 3348 v_1 := v.Args[1] 3349 if v_1.Op != OpConst32 { 3350 break 3351 } 3352 d := v_1.AuxInt 3353 v.reset(OpConstBool) 3354 v.AuxInt = b2i(c <= d) 3355 return true 3356 } 3357 return false 3358 } 3359 func rewriteValuegeneric_OpLeq32U(v *Value, config *Config) bool { 3360 b := v.Block 3361 _ = b 3362 // match: (Leq32U (Const32 [c]) (Const32 [d])) 3363 // cond: 3364 // result: (ConstBool [b2i(uint32(c) <= uint32(d))]) 3365 for { 3366 v_0 := v.Args[0] 3367 if v_0.Op != OpConst32 { 3368 break 3369 } 3370 c := v_0.AuxInt 3371 v_1 := v.Args[1] 3372 if v_1.Op != OpConst32 { 3373 break 3374 } 3375 d := v_1.AuxInt 3376 v.reset(OpConstBool) 3377 v.AuxInt = b2i(uint32(c) <= uint32(d)) 3378 return true 3379 } 3380 return false 3381 } 3382 func rewriteValuegeneric_OpLeq64(v *Value, config *Config) bool { 3383 b := v.Block 3384 _ = b 3385 // match: (Leq64 (Const64 [c]) (Const64 [d])) 3386 // cond: 3387 // result: (ConstBool [b2i(c <= d)]) 3388 for { 3389 v_0 := v.Args[0] 3390 if v_0.Op != OpConst64 { 3391 break 3392 } 3393 c := v_0.AuxInt 3394 v_1 := v.Args[1] 3395 if v_1.Op != OpConst64 { 3396 break 3397 } 3398 d := v_1.AuxInt 3399 v.reset(OpConstBool) 3400 v.AuxInt = b2i(c <= d) 3401 return true 3402 } 3403 return false 3404 } 3405 func rewriteValuegeneric_OpLeq64U(v *Value, config *Config) bool { 3406 b := v.Block 3407 _ = b 3408 // match: (Leq64U (Const64 [c]) (Const64 [d])) 3409 // cond: 3410 // result: (ConstBool [b2i(uint64(c) <= uint64(d))]) 3411 for { 3412 v_0 := v.Args[0] 3413 if v_0.Op != OpConst64 { 3414 break 3415 } 3416 c := v_0.AuxInt 3417 v_1 := v.Args[1] 3418 if v_1.Op != OpConst64 { 3419 break 3420 } 3421 d := v_1.AuxInt 3422 v.reset(OpConstBool) 3423 v.AuxInt = b2i(uint64(c) <= uint64(d)) 3424 return true 3425 } 3426 return false 3427 } 3428 func rewriteValuegeneric_OpLeq8(v 
*Value, config *Config) bool { 3429 b := v.Block 3430 _ = b 3431 // match: (Leq8 (Const8 [c]) (Const8 [d])) 3432 // cond: 3433 // result: (ConstBool [b2i(c <= d)]) 3434 for { 3435 v_0 := v.Args[0] 3436 if v_0.Op != OpConst8 { 3437 break 3438 } 3439 c := v_0.AuxInt 3440 v_1 := v.Args[1] 3441 if v_1.Op != OpConst8 { 3442 break 3443 } 3444 d := v_1.AuxInt 3445 v.reset(OpConstBool) 3446 v.AuxInt = b2i(c <= d) 3447 return true 3448 } 3449 return false 3450 } 3451 func rewriteValuegeneric_OpLeq8U(v *Value, config *Config) bool { 3452 b := v.Block 3453 _ = b 3454 // match: (Leq8U (Const8 [c]) (Const8 [d])) 3455 // cond: 3456 // result: (ConstBool [b2i(uint8(c) <= uint8(d))]) 3457 for { 3458 v_0 := v.Args[0] 3459 if v_0.Op != OpConst8 { 3460 break 3461 } 3462 c := v_0.AuxInt 3463 v_1 := v.Args[1] 3464 if v_1.Op != OpConst8 { 3465 break 3466 } 3467 d := v_1.AuxInt 3468 v.reset(OpConstBool) 3469 v.AuxInt = b2i(uint8(c) <= uint8(d)) 3470 return true 3471 } 3472 return false 3473 } 3474 func rewriteValuegeneric_OpLess16(v *Value, config *Config) bool { 3475 b := v.Block 3476 _ = b 3477 // match: (Less16 (Const16 [c]) (Const16 [d])) 3478 // cond: 3479 // result: (ConstBool [b2i(c < d)]) 3480 for { 3481 v_0 := v.Args[0] 3482 if v_0.Op != OpConst16 { 3483 break 3484 } 3485 c := v_0.AuxInt 3486 v_1 := v.Args[1] 3487 if v_1.Op != OpConst16 { 3488 break 3489 } 3490 d := v_1.AuxInt 3491 v.reset(OpConstBool) 3492 v.AuxInt = b2i(c < d) 3493 return true 3494 } 3495 return false 3496 } 3497 func rewriteValuegeneric_OpLess16U(v *Value, config *Config) bool { 3498 b := v.Block 3499 _ = b 3500 // match: (Less16U (Const16 [c]) (Const16 [d])) 3501 // cond: 3502 // result: (ConstBool [b2i(uint16(c) < uint16(d))]) 3503 for { 3504 v_0 := v.Args[0] 3505 if v_0.Op != OpConst16 { 3506 break 3507 } 3508 c := v_0.AuxInt 3509 v_1 := v.Args[1] 3510 if v_1.Op != OpConst16 { 3511 break 3512 } 3513 d := v_1.AuxInt 3514 v.reset(OpConstBool) 3515 v.AuxInt = b2i(uint16(c) < uint16(d)) 3516 return true 3517 } 
3518 return false 3519 } 3520 func rewriteValuegeneric_OpLess32(v *Value, config *Config) bool { 3521 b := v.Block 3522 _ = b 3523 // match: (Less32 (Const32 [c]) (Const32 [d])) 3524 // cond: 3525 // result: (ConstBool [b2i(c < d)]) 3526 for { 3527 v_0 := v.Args[0] 3528 if v_0.Op != OpConst32 { 3529 break 3530 } 3531 c := v_0.AuxInt 3532 v_1 := v.Args[1] 3533 if v_1.Op != OpConst32 { 3534 break 3535 } 3536 d := v_1.AuxInt 3537 v.reset(OpConstBool) 3538 v.AuxInt = b2i(c < d) 3539 return true 3540 } 3541 return false 3542 } 3543 func rewriteValuegeneric_OpLess32U(v *Value, config *Config) bool { 3544 b := v.Block 3545 _ = b 3546 // match: (Less32U (Const32 [c]) (Const32 [d])) 3547 // cond: 3548 // result: (ConstBool [b2i(uint32(c) < uint32(d))]) 3549 for { 3550 v_0 := v.Args[0] 3551 if v_0.Op != OpConst32 { 3552 break 3553 } 3554 c := v_0.AuxInt 3555 v_1 := v.Args[1] 3556 if v_1.Op != OpConst32 { 3557 break 3558 } 3559 d := v_1.AuxInt 3560 v.reset(OpConstBool) 3561 v.AuxInt = b2i(uint32(c) < uint32(d)) 3562 return true 3563 } 3564 return false 3565 } 3566 func rewriteValuegeneric_OpLess64(v *Value, config *Config) bool { 3567 b := v.Block 3568 _ = b 3569 // match: (Less64 (Const64 [c]) (Const64 [d])) 3570 // cond: 3571 // result: (ConstBool [b2i(c < d)]) 3572 for { 3573 v_0 := v.Args[0] 3574 if v_0.Op != OpConst64 { 3575 break 3576 } 3577 c := v_0.AuxInt 3578 v_1 := v.Args[1] 3579 if v_1.Op != OpConst64 { 3580 break 3581 } 3582 d := v_1.AuxInt 3583 v.reset(OpConstBool) 3584 v.AuxInt = b2i(c < d) 3585 return true 3586 } 3587 return false 3588 } 3589 func rewriteValuegeneric_OpLess64U(v *Value, config *Config) bool { 3590 b := v.Block 3591 _ = b 3592 // match: (Less64U (Const64 [c]) (Const64 [d])) 3593 // cond: 3594 // result: (ConstBool [b2i(uint64(c) < uint64(d))]) 3595 for { 3596 v_0 := v.Args[0] 3597 if v_0.Op != OpConst64 { 3598 break 3599 } 3600 c := v_0.AuxInt 3601 v_1 := v.Args[1] 3602 if v_1.Op != OpConst64 { 3603 break 3604 } 3605 d := v_1.AuxInt 3606 
v.reset(OpConstBool)
		v.AuxInt = b2i(uint64(c) < uint64(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpLess8 constant-folds Less8 when both operands are
// Const8 (signed comparison of the AuxInt values).
func rewriteValuegeneric_OpLess8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less8 (Const8 [c]) (Const8 [d]))
	// cond:
	// result: (ConstBool [b2i(c < d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c < d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpLess8U constant-folds Less8U when both operands are
// Const8, comparing the stored AuxInt values as uint8.
func rewriteValuegeneric_OpLess8U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less8U (Const8 [c]) (Const8 [d]))
	// cond:
	// result: (ConstBool [b2i(uint8(c) < uint8(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint8(c) < uint8(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpLoad applies two families of rewrites to Load:
//   - load-after-store forwarding: a Load from the pointer just written by a
//     Store of the same type and full width is replaced by the stored value;
//   - struct decomposition: a Load of an SSA-able struct with 0..4 fields is
//     split into per-field Loads feeding a StructMakeN.
func rewriteValuegeneric_OpLoad(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Load <t1> p1 (Store [w] p2 x _))
	// cond: isSamePtr(p1,p2) && t1.Compare(x.Type)==CMPeq && w == t1.Size()
	// result: x
	for {
		t1 := v.Type
		p1 := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpStore {
			break
		}
		w := v_1.AuxInt
		p2 := v_1.Args[0]
		x := v_1.Args[1]
		// Forward only when the pointers provably alias, the types agree, and
		// the store width covers the whole loaded type.
		if !(isSamePtr(p1, p2) && t1.Compare(x.Type) == CMPeq && w == t1.Size()) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Load <t> _ _)
	// cond: t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t)
	// result: (StructMake0)
	for {
		t := v.Type
		if !(t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t)) {
			break
		}
		v.reset(OpStructMake0)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: t.IsStruct() && t.NumFields() == 1 && config.fe.CanSSA(t)
	// result: (StructMake1 (Load <t.FieldType(0)> ptr mem))
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.IsStruct() && t.NumFields() == 1 && config.fe.CanSSA(t)) {
			break
		}
		v.reset(OpStructMake1)
		v0 := b.NewValue0(v.Line, OpLoad, t.FieldType(0))
		v0.AddArg(ptr)
		v0.AddArg(mem)
		v.AddArg(v0)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t)
	// result: (StructMake2 (Load <t.FieldType(0)> ptr mem) (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem))
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t)) {
			break
		}
		v.reset(OpStructMake2)
		v0 := b.NewValue0(v.Line, OpLoad, t.FieldType(0))
		v0.AddArg(ptr)
		v0.AddArg(mem)
		v.AddArg(v0)
		// Field 1 is loaded through an OffPtr at its offset within the struct.
		v1 := b.NewValue0(v.Line, OpLoad, t.FieldType(1))
		v2 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(1).PtrTo())
		v2.AuxInt = t.FieldOff(1)
		v2.AddArg(ptr)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t)
	// result: (StructMake3 (Load <t.FieldType(0)> ptr mem) (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem) (Load <t.FieldType(2)> (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] ptr) mem))
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t)) {
			break
		}
		v.reset(OpStructMake3)
		v0 := b.NewValue0(v.Line, OpLoad, t.FieldType(0))
		v0.AddArg(ptr)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpLoad, t.FieldType(1))
		v2 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(1).PtrTo())
		v2.AuxInt = t.FieldOff(1)
		v2.AddArg(ptr)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		v3 := b.NewValue0(v.Line, OpLoad, t.FieldType(2))
		v4 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(2).PtrTo())
		v4.AuxInt = t.FieldOff(2)
		v4.AddArg(ptr)
		v3.AddArg(v4)
		v3.AddArg(mem)
		v.AddArg(v3)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t)
	// result: (StructMake4 (Load <t.FieldType(0)> ptr mem) (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem) (Load <t.FieldType(2)> (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] ptr) mem) (Load <t.FieldType(3)> (OffPtr <t.FieldType(3).PtrTo()> [t.FieldOff(3)] ptr) mem))
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t)) {
			break
		}
		v.reset(OpStructMake4)
		v0 := b.NewValue0(v.Line, OpLoad, t.FieldType(0))
		v0.AddArg(ptr)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpLoad, t.FieldType(1))
		v2 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(1).PtrTo())
		v2.AuxInt = t.FieldOff(1)
		v2.AddArg(ptr)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		v3 := b.NewValue0(v.Line, OpLoad, t.FieldType(2))
		v4 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(2).PtrTo())
		v4.AuxInt = t.FieldOff(2)
		v4.AddArg(ptr)
		v3.AddArg(v4)
		v3.AddArg(mem)
		v.AddArg(v3)
		v5 := b.NewValue0(v.Line, OpLoad, t.FieldType(3))
		v6 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(3).PtrTo())
		v6.AuxInt = t.FieldOff(3)
		v6.AddArg(ptr)
		v5.AddArg(v6)
		v5.AddArg(mem)
		v.AddArg(v5)
		return true
	}
	return false
}

func rewriteValuegeneric_OpLsh16x16(v *Value, config *Config) bool {
	b
:= v.Block 3807 _ = b 3808 // match: (Lsh16x16 <t> x (Const16 [c])) 3809 // cond: 3810 // result: (Lsh16x64 x (Const64 <t> [int64(uint16(c))])) 3811 for { 3812 t := v.Type 3813 x := v.Args[0] 3814 v_1 := v.Args[1] 3815 if v_1.Op != OpConst16 { 3816 break 3817 } 3818 c := v_1.AuxInt 3819 v.reset(OpLsh16x64) 3820 v.AddArg(x) 3821 v0 := b.NewValue0(v.Line, OpConst64, t) 3822 v0.AuxInt = int64(uint16(c)) 3823 v.AddArg(v0) 3824 return true 3825 } 3826 // match: (Lsh16x16 (Const16 [0]) _) 3827 // cond: 3828 // result: (Const16 [0]) 3829 for { 3830 v_0 := v.Args[0] 3831 if v_0.Op != OpConst16 { 3832 break 3833 } 3834 if v_0.AuxInt != 0 { 3835 break 3836 } 3837 v.reset(OpConst16) 3838 v.AuxInt = 0 3839 return true 3840 } 3841 return false 3842 } 3843 func rewriteValuegeneric_OpLsh16x32(v *Value, config *Config) bool { 3844 b := v.Block 3845 _ = b 3846 // match: (Lsh16x32 <t> x (Const32 [c])) 3847 // cond: 3848 // result: (Lsh16x64 x (Const64 <t> [int64(uint32(c))])) 3849 for { 3850 t := v.Type 3851 x := v.Args[0] 3852 v_1 := v.Args[1] 3853 if v_1.Op != OpConst32 { 3854 break 3855 } 3856 c := v_1.AuxInt 3857 v.reset(OpLsh16x64) 3858 v.AddArg(x) 3859 v0 := b.NewValue0(v.Line, OpConst64, t) 3860 v0.AuxInt = int64(uint32(c)) 3861 v.AddArg(v0) 3862 return true 3863 } 3864 // match: (Lsh16x32 (Const16 [0]) _) 3865 // cond: 3866 // result: (Const16 [0]) 3867 for { 3868 v_0 := v.Args[0] 3869 if v_0.Op != OpConst16 { 3870 break 3871 } 3872 if v_0.AuxInt != 0 { 3873 break 3874 } 3875 v.reset(OpConst16) 3876 v.AuxInt = 0 3877 return true 3878 } 3879 return false 3880 } 3881 func rewriteValuegeneric_OpLsh16x64(v *Value, config *Config) bool { 3882 b := v.Block 3883 _ = b 3884 // match: (Lsh16x64 (Const16 [c]) (Const64 [d])) 3885 // cond: 3886 // result: (Const16 [int64(int16(c) << uint64(d))]) 3887 for { 3888 v_0 := v.Args[0] 3889 if v_0.Op != OpConst16 { 3890 break 3891 } 3892 c := v_0.AuxInt 3893 v_1 := v.Args[1] 3894 if v_1.Op != OpConst64 { 3895 break 3896 } 3897 d := v_1.AuxInt 
3898 v.reset(OpConst16) 3899 v.AuxInt = int64(int16(c) << uint64(d)) 3900 return true 3901 } 3902 // match: (Lsh16x64 x (Const64 [0])) 3903 // cond: 3904 // result: x 3905 for { 3906 x := v.Args[0] 3907 v_1 := v.Args[1] 3908 if v_1.Op != OpConst64 { 3909 break 3910 } 3911 if v_1.AuxInt != 0 { 3912 break 3913 } 3914 v.reset(OpCopy) 3915 v.Type = x.Type 3916 v.AddArg(x) 3917 return true 3918 } 3919 // match: (Lsh16x64 (Const16 [0]) _) 3920 // cond: 3921 // result: (Const16 [0]) 3922 for { 3923 v_0 := v.Args[0] 3924 if v_0.Op != OpConst16 { 3925 break 3926 } 3927 if v_0.AuxInt != 0 { 3928 break 3929 } 3930 v.reset(OpConst16) 3931 v.AuxInt = 0 3932 return true 3933 } 3934 // match: (Lsh16x64 _ (Const64 [c])) 3935 // cond: uint64(c) >= 16 3936 // result: (Const16 [0]) 3937 for { 3938 v_1 := v.Args[1] 3939 if v_1.Op != OpConst64 { 3940 break 3941 } 3942 c := v_1.AuxInt 3943 if !(uint64(c) >= 16) { 3944 break 3945 } 3946 v.reset(OpConst16) 3947 v.AuxInt = 0 3948 return true 3949 } 3950 // match: (Lsh16x64 <t> (Lsh16x64 x (Const64 [c])) (Const64 [d])) 3951 // cond: !uaddOvf(c,d) 3952 // result: (Lsh16x64 x (Const64 <t> [c+d])) 3953 for { 3954 t := v.Type 3955 v_0 := v.Args[0] 3956 if v_0.Op != OpLsh16x64 { 3957 break 3958 } 3959 x := v_0.Args[0] 3960 v_0_1 := v_0.Args[1] 3961 if v_0_1.Op != OpConst64 { 3962 break 3963 } 3964 c := v_0_1.AuxInt 3965 v_1 := v.Args[1] 3966 if v_1.Op != OpConst64 { 3967 break 3968 } 3969 d := v_1.AuxInt 3970 if !(!uaddOvf(c, d)) { 3971 break 3972 } 3973 v.reset(OpLsh16x64) 3974 v.AddArg(x) 3975 v0 := b.NewValue0(v.Line, OpConst64, t) 3976 v0.AuxInt = c + d 3977 v.AddArg(v0) 3978 return true 3979 } 3980 // match: (Lsh16x64 (Rsh16Ux64 (Lsh16x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) 3981 // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3) 3982 // result: (Lsh16x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3])) 3983 for { 3984 v_0 := v.Args[0] 3985 if v_0.Op != OpRsh16Ux64 { 3986 break 3987 } 3988 
v_0_0 := v_0.Args[0] 3989 if v_0_0.Op != OpLsh16x64 { 3990 break 3991 } 3992 x := v_0_0.Args[0] 3993 v_0_0_1 := v_0_0.Args[1] 3994 if v_0_0_1.Op != OpConst64 { 3995 break 3996 } 3997 c1 := v_0_0_1.AuxInt 3998 v_0_1 := v_0.Args[1] 3999 if v_0_1.Op != OpConst64 { 4000 break 4001 } 4002 c2 := v_0_1.AuxInt 4003 v_1 := v.Args[1] 4004 if v_1.Op != OpConst64 { 4005 break 4006 } 4007 c3 := v_1.AuxInt 4008 if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { 4009 break 4010 } 4011 v.reset(OpLsh16x64) 4012 v.AddArg(x) 4013 v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64()) 4014 v0.AuxInt = c1 - c2 + c3 4015 v.AddArg(v0) 4016 return true 4017 } 4018 return false 4019 } 4020 func rewriteValuegeneric_OpLsh16x8(v *Value, config *Config) bool { 4021 b := v.Block 4022 _ = b 4023 // match: (Lsh16x8 <t> x (Const8 [c])) 4024 // cond: 4025 // result: (Lsh16x64 x (Const64 <t> [int64(uint8(c))])) 4026 for { 4027 t := v.Type 4028 x := v.Args[0] 4029 v_1 := v.Args[1] 4030 if v_1.Op != OpConst8 { 4031 break 4032 } 4033 c := v_1.AuxInt 4034 v.reset(OpLsh16x64) 4035 v.AddArg(x) 4036 v0 := b.NewValue0(v.Line, OpConst64, t) 4037 v0.AuxInt = int64(uint8(c)) 4038 v.AddArg(v0) 4039 return true 4040 } 4041 // match: (Lsh16x8 (Const16 [0]) _) 4042 // cond: 4043 // result: (Const16 [0]) 4044 for { 4045 v_0 := v.Args[0] 4046 if v_0.Op != OpConst16 { 4047 break 4048 } 4049 if v_0.AuxInt != 0 { 4050 break 4051 } 4052 v.reset(OpConst16) 4053 v.AuxInt = 0 4054 return true 4055 } 4056 return false 4057 } 4058 func rewriteValuegeneric_OpLsh32x16(v *Value, config *Config) bool { 4059 b := v.Block 4060 _ = b 4061 // match: (Lsh32x16 <t> x (Const16 [c])) 4062 // cond: 4063 // result: (Lsh32x64 x (Const64 <t> [int64(uint16(c))])) 4064 for { 4065 t := v.Type 4066 x := v.Args[0] 4067 v_1 := v.Args[1] 4068 if v_1.Op != OpConst16 { 4069 break 4070 } 4071 c := v_1.AuxInt 4072 v.reset(OpLsh32x64) 4073 v.AddArg(x) 4074 v0 := b.NewValue0(v.Line, OpConst64, t) 4075 v0.AuxInt = 
int64(uint16(c)) 4076 v.AddArg(v0) 4077 return true 4078 } 4079 // match: (Lsh32x16 (Const32 [0]) _) 4080 // cond: 4081 // result: (Const32 [0]) 4082 for { 4083 v_0 := v.Args[0] 4084 if v_0.Op != OpConst32 { 4085 break 4086 } 4087 if v_0.AuxInt != 0 { 4088 break 4089 } 4090 v.reset(OpConst32) 4091 v.AuxInt = 0 4092 return true 4093 } 4094 return false 4095 } 4096 func rewriteValuegeneric_OpLsh32x32(v *Value, config *Config) bool { 4097 b := v.Block 4098 _ = b 4099 // match: (Lsh32x32 <t> x (Const32 [c])) 4100 // cond: 4101 // result: (Lsh32x64 x (Const64 <t> [int64(uint32(c))])) 4102 for { 4103 t := v.Type 4104 x := v.Args[0] 4105 v_1 := v.Args[1] 4106 if v_1.Op != OpConst32 { 4107 break 4108 } 4109 c := v_1.AuxInt 4110 v.reset(OpLsh32x64) 4111 v.AddArg(x) 4112 v0 := b.NewValue0(v.Line, OpConst64, t) 4113 v0.AuxInt = int64(uint32(c)) 4114 v.AddArg(v0) 4115 return true 4116 } 4117 // match: (Lsh32x32 (Const32 [0]) _) 4118 // cond: 4119 // result: (Const32 [0]) 4120 for { 4121 v_0 := v.Args[0] 4122 if v_0.Op != OpConst32 { 4123 break 4124 } 4125 if v_0.AuxInt != 0 { 4126 break 4127 } 4128 v.reset(OpConst32) 4129 v.AuxInt = 0 4130 return true 4131 } 4132 return false 4133 } 4134 func rewriteValuegeneric_OpLsh32x64(v *Value, config *Config) bool { 4135 b := v.Block 4136 _ = b 4137 // match: (Lsh32x64 (Const32 [c]) (Const64 [d])) 4138 // cond: 4139 // result: (Const32 [int64(int32(c) << uint64(d))]) 4140 for { 4141 v_0 := v.Args[0] 4142 if v_0.Op != OpConst32 { 4143 break 4144 } 4145 c := v_0.AuxInt 4146 v_1 := v.Args[1] 4147 if v_1.Op != OpConst64 { 4148 break 4149 } 4150 d := v_1.AuxInt 4151 v.reset(OpConst32) 4152 v.AuxInt = int64(int32(c) << uint64(d)) 4153 return true 4154 } 4155 // match: (Lsh32x64 x (Const64 [0])) 4156 // cond: 4157 // result: x 4158 for { 4159 x := v.Args[0] 4160 v_1 := v.Args[1] 4161 if v_1.Op != OpConst64 { 4162 break 4163 } 4164 if v_1.AuxInt != 0 { 4165 break 4166 } 4167 v.reset(OpCopy) 4168 v.Type = x.Type 4169 v.AddArg(x) 4170 return true 
4171 } 4172 // match: (Lsh32x64 (Const32 [0]) _) 4173 // cond: 4174 // result: (Const32 [0]) 4175 for { 4176 v_0 := v.Args[0] 4177 if v_0.Op != OpConst32 { 4178 break 4179 } 4180 if v_0.AuxInt != 0 { 4181 break 4182 } 4183 v.reset(OpConst32) 4184 v.AuxInt = 0 4185 return true 4186 } 4187 // match: (Lsh32x64 _ (Const64 [c])) 4188 // cond: uint64(c) >= 32 4189 // result: (Const32 [0]) 4190 for { 4191 v_1 := v.Args[1] 4192 if v_1.Op != OpConst64 { 4193 break 4194 } 4195 c := v_1.AuxInt 4196 if !(uint64(c) >= 32) { 4197 break 4198 } 4199 v.reset(OpConst32) 4200 v.AuxInt = 0 4201 return true 4202 } 4203 // match: (Lsh32x64 <t> (Lsh32x64 x (Const64 [c])) (Const64 [d])) 4204 // cond: !uaddOvf(c,d) 4205 // result: (Lsh32x64 x (Const64 <t> [c+d])) 4206 for { 4207 t := v.Type 4208 v_0 := v.Args[0] 4209 if v_0.Op != OpLsh32x64 { 4210 break 4211 } 4212 x := v_0.Args[0] 4213 v_0_1 := v_0.Args[1] 4214 if v_0_1.Op != OpConst64 { 4215 break 4216 } 4217 c := v_0_1.AuxInt 4218 v_1 := v.Args[1] 4219 if v_1.Op != OpConst64 { 4220 break 4221 } 4222 d := v_1.AuxInt 4223 if !(!uaddOvf(c, d)) { 4224 break 4225 } 4226 v.reset(OpLsh32x64) 4227 v.AddArg(x) 4228 v0 := b.NewValue0(v.Line, OpConst64, t) 4229 v0.AuxInt = c + d 4230 v.AddArg(v0) 4231 return true 4232 } 4233 // match: (Lsh32x64 (Rsh32Ux64 (Lsh32x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) 4234 // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3) 4235 // result: (Lsh32x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3])) 4236 for { 4237 v_0 := v.Args[0] 4238 if v_0.Op != OpRsh32Ux64 { 4239 break 4240 } 4241 v_0_0 := v_0.Args[0] 4242 if v_0_0.Op != OpLsh32x64 { 4243 break 4244 } 4245 x := v_0_0.Args[0] 4246 v_0_0_1 := v_0_0.Args[1] 4247 if v_0_0_1.Op != OpConst64 { 4248 break 4249 } 4250 c1 := v_0_0_1.AuxInt 4251 v_0_1 := v_0.Args[1] 4252 if v_0_1.Op != OpConst64 { 4253 break 4254 } 4255 c2 := v_0_1.AuxInt 4256 v_1 := v.Args[1] 4257 if v_1.Op != OpConst64 { 4258 break 4259 } 4260 c3 := 
v_1.AuxInt 4261 if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { 4262 break 4263 } 4264 v.reset(OpLsh32x64) 4265 v.AddArg(x) 4266 v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64()) 4267 v0.AuxInt = c1 - c2 + c3 4268 v.AddArg(v0) 4269 return true 4270 } 4271 return false 4272 } 4273 func rewriteValuegeneric_OpLsh32x8(v *Value, config *Config) bool { 4274 b := v.Block 4275 _ = b 4276 // match: (Lsh32x8 <t> x (Const8 [c])) 4277 // cond: 4278 // result: (Lsh32x64 x (Const64 <t> [int64(uint8(c))])) 4279 for { 4280 t := v.Type 4281 x := v.Args[0] 4282 v_1 := v.Args[1] 4283 if v_1.Op != OpConst8 { 4284 break 4285 } 4286 c := v_1.AuxInt 4287 v.reset(OpLsh32x64) 4288 v.AddArg(x) 4289 v0 := b.NewValue0(v.Line, OpConst64, t) 4290 v0.AuxInt = int64(uint8(c)) 4291 v.AddArg(v0) 4292 return true 4293 } 4294 // match: (Lsh32x8 (Const32 [0]) _) 4295 // cond: 4296 // result: (Const32 [0]) 4297 for { 4298 v_0 := v.Args[0] 4299 if v_0.Op != OpConst32 { 4300 break 4301 } 4302 if v_0.AuxInt != 0 { 4303 break 4304 } 4305 v.reset(OpConst32) 4306 v.AuxInt = 0 4307 return true 4308 } 4309 return false 4310 } 4311 func rewriteValuegeneric_OpLsh64x16(v *Value, config *Config) bool { 4312 b := v.Block 4313 _ = b 4314 // match: (Lsh64x16 <t> x (Const16 [c])) 4315 // cond: 4316 // result: (Lsh64x64 x (Const64 <t> [int64(uint16(c))])) 4317 for { 4318 t := v.Type 4319 x := v.Args[0] 4320 v_1 := v.Args[1] 4321 if v_1.Op != OpConst16 { 4322 break 4323 } 4324 c := v_1.AuxInt 4325 v.reset(OpLsh64x64) 4326 v.AddArg(x) 4327 v0 := b.NewValue0(v.Line, OpConst64, t) 4328 v0.AuxInt = int64(uint16(c)) 4329 v.AddArg(v0) 4330 return true 4331 } 4332 // match: (Lsh64x16 (Const64 [0]) _) 4333 // cond: 4334 // result: (Const64 [0]) 4335 for { 4336 v_0 := v.Args[0] 4337 if v_0.Op != OpConst64 { 4338 break 4339 } 4340 if v_0.AuxInt != 0 { 4341 break 4342 } 4343 v.reset(OpConst64) 4344 v.AuxInt = 0 4345 return true 4346 } 4347 return false 4348 } 4349 func 
rewriteValuegeneric_OpLsh64x32(v *Value, config *Config) bool { 4350 b := v.Block 4351 _ = b 4352 // match: (Lsh64x32 <t> x (Const32 [c])) 4353 // cond: 4354 // result: (Lsh64x64 x (Const64 <t> [int64(uint32(c))])) 4355 for { 4356 t := v.Type 4357 x := v.Args[0] 4358 v_1 := v.Args[1] 4359 if v_1.Op != OpConst32 { 4360 break 4361 } 4362 c := v_1.AuxInt 4363 v.reset(OpLsh64x64) 4364 v.AddArg(x) 4365 v0 := b.NewValue0(v.Line, OpConst64, t) 4366 v0.AuxInt = int64(uint32(c)) 4367 v.AddArg(v0) 4368 return true 4369 } 4370 // match: (Lsh64x32 (Const64 [0]) _) 4371 // cond: 4372 // result: (Const64 [0]) 4373 for { 4374 v_0 := v.Args[0] 4375 if v_0.Op != OpConst64 { 4376 break 4377 } 4378 if v_0.AuxInt != 0 { 4379 break 4380 } 4381 v.reset(OpConst64) 4382 v.AuxInt = 0 4383 return true 4384 } 4385 return false 4386 } 4387 func rewriteValuegeneric_OpLsh64x64(v *Value, config *Config) bool { 4388 b := v.Block 4389 _ = b 4390 // match: (Lsh64x64 (Const64 [c]) (Const64 [d])) 4391 // cond: 4392 // result: (Const64 [c << uint64(d)]) 4393 for { 4394 v_0 := v.Args[0] 4395 if v_0.Op != OpConst64 { 4396 break 4397 } 4398 c := v_0.AuxInt 4399 v_1 := v.Args[1] 4400 if v_1.Op != OpConst64 { 4401 break 4402 } 4403 d := v_1.AuxInt 4404 v.reset(OpConst64) 4405 v.AuxInt = c << uint64(d) 4406 return true 4407 } 4408 // match: (Lsh64x64 x (Const64 [0])) 4409 // cond: 4410 // result: x 4411 for { 4412 x := v.Args[0] 4413 v_1 := v.Args[1] 4414 if v_1.Op != OpConst64 { 4415 break 4416 } 4417 if v_1.AuxInt != 0 { 4418 break 4419 } 4420 v.reset(OpCopy) 4421 v.Type = x.Type 4422 v.AddArg(x) 4423 return true 4424 } 4425 // match: (Lsh64x64 (Const64 [0]) _) 4426 // cond: 4427 // result: (Const64 [0]) 4428 for { 4429 v_0 := v.Args[0] 4430 if v_0.Op != OpConst64 { 4431 break 4432 } 4433 if v_0.AuxInt != 0 { 4434 break 4435 } 4436 v.reset(OpConst64) 4437 v.AuxInt = 0 4438 return true 4439 } 4440 // match: (Lsh64x64 _ (Const64 [c])) 4441 // cond: uint64(c) >= 64 4442 // result: (Const64 [0]) 4443 for { 
4444 v_1 := v.Args[1] 4445 if v_1.Op != OpConst64 { 4446 break 4447 } 4448 c := v_1.AuxInt 4449 if !(uint64(c) >= 64) { 4450 break 4451 } 4452 v.reset(OpConst64) 4453 v.AuxInt = 0 4454 return true 4455 } 4456 // match: (Lsh64x64 <t> (Lsh64x64 x (Const64 [c])) (Const64 [d])) 4457 // cond: !uaddOvf(c,d) 4458 // result: (Lsh64x64 x (Const64 <t> [c+d])) 4459 for { 4460 t := v.Type 4461 v_0 := v.Args[0] 4462 if v_0.Op != OpLsh64x64 { 4463 break 4464 } 4465 x := v_0.Args[0] 4466 v_0_1 := v_0.Args[1] 4467 if v_0_1.Op != OpConst64 { 4468 break 4469 } 4470 c := v_0_1.AuxInt 4471 v_1 := v.Args[1] 4472 if v_1.Op != OpConst64 { 4473 break 4474 } 4475 d := v_1.AuxInt 4476 if !(!uaddOvf(c, d)) { 4477 break 4478 } 4479 v.reset(OpLsh64x64) 4480 v.AddArg(x) 4481 v0 := b.NewValue0(v.Line, OpConst64, t) 4482 v0.AuxInt = c + d 4483 v.AddArg(v0) 4484 return true 4485 } 4486 // match: (Lsh64x64 (Rsh64Ux64 (Lsh64x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) 4487 // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3) 4488 // result: (Lsh64x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3])) 4489 for { 4490 v_0 := v.Args[0] 4491 if v_0.Op != OpRsh64Ux64 { 4492 break 4493 } 4494 v_0_0 := v_0.Args[0] 4495 if v_0_0.Op != OpLsh64x64 { 4496 break 4497 } 4498 x := v_0_0.Args[0] 4499 v_0_0_1 := v_0_0.Args[1] 4500 if v_0_0_1.Op != OpConst64 { 4501 break 4502 } 4503 c1 := v_0_0_1.AuxInt 4504 v_0_1 := v_0.Args[1] 4505 if v_0_1.Op != OpConst64 { 4506 break 4507 } 4508 c2 := v_0_1.AuxInt 4509 v_1 := v.Args[1] 4510 if v_1.Op != OpConst64 { 4511 break 4512 } 4513 c3 := v_1.AuxInt 4514 if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { 4515 break 4516 } 4517 v.reset(OpLsh64x64) 4518 v.AddArg(x) 4519 v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64()) 4520 v0.AuxInt = c1 - c2 + c3 4521 v.AddArg(v0) 4522 return true 4523 } 4524 return false 4525 } 4526 func rewriteValuegeneric_OpLsh64x8(v *Value, config *Config) bool { 4527 b 
:= v.Block 4528 _ = b 4529 // match: (Lsh64x8 <t> x (Const8 [c])) 4530 // cond: 4531 // result: (Lsh64x64 x (Const64 <t> [int64(uint8(c))])) 4532 for { 4533 t := v.Type 4534 x := v.Args[0] 4535 v_1 := v.Args[1] 4536 if v_1.Op != OpConst8 { 4537 break 4538 } 4539 c := v_1.AuxInt 4540 v.reset(OpLsh64x64) 4541 v.AddArg(x) 4542 v0 := b.NewValue0(v.Line, OpConst64, t) 4543 v0.AuxInt = int64(uint8(c)) 4544 v.AddArg(v0) 4545 return true 4546 } 4547 // match: (Lsh64x8 (Const64 [0]) _) 4548 // cond: 4549 // result: (Const64 [0]) 4550 for { 4551 v_0 := v.Args[0] 4552 if v_0.Op != OpConst64 { 4553 break 4554 } 4555 if v_0.AuxInt != 0 { 4556 break 4557 } 4558 v.reset(OpConst64) 4559 v.AuxInt = 0 4560 return true 4561 } 4562 return false 4563 } 4564 func rewriteValuegeneric_OpLsh8x16(v *Value, config *Config) bool { 4565 b := v.Block 4566 _ = b 4567 // match: (Lsh8x16 <t> x (Const16 [c])) 4568 // cond: 4569 // result: (Lsh8x64 x (Const64 <t> [int64(uint16(c))])) 4570 for { 4571 t := v.Type 4572 x := v.Args[0] 4573 v_1 := v.Args[1] 4574 if v_1.Op != OpConst16 { 4575 break 4576 } 4577 c := v_1.AuxInt 4578 v.reset(OpLsh8x64) 4579 v.AddArg(x) 4580 v0 := b.NewValue0(v.Line, OpConst64, t) 4581 v0.AuxInt = int64(uint16(c)) 4582 v.AddArg(v0) 4583 return true 4584 } 4585 // match: (Lsh8x16 (Const8 [0]) _) 4586 // cond: 4587 // result: (Const8 [0]) 4588 for { 4589 v_0 := v.Args[0] 4590 if v_0.Op != OpConst8 { 4591 break 4592 } 4593 if v_0.AuxInt != 0 { 4594 break 4595 } 4596 v.reset(OpConst8) 4597 v.AuxInt = 0 4598 return true 4599 } 4600 return false 4601 } 4602 func rewriteValuegeneric_OpLsh8x32(v *Value, config *Config) bool { 4603 b := v.Block 4604 _ = b 4605 // match: (Lsh8x32 <t> x (Const32 [c])) 4606 // cond: 4607 // result: (Lsh8x64 x (Const64 <t> [int64(uint32(c))])) 4608 for { 4609 t := v.Type 4610 x := v.Args[0] 4611 v_1 := v.Args[1] 4612 if v_1.Op != OpConst32 { 4613 break 4614 } 4615 c := v_1.AuxInt 4616 v.reset(OpLsh8x64) 4617 v.AddArg(x) 4618 v0 := b.NewValue0(v.Line, 
OpConst64, t) 4619 v0.AuxInt = int64(uint32(c)) 4620 v.AddArg(v0) 4621 return true 4622 } 4623 // match: (Lsh8x32 (Const8 [0]) _) 4624 // cond: 4625 // result: (Const8 [0]) 4626 for { 4627 v_0 := v.Args[0] 4628 if v_0.Op != OpConst8 { 4629 break 4630 } 4631 if v_0.AuxInt != 0 { 4632 break 4633 } 4634 v.reset(OpConst8) 4635 v.AuxInt = 0 4636 return true 4637 } 4638 return false 4639 } 4640 func rewriteValuegeneric_OpLsh8x64(v *Value, config *Config) bool { 4641 b := v.Block 4642 _ = b 4643 // match: (Lsh8x64 (Const8 [c]) (Const64 [d])) 4644 // cond: 4645 // result: (Const8 [int64(int8(c) << uint64(d))]) 4646 for { 4647 v_0 := v.Args[0] 4648 if v_0.Op != OpConst8 { 4649 break 4650 } 4651 c := v_0.AuxInt 4652 v_1 := v.Args[1] 4653 if v_1.Op != OpConst64 { 4654 break 4655 } 4656 d := v_1.AuxInt 4657 v.reset(OpConst8) 4658 v.AuxInt = int64(int8(c) << uint64(d)) 4659 return true 4660 } 4661 // match: (Lsh8x64 x (Const64 [0])) 4662 // cond: 4663 // result: x 4664 for { 4665 x := v.Args[0] 4666 v_1 := v.Args[1] 4667 if v_1.Op != OpConst64 { 4668 break 4669 } 4670 if v_1.AuxInt != 0 { 4671 break 4672 } 4673 v.reset(OpCopy) 4674 v.Type = x.Type 4675 v.AddArg(x) 4676 return true 4677 } 4678 // match: (Lsh8x64 (Const8 [0]) _) 4679 // cond: 4680 // result: (Const8 [0]) 4681 for { 4682 v_0 := v.Args[0] 4683 if v_0.Op != OpConst8 { 4684 break 4685 } 4686 if v_0.AuxInt != 0 { 4687 break 4688 } 4689 v.reset(OpConst8) 4690 v.AuxInt = 0 4691 return true 4692 } 4693 // match: (Lsh8x64 _ (Const64 [c])) 4694 // cond: uint64(c) >= 8 4695 // result: (Const8 [0]) 4696 for { 4697 v_1 := v.Args[1] 4698 if v_1.Op != OpConst64 { 4699 break 4700 } 4701 c := v_1.AuxInt 4702 if !(uint64(c) >= 8) { 4703 break 4704 } 4705 v.reset(OpConst8) 4706 v.AuxInt = 0 4707 return true 4708 } 4709 // match: (Lsh8x64 <t> (Lsh8x64 x (Const64 [c])) (Const64 [d])) 4710 // cond: !uaddOvf(c,d) 4711 // result: (Lsh8x64 x (Const64 <t> [c+d])) 4712 for { 4713 t := v.Type 4714 v_0 := v.Args[0] 4715 if v_0.Op != 
OpLsh8x64 { 4716 break 4717 } 4718 x := v_0.Args[0] 4719 v_0_1 := v_0.Args[1] 4720 if v_0_1.Op != OpConst64 { 4721 break 4722 } 4723 c := v_0_1.AuxInt 4724 v_1 := v.Args[1] 4725 if v_1.Op != OpConst64 { 4726 break 4727 } 4728 d := v_1.AuxInt 4729 if !(!uaddOvf(c, d)) { 4730 break 4731 } 4732 v.reset(OpLsh8x64) 4733 v.AddArg(x) 4734 v0 := b.NewValue0(v.Line, OpConst64, t) 4735 v0.AuxInt = c + d 4736 v.AddArg(v0) 4737 return true 4738 } 4739 // match: (Lsh8x64 (Rsh8Ux64 (Lsh8x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) 4740 // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3) 4741 // result: (Lsh8x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3])) 4742 for { 4743 v_0 := v.Args[0] 4744 if v_0.Op != OpRsh8Ux64 { 4745 break 4746 } 4747 v_0_0 := v_0.Args[0] 4748 if v_0_0.Op != OpLsh8x64 { 4749 break 4750 } 4751 x := v_0_0.Args[0] 4752 v_0_0_1 := v_0_0.Args[1] 4753 if v_0_0_1.Op != OpConst64 { 4754 break 4755 } 4756 c1 := v_0_0_1.AuxInt 4757 v_0_1 := v_0.Args[1] 4758 if v_0_1.Op != OpConst64 { 4759 break 4760 } 4761 c2 := v_0_1.AuxInt 4762 v_1 := v.Args[1] 4763 if v_1.Op != OpConst64 { 4764 break 4765 } 4766 c3 := v_1.AuxInt 4767 if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { 4768 break 4769 } 4770 v.reset(OpLsh8x64) 4771 v.AddArg(x) 4772 v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64()) 4773 v0.AuxInt = c1 - c2 + c3 4774 v.AddArg(v0) 4775 return true 4776 } 4777 return false 4778 } 4779 func rewriteValuegeneric_OpLsh8x8(v *Value, config *Config) bool { 4780 b := v.Block 4781 _ = b 4782 // match: (Lsh8x8 <t> x (Const8 [c])) 4783 // cond: 4784 // result: (Lsh8x64 x (Const64 <t> [int64(uint8(c))])) 4785 for { 4786 t := v.Type 4787 x := v.Args[0] 4788 v_1 := v.Args[1] 4789 if v_1.Op != OpConst8 { 4790 break 4791 } 4792 c := v_1.AuxInt 4793 v.reset(OpLsh8x64) 4794 v.AddArg(x) 4795 v0 := b.NewValue0(v.Line, OpConst64, t) 4796 v0.AuxInt = int64(uint8(c)) 4797 v.AddArg(v0) 4798 return true 
4799 } 4800 // match: (Lsh8x8 (Const8 [0]) _) 4801 // cond: 4802 // result: (Const8 [0]) 4803 for { 4804 v_0 := v.Args[0] 4805 if v_0.Op != OpConst8 { 4806 break 4807 } 4808 if v_0.AuxInt != 0 { 4809 break 4810 } 4811 v.reset(OpConst8) 4812 v.AuxInt = 0 4813 return true 4814 } 4815 return false 4816 } 4817 func rewriteValuegeneric_OpMod16(v *Value, config *Config) bool { 4818 b := v.Block 4819 _ = b 4820 // match: (Mod16 (Const16 [c]) (Const16 [d])) 4821 // cond: d != 0 4822 // result: (Const16 [int64(int16(c % d))]) 4823 for { 4824 v_0 := v.Args[0] 4825 if v_0.Op != OpConst16 { 4826 break 4827 } 4828 c := v_0.AuxInt 4829 v_1 := v.Args[1] 4830 if v_1.Op != OpConst16 { 4831 break 4832 } 4833 d := v_1.AuxInt 4834 if !(d != 0) { 4835 break 4836 } 4837 v.reset(OpConst16) 4838 v.AuxInt = int64(int16(c % d)) 4839 return true 4840 } 4841 return false 4842 } 4843 func rewriteValuegeneric_OpMod16u(v *Value, config *Config) bool { 4844 b := v.Block 4845 _ = b 4846 // match: (Mod16u (Const16 [c]) (Const16 [d])) 4847 // cond: d != 0 4848 // result: (Const16 [int64(uint16(c) % uint16(d))]) 4849 for { 4850 v_0 := v.Args[0] 4851 if v_0.Op != OpConst16 { 4852 break 4853 } 4854 c := v_0.AuxInt 4855 v_1 := v.Args[1] 4856 if v_1.Op != OpConst16 { 4857 break 4858 } 4859 d := v_1.AuxInt 4860 if !(d != 0) { 4861 break 4862 } 4863 v.reset(OpConst16) 4864 v.AuxInt = int64(uint16(c) % uint16(d)) 4865 return true 4866 } 4867 return false 4868 } 4869 func rewriteValuegeneric_OpMod32(v *Value, config *Config) bool { 4870 b := v.Block 4871 _ = b 4872 // match: (Mod32 (Const32 [c]) (Const32 [d])) 4873 // cond: d != 0 4874 // result: (Const32 [int64(int32(c % d))]) 4875 for { 4876 v_0 := v.Args[0] 4877 if v_0.Op != OpConst32 { 4878 break 4879 } 4880 c := v_0.AuxInt 4881 v_1 := v.Args[1] 4882 if v_1.Op != OpConst32 { 4883 break 4884 } 4885 d := v_1.AuxInt 4886 if !(d != 0) { 4887 break 4888 } 4889 v.reset(OpConst32) 4890 v.AuxInt = int64(int32(c % d)) 4891 return true 4892 } 4893 return false 
4894 } 4895 func rewriteValuegeneric_OpMod32u(v *Value, config *Config) bool { 4896 b := v.Block 4897 _ = b 4898 // match: (Mod32u (Const32 [c]) (Const32 [d])) 4899 // cond: d != 0 4900 // result: (Const32 [int64(uint32(c) % uint32(d))]) 4901 for { 4902 v_0 := v.Args[0] 4903 if v_0.Op != OpConst32 { 4904 break 4905 } 4906 c := v_0.AuxInt 4907 v_1 := v.Args[1] 4908 if v_1.Op != OpConst32 { 4909 break 4910 } 4911 d := v_1.AuxInt 4912 if !(d != 0) { 4913 break 4914 } 4915 v.reset(OpConst32) 4916 v.AuxInt = int64(uint32(c) % uint32(d)) 4917 return true 4918 } 4919 return false 4920 } 4921 func rewriteValuegeneric_OpMod64(v *Value, config *Config) bool { 4922 b := v.Block 4923 _ = b 4924 // match: (Mod64 (Const64 [c]) (Const64 [d])) 4925 // cond: d != 0 4926 // result: (Const64 [c % d]) 4927 for { 4928 v_0 := v.Args[0] 4929 if v_0.Op != OpConst64 { 4930 break 4931 } 4932 c := v_0.AuxInt 4933 v_1 := v.Args[1] 4934 if v_1.Op != OpConst64 { 4935 break 4936 } 4937 d := v_1.AuxInt 4938 if !(d != 0) { 4939 break 4940 } 4941 v.reset(OpConst64) 4942 v.AuxInt = c % d 4943 return true 4944 } 4945 // match: (Mod64 <t> x (Const64 [c])) 4946 // cond: x.Op != OpConst64 && smagic64ok(c) 4947 // result: (Sub64 x (Mul64 <t> (Div64 <t> x (Const64 <t> [c])) (Const64 <t> [c]))) 4948 for { 4949 t := v.Type 4950 x := v.Args[0] 4951 v_1 := v.Args[1] 4952 if v_1.Op != OpConst64 { 4953 break 4954 } 4955 c := v_1.AuxInt 4956 if !(x.Op != OpConst64 && smagic64ok(c)) { 4957 break 4958 } 4959 v.reset(OpSub64) 4960 v.AddArg(x) 4961 v0 := b.NewValue0(v.Line, OpMul64, t) 4962 v1 := b.NewValue0(v.Line, OpDiv64, t) 4963 v1.AddArg(x) 4964 v2 := b.NewValue0(v.Line, OpConst64, t) 4965 v2.AuxInt = c 4966 v1.AddArg(v2) 4967 v0.AddArg(v1) 4968 v3 := b.NewValue0(v.Line, OpConst64, t) 4969 v3.AuxInt = c 4970 v0.AddArg(v3) 4971 v.AddArg(v0) 4972 return true 4973 } 4974 return false 4975 } 4976 func rewriteValuegeneric_OpMod64u(v *Value, config *Config) bool { 4977 b := v.Block 4978 _ = b 4979 // match: (Mod64u 
(Const64 [c]) (Const64 [d])) 4980 // cond: d != 0 4981 // result: (Const64 [int64(uint64(c) % uint64(d))]) 4982 for { 4983 v_0 := v.Args[0] 4984 if v_0.Op != OpConst64 { 4985 break 4986 } 4987 c := v_0.AuxInt 4988 v_1 := v.Args[1] 4989 if v_1.Op != OpConst64 { 4990 break 4991 } 4992 d := v_1.AuxInt 4993 if !(d != 0) { 4994 break 4995 } 4996 v.reset(OpConst64) 4997 v.AuxInt = int64(uint64(c) % uint64(d)) 4998 return true 4999 } 5000 // match: (Mod64u <t> n (Const64 [c])) 5001 // cond: isPowerOfTwo(c) 5002 // result: (And64 n (Const64 <t> [c-1])) 5003 for { 5004 t := v.Type 5005 n := v.Args[0] 5006 v_1 := v.Args[1] 5007 if v_1.Op != OpConst64 { 5008 break 5009 } 5010 c := v_1.AuxInt 5011 if !(isPowerOfTwo(c)) { 5012 break 5013 } 5014 v.reset(OpAnd64) 5015 v.AddArg(n) 5016 v0 := b.NewValue0(v.Line, OpConst64, t) 5017 v0.AuxInt = c - 1 5018 v.AddArg(v0) 5019 return true 5020 } 5021 // match: (Mod64u <t> x (Const64 [c])) 5022 // cond: x.Op != OpConst64 && umagic64ok(c) 5023 // result: (Sub64 x (Mul64 <t> (Div64u <t> x (Const64 <t> [c])) (Const64 <t> [c]))) 5024 for { 5025 t := v.Type 5026 x := v.Args[0] 5027 v_1 := v.Args[1] 5028 if v_1.Op != OpConst64 { 5029 break 5030 } 5031 c := v_1.AuxInt 5032 if !(x.Op != OpConst64 && umagic64ok(c)) { 5033 break 5034 } 5035 v.reset(OpSub64) 5036 v.AddArg(x) 5037 v0 := b.NewValue0(v.Line, OpMul64, t) 5038 v1 := b.NewValue0(v.Line, OpDiv64u, t) 5039 v1.AddArg(x) 5040 v2 := b.NewValue0(v.Line, OpConst64, t) 5041 v2.AuxInt = c 5042 v1.AddArg(v2) 5043 v0.AddArg(v1) 5044 v3 := b.NewValue0(v.Line, OpConst64, t) 5045 v3.AuxInt = c 5046 v0.AddArg(v3) 5047 v.AddArg(v0) 5048 return true 5049 } 5050 return false 5051 } 5052 func rewriteValuegeneric_OpMod8(v *Value, config *Config) bool { 5053 b := v.Block 5054 _ = b 5055 // match: (Mod8 (Const8 [c]) (Const8 [d])) 5056 // cond: d != 0 5057 // result: (Const8 [int64(int8(c % d))]) 5058 for { 5059 v_0 := v.Args[0] 5060 if v_0.Op != OpConst8 { 5061 break 5062 } 5063 c := v_0.AuxInt 5064 v_1 := 
v.Args[1] 5065 if v_1.Op != OpConst8 { 5066 break 5067 } 5068 d := v_1.AuxInt 5069 if !(d != 0) { 5070 break 5071 } 5072 v.reset(OpConst8) 5073 v.AuxInt = int64(int8(c % d)) 5074 return true 5075 } 5076 return false 5077 } 5078 func rewriteValuegeneric_OpMod8u(v *Value, config *Config) bool { 5079 b := v.Block 5080 _ = b 5081 // match: (Mod8u (Const8 [c]) (Const8 [d])) 5082 // cond: d != 0 5083 // result: (Const8 [int64(uint8(c) % uint8(d))]) 5084 for { 5085 v_0 := v.Args[0] 5086 if v_0.Op != OpConst8 { 5087 break 5088 } 5089 c := v_0.AuxInt 5090 v_1 := v.Args[1] 5091 if v_1.Op != OpConst8 { 5092 break 5093 } 5094 d := v_1.AuxInt 5095 if !(d != 0) { 5096 break 5097 } 5098 v.reset(OpConst8) 5099 v.AuxInt = int64(uint8(c) % uint8(d)) 5100 return true 5101 } 5102 return false 5103 } 5104 func rewriteValuegeneric_OpMul16(v *Value, config *Config) bool { 5105 b := v.Block 5106 _ = b 5107 // match: (Mul16 (Const16 [c]) (Const16 [d])) 5108 // cond: 5109 // result: (Const16 [int64(int16(c*d))]) 5110 for { 5111 v_0 := v.Args[0] 5112 if v_0.Op != OpConst16 { 5113 break 5114 } 5115 c := v_0.AuxInt 5116 v_1 := v.Args[1] 5117 if v_1.Op != OpConst16 { 5118 break 5119 } 5120 d := v_1.AuxInt 5121 v.reset(OpConst16) 5122 v.AuxInt = int64(int16(c * d)) 5123 return true 5124 } 5125 // match: (Mul16 x (Const16 <t> [c])) 5126 // cond: x.Op != OpConst16 5127 // result: (Mul16 (Const16 <t> [c]) x) 5128 for { 5129 x := v.Args[0] 5130 v_1 := v.Args[1] 5131 if v_1.Op != OpConst16 { 5132 break 5133 } 5134 t := v_1.Type 5135 c := v_1.AuxInt 5136 if !(x.Op != OpConst16) { 5137 break 5138 } 5139 v.reset(OpMul16) 5140 v0 := b.NewValue0(v.Line, OpConst16, t) 5141 v0.AuxInt = c 5142 v.AddArg(v0) 5143 v.AddArg(x) 5144 return true 5145 } 5146 // match: (Mul16 (Const16 [0]) _) 5147 // cond: 5148 // result: (Const16 [0]) 5149 for { 5150 v_0 := v.Args[0] 5151 if v_0.Op != OpConst16 { 5152 break 5153 } 5154 if v_0.AuxInt != 0 { 5155 break 5156 } 5157 v.reset(OpConst16) 5158 v.AuxInt = 0 5159 return true 
5160 } 5161 return false 5162 } 5163 func rewriteValuegeneric_OpMul32(v *Value, config *Config) bool { 5164 b := v.Block 5165 _ = b 5166 // match: (Mul32 (Const32 [c]) (Const32 [d])) 5167 // cond: 5168 // result: (Const32 [int64(int32(c*d))]) 5169 for { 5170 v_0 := v.Args[0] 5171 if v_0.Op != OpConst32 { 5172 break 5173 } 5174 c := v_0.AuxInt 5175 v_1 := v.Args[1] 5176 if v_1.Op != OpConst32 { 5177 break 5178 } 5179 d := v_1.AuxInt 5180 v.reset(OpConst32) 5181 v.AuxInt = int64(int32(c * d)) 5182 return true 5183 } 5184 // match: (Mul32 x (Const32 <t> [c])) 5185 // cond: x.Op != OpConst32 5186 // result: (Mul32 (Const32 <t> [c]) x) 5187 for { 5188 x := v.Args[0] 5189 v_1 := v.Args[1] 5190 if v_1.Op != OpConst32 { 5191 break 5192 } 5193 t := v_1.Type 5194 c := v_1.AuxInt 5195 if !(x.Op != OpConst32) { 5196 break 5197 } 5198 v.reset(OpMul32) 5199 v0 := b.NewValue0(v.Line, OpConst32, t) 5200 v0.AuxInt = c 5201 v.AddArg(v0) 5202 v.AddArg(x) 5203 return true 5204 } 5205 // match: (Mul32 (Const32 <t> [c]) (Add32 <t> (Const32 <t> [d]) x)) 5206 // cond: 5207 // result: (Add32 (Const32 <t> [int64(int32(c*d))]) (Mul32 <t> (Const32 <t> [c]) x)) 5208 for { 5209 v_0 := v.Args[0] 5210 if v_0.Op != OpConst32 { 5211 break 5212 } 5213 t := v_0.Type 5214 c := v_0.AuxInt 5215 v_1 := v.Args[1] 5216 if v_1.Op != OpAdd32 { 5217 break 5218 } 5219 if v_1.Type != t { 5220 break 5221 } 5222 v_1_0 := v_1.Args[0] 5223 if v_1_0.Op != OpConst32 { 5224 break 5225 } 5226 if v_1_0.Type != t { 5227 break 5228 } 5229 d := v_1_0.AuxInt 5230 x := v_1.Args[1] 5231 v.reset(OpAdd32) 5232 v0 := b.NewValue0(v.Line, OpConst32, t) 5233 v0.AuxInt = int64(int32(c * d)) 5234 v.AddArg(v0) 5235 v1 := b.NewValue0(v.Line, OpMul32, t) 5236 v2 := b.NewValue0(v.Line, OpConst32, t) 5237 v2.AuxInt = c 5238 v1.AddArg(v2) 5239 v1.AddArg(x) 5240 v.AddArg(v1) 5241 return true 5242 } 5243 // match: (Mul32 (Const32 [0]) _) 5244 // cond: 5245 // result: (Const32 [0]) 5246 for { 5247 v_0 := v.Args[0] 5248 if v_0.Op != OpConst32 
{ 5249 break 5250 } 5251 if v_0.AuxInt != 0 { 5252 break 5253 } 5254 v.reset(OpConst32) 5255 v.AuxInt = 0 5256 return true 5257 } 5258 return false 5259 } 5260 func rewriteValuegeneric_OpMul32F(v *Value, config *Config) bool { 5261 b := v.Block 5262 _ = b 5263 // match: (Mul32F (Const32F [c]) (Const32F [d])) 5264 // cond: 5265 // result: (Const32F [f2i(float64(i2f32(c) * i2f32(d)))]) 5266 for { 5267 v_0 := v.Args[0] 5268 if v_0.Op != OpConst32F { 5269 break 5270 } 5271 c := v_0.AuxInt 5272 v_1 := v.Args[1] 5273 if v_1.Op != OpConst32F { 5274 break 5275 } 5276 d := v_1.AuxInt 5277 v.reset(OpConst32F) 5278 v.AuxInt = f2i(float64(i2f32(c) * i2f32(d))) 5279 return true 5280 } 5281 return false 5282 } 5283 func rewriteValuegeneric_OpMul64(v *Value, config *Config) bool { 5284 b := v.Block 5285 _ = b 5286 // match: (Mul64 (Const64 [c]) (Const64 [d])) 5287 // cond: 5288 // result: (Const64 [c*d]) 5289 for { 5290 v_0 := v.Args[0] 5291 if v_0.Op != OpConst64 { 5292 break 5293 } 5294 c := v_0.AuxInt 5295 v_1 := v.Args[1] 5296 if v_1.Op != OpConst64 { 5297 break 5298 } 5299 d := v_1.AuxInt 5300 v.reset(OpConst64) 5301 v.AuxInt = c * d 5302 return true 5303 } 5304 // match: (Mul64 x (Const64 <t> [c])) 5305 // cond: x.Op != OpConst64 5306 // result: (Mul64 (Const64 <t> [c]) x) 5307 for { 5308 x := v.Args[0] 5309 v_1 := v.Args[1] 5310 if v_1.Op != OpConst64 { 5311 break 5312 } 5313 t := v_1.Type 5314 c := v_1.AuxInt 5315 if !(x.Op != OpConst64) { 5316 break 5317 } 5318 v.reset(OpMul64) 5319 v0 := b.NewValue0(v.Line, OpConst64, t) 5320 v0.AuxInt = c 5321 v.AddArg(v0) 5322 v.AddArg(x) 5323 return true 5324 } 5325 // match: (Mul64 (Const64 <t> [c]) (Add64 <t> (Const64 <t> [d]) x)) 5326 // cond: 5327 // result: (Add64 (Const64 <t> [c*d]) (Mul64 <t> (Const64 <t> [c]) x)) 5328 for { 5329 v_0 := v.Args[0] 5330 if v_0.Op != OpConst64 { 5331 break 5332 } 5333 t := v_0.Type 5334 c := v_0.AuxInt 5335 v_1 := v.Args[1] 5336 if v_1.Op != OpAdd64 { 5337 break 5338 } 5339 if v_1.Type != t { 
5340 break 5341 } 5342 v_1_0 := v_1.Args[0] 5343 if v_1_0.Op != OpConst64 { 5344 break 5345 } 5346 if v_1_0.Type != t { 5347 break 5348 } 5349 d := v_1_0.AuxInt 5350 x := v_1.Args[1] 5351 v.reset(OpAdd64) 5352 v0 := b.NewValue0(v.Line, OpConst64, t) 5353 v0.AuxInt = c * d 5354 v.AddArg(v0) 5355 v1 := b.NewValue0(v.Line, OpMul64, t) 5356 v2 := b.NewValue0(v.Line, OpConst64, t) 5357 v2.AuxInt = c 5358 v1.AddArg(v2) 5359 v1.AddArg(x) 5360 v.AddArg(v1) 5361 return true 5362 } 5363 // match: (Mul64 (Const64 [0]) _) 5364 // cond: 5365 // result: (Const64 [0]) 5366 for { 5367 v_0 := v.Args[0] 5368 if v_0.Op != OpConst64 { 5369 break 5370 } 5371 if v_0.AuxInt != 0 { 5372 break 5373 } 5374 v.reset(OpConst64) 5375 v.AuxInt = 0 5376 return true 5377 } 5378 return false 5379 } 5380 func rewriteValuegeneric_OpMul64F(v *Value, config *Config) bool { 5381 b := v.Block 5382 _ = b 5383 // match: (Mul64F (Const64F [c]) (Const64F [d])) 5384 // cond: 5385 // result: (Const64F [f2i(i2f(c) * i2f(d))]) 5386 for { 5387 v_0 := v.Args[0] 5388 if v_0.Op != OpConst64F { 5389 break 5390 } 5391 c := v_0.AuxInt 5392 v_1 := v.Args[1] 5393 if v_1.Op != OpConst64F { 5394 break 5395 } 5396 d := v_1.AuxInt 5397 v.reset(OpConst64F) 5398 v.AuxInt = f2i(i2f(c) * i2f(d)) 5399 return true 5400 } 5401 return false 5402 } 5403 func rewriteValuegeneric_OpMul8(v *Value, config *Config) bool { 5404 b := v.Block 5405 _ = b 5406 // match: (Mul8 (Const8 [c]) (Const8 [d])) 5407 // cond: 5408 // result: (Const8 [int64(int8(c*d))]) 5409 for { 5410 v_0 := v.Args[0] 5411 if v_0.Op != OpConst8 { 5412 break 5413 } 5414 c := v_0.AuxInt 5415 v_1 := v.Args[1] 5416 if v_1.Op != OpConst8 { 5417 break 5418 } 5419 d := v_1.AuxInt 5420 v.reset(OpConst8) 5421 v.AuxInt = int64(int8(c * d)) 5422 return true 5423 } 5424 // match: (Mul8 x (Const8 <t> [c])) 5425 // cond: x.Op != OpConst8 5426 // result: (Mul8 (Const8 <t> [c]) x) 5427 for { 5428 x := v.Args[0] 5429 v_1 := v.Args[1] 5430 if v_1.Op != OpConst8 { 5431 break 5432 } 5433 t 
:= v_1.Type 5434 c := v_1.AuxInt 5435 if !(x.Op != OpConst8) { 5436 break 5437 } 5438 v.reset(OpMul8) 5439 v0 := b.NewValue0(v.Line, OpConst8, t) 5440 v0.AuxInt = c 5441 v.AddArg(v0) 5442 v.AddArg(x) 5443 return true 5444 } 5445 // match: (Mul8 (Const8 [0]) _) 5446 // cond: 5447 // result: (Const8 [0]) 5448 for { 5449 v_0 := v.Args[0] 5450 if v_0.Op != OpConst8 { 5451 break 5452 } 5453 if v_0.AuxInt != 0 { 5454 break 5455 } 5456 v.reset(OpConst8) 5457 v.AuxInt = 0 5458 return true 5459 } 5460 return false 5461 } 5462 func rewriteValuegeneric_OpNeg16(v *Value, config *Config) bool { 5463 b := v.Block 5464 _ = b 5465 // match: (Neg16 (Sub16 x y)) 5466 // cond: 5467 // result: (Sub16 y x) 5468 for { 5469 v_0 := v.Args[0] 5470 if v_0.Op != OpSub16 { 5471 break 5472 } 5473 x := v_0.Args[0] 5474 y := v_0.Args[1] 5475 v.reset(OpSub16) 5476 v.AddArg(y) 5477 v.AddArg(x) 5478 return true 5479 } 5480 return false 5481 } 5482 func rewriteValuegeneric_OpNeg32(v *Value, config *Config) bool { 5483 b := v.Block 5484 _ = b 5485 // match: (Neg32 (Sub32 x y)) 5486 // cond: 5487 // result: (Sub32 y x) 5488 for { 5489 v_0 := v.Args[0] 5490 if v_0.Op != OpSub32 { 5491 break 5492 } 5493 x := v_0.Args[0] 5494 y := v_0.Args[1] 5495 v.reset(OpSub32) 5496 v.AddArg(y) 5497 v.AddArg(x) 5498 return true 5499 } 5500 return false 5501 } 5502 func rewriteValuegeneric_OpNeg64(v *Value, config *Config) bool { 5503 b := v.Block 5504 _ = b 5505 // match: (Neg64 (Sub64 x y)) 5506 // cond: 5507 // result: (Sub64 y x) 5508 for { 5509 v_0 := v.Args[0] 5510 if v_0.Op != OpSub64 { 5511 break 5512 } 5513 x := v_0.Args[0] 5514 y := v_0.Args[1] 5515 v.reset(OpSub64) 5516 v.AddArg(y) 5517 v.AddArg(x) 5518 return true 5519 } 5520 return false 5521 } 5522 func rewriteValuegeneric_OpNeg8(v *Value, config *Config) bool { 5523 b := v.Block 5524 _ = b 5525 // match: (Neg8 (Sub8 x y)) 5526 // cond: 5527 // result: (Sub8 y x) 5528 for { 5529 v_0 := v.Args[0] 5530 if v_0.Op != OpSub8 { 5531 break 5532 } 5533 x := 
v_0.Args[0] 5534 y := v_0.Args[1] 5535 v.reset(OpSub8) 5536 v.AddArg(y) 5537 v.AddArg(x) 5538 return true 5539 } 5540 return false 5541 } 5542 func rewriteValuegeneric_OpNeq16(v *Value, config *Config) bool { 5543 b := v.Block 5544 _ = b 5545 // match: (Neq16 x x) 5546 // cond: 5547 // result: (ConstBool [0]) 5548 for { 5549 x := v.Args[0] 5550 if x != v.Args[1] { 5551 break 5552 } 5553 v.reset(OpConstBool) 5554 v.AuxInt = 0 5555 return true 5556 } 5557 // match: (Neq16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x)) 5558 // cond: 5559 // result: (Neq16 (Const16 <t> [int64(int16(c-d))]) x) 5560 for { 5561 v_0 := v.Args[0] 5562 if v_0.Op != OpConst16 { 5563 break 5564 } 5565 t := v_0.Type 5566 c := v_0.AuxInt 5567 v_1 := v.Args[1] 5568 if v_1.Op != OpAdd16 { 5569 break 5570 } 5571 v_1_0 := v_1.Args[0] 5572 if v_1_0.Op != OpConst16 { 5573 break 5574 } 5575 if v_1_0.Type != t { 5576 break 5577 } 5578 d := v_1_0.AuxInt 5579 x := v_1.Args[1] 5580 v.reset(OpNeq16) 5581 v0 := b.NewValue0(v.Line, OpConst16, t) 5582 v0.AuxInt = int64(int16(c - d)) 5583 v.AddArg(v0) 5584 v.AddArg(x) 5585 return true 5586 } 5587 // match: (Neq16 x (Const16 <t> [c])) 5588 // cond: x.Op != OpConst16 5589 // result: (Neq16 (Const16 <t> [c]) x) 5590 for { 5591 x := v.Args[0] 5592 v_1 := v.Args[1] 5593 if v_1.Op != OpConst16 { 5594 break 5595 } 5596 t := v_1.Type 5597 c := v_1.AuxInt 5598 if !(x.Op != OpConst16) { 5599 break 5600 } 5601 v.reset(OpNeq16) 5602 v0 := b.NewValue0(v.Line, OpConst16, t) 5603 v0.AuxInt = c 5604 v.AddArg(v0) 5605 v.AddArg(x) 5606 return true 5607 } 5608 // match: (Neq16 (Const16 [c]) (Const16 [d])) 5609 // cond: 5610 // result: (ConstBool [b2i(c != d)]) 5611 for { 5612 v_0 := v.Args[0] 5613 if v_0.Op != OpConst16 { 5614 break 5615 } 5616 c := v_0.AuxInt 5617 v_1 := v.Args[1] 5618 if v_1.Op != OpConst16 { 5619 break 5620 } 5621 d := v_1.AuxInt 5622 v.reset(OpConstBool) 5623 v.AuxInt = b2i(c != d) 5624 return true 5625 } 5626 return false 5627 } 5628 func 
rewriteValuegeneric_OpNeq32(v *Value, config *Config) bool { 5629 b := v.Block 5630 _ = b 5631 // match: (Neq32 x x) 5632 // cond: 5633 // result: (ConstBool [0]) 5634 for { 5635 x := v.Args[0] 5636 if x != v.Args[1] { 5637 break 5638 } 5639 v.reset(OpConstBool) 5640 v.AuxInt = 0 5641 return true 5642 } 5643 // match: (Neq32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x)) 5644 // cond: 5645 // result: (Neq32 (Const32 <t> [int64(int32(c-d))]) x) 5646 for { 5647 v_0 := v.Args[0] 5648 if v_0.Op != OpConst32 { 5649 break 5650 } 5651 t := v_0.Type 5652 c := v_0.AuxInt 5653 v_1 := v.Args[1] 5654 if v_1.Op != OpAdd32 { 5655 break 5656 } 5657 v_1_0 := v_1.Args[0] 5658 if v_1_0.Op != OpConst32 { 5659 break 5660 } 5661 if v_1_0.Type != t { 5662 break 5663 } 5664 d := v_1_0.AuxInt 5665 x := v_1.Args[1] 5666 v.reset(OpNeq32) 5667 v0 := b.NewValue0(v.Line, OpConst32, t) 5668 v0.AuxInt = int64(int32(c - d)) 5669 v.AddArg(v0) 5670 v.AddArg(x) 5671 return true 5672 } 5673 // match: (Neq32 x (Const32 <t> [c])) 5674 // cond: x.Op != OpConst32 5675 // result: (Neq32 (Const32 <t> [c]) x) 5676 for { 5677 x := v.Args[0] 5678 v_1 := v.Args[1] 5679 if v_1.Op != OpConst32 { 5680 break 5681 } 5682 t := v_1.Type 5683 c := v_1.AuxInt 5684 if !(x.Op != OpConst32) { 5685 break 5686 } 5687 v.reset(OpNeq32) 5688 v0 := b.NewValue0(v.Line, OpConst32, t) 5689 v0.AuxInt = c 5690 v.AddArg(v0) 5691 v.AddArg(x) 5692 return true 5693 } 5694 // match: (Neq32 (Const32 [c]) (Const32 [d])) 5695 // cond: 5696 // result: (ConstBool [b2i(c != d)]) 5697 for { 5698 v_0 := v.Args[0] 5699 if v_0.Op != OpConst32 { 5700 break 5701 } 5702 c := v_0.AuxInt 5703 v_1 := v.Args[1] 5704 if v_1.Op != OpConst32 { 5705 break 5706 } 5707 d := v_1.AuxInt 5708 v.reset(OpConstBool) 5709 v.AuxInt = b2i(c != d) 5710 return true 5711 } 5712 return false 5713 } 5714 func rewriteValuegeneric_OpNeq64(v *Value, config *Config) bool { 5715 b := v.Block 5716 _ = b 5717 // match: (Neq64 x x) 5718 // cond: 5719 // result: (ConstBool [0]) 5720 
for { 5721 x := v.Args[0] 5722 if x != v.Args[1] { 5723 break 5724 } 5725 v.reset(OpConstBool) 5726 v.AuxInt = 0 5727 return true 5728 } 5729 // match: (Neq64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x)) 5730 // cond: 5731 // result: (Neq64 (Const64 <t> [c-d]) x) 5732 for { 5733 v_0 := v.Args[0] 5734 if v_0.Op != OpConst64 { 5735 break 5736 } 5737 t := v_0.Type 5738 c := v_0.AuxInt 5739 v_1 := v.Args[1] 5740 if v_1.Op != OpAdd64 { 5741 break 5742 } 5743 v_1_0 := v_1.Args[0] 5744 if v_1_0.Op != OpConst64 { 5745 break 5746 } 5747 if v_1_0.Type != t { 5748 break 5749 } 5750 d := v_1_0.AuxInt 5751 x := v_1.Args[1] 5752 v.reset(OpNeq64) 5753 v0 := b.NewValue0(v.Line, OpConst64, t) 5754 v0.AuxInt = c - d 5755 v.AddArg(v0) 5756 v.AddArg(x) 5757 return true 5758 } 5759 // match: (Neq64 x (Const64 <t> [c])) 5760 // cond: x.Op != OpConst64 5761 // result: (Neq64 (Const64 <t> [c]) x) 5762 for { 5763 x := v.Args[0] 5764 v_1 := v.Args[1] 5765 if v_1.Op != OpConst64 { 5766 break 5767 } 5768 t := v_1.Type 5769 c := v_1.AuxInt 5770 if !(x.Op != OpConst64) { 5771 break 5772 } 5773 v.reset(OpNeq64) 5774 v0 := b.NewValue0(v.Line, OpConst64, t) 5775 v0.AuxInt = c 5776 v.AddArg(v0) 5777 v.AddArg(x) 5778 return true 5779 } 5780 // match: (Neq64 (Const64 [c]) (Const64 [d])) 5781 // cond: 5782 // result: (ConstBool [b2i(c != d)]) 5783 for { 5784 v_0 := v.Args[0] 5785 if v_0.Op != OpConst64 { 5786 break 5787 } 5788 c := v_0.AuxInt 5789 v_1 := v.Args[1] 5790 if v_1.Op != OpConst64 { 5791 break 5792 } 5793 d := v_1.AuxInt 5794 v.reset(OpConstBool) 5795 v.AuxInt = b2i(c != d) 5796 return true 5797 } 5798 return false 5799 } 5800 func rewriteValuegeneric_OpNeq8(v *Value, config *Config) bool { 5801 b := v.Block 5802 _ = b 5803 // match: (Neq8 x x) 5804 // cond: 5805 // result: (ConstBool [0]) 5806 for { 5807 x := v.Args[0] 5808 if x != v.Args[1] { 5809 break 5810 } 5811 v.reset(OpConstBool) 5812 v.AuxInt = 0 5813 return true 5814 } 5815 // match: (Neq8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) 
x)) 5816 // cond: 5817 // result: (Neq8 (Const8 <t> [int64(int8(c-d))]) x) 5818 for { 5819 v_0 := v.Args[0] 5820 if v_0.Op != OpConst8 { 5821 break 5822 } 5823 t := v_0.Type 5824 c := v_0.AuxInt 5825 v_1 := v.Args[1] 5826 if v_1.Op != OpAdd8 { 5827 break 5828 } 5829 v_1_0 := v_1.Args[0] 5830 if v_1_0.Op != OpConst8 { 5831 break 5832 } 5833 if v_1_0.Type != t { 5834 break 5835 } 5836 d := v_1_0.AuxInt 5837 x := v_1.Args[1] 5838 v.reset(OpNeq8) 5839 v0 := b.NewValue0(v.Line, OpConst8, t) 5840 v0.AuxInt = int64(int8(c - d)) 5841 v.AddArg(v0) 5842 v.AddArg(x) 5843 return true 5844 } 5845 // match: (Neq8 x (Const8 <t> [c])) 5846 // cond: x.Op != OpConst8 5847 // result: (Neq8 (Const8 <t> [c]) x) 5848 for { 5849 x := v.Args[0] 5850 v_1 := v.Args[1] 5851 if v_1.Op != OpConst8 { 5852 break 5853 } 5854 t := v_1.Type 5855 c := v_1.AuxInt 5856 if !(x.Op != OpConst8) { 5857 break 5858 } 5859 v.reset(OpNeq8) 5860 v0 := b.NewValue0(v.Line, OpConst8, t) 5861 v0.AuxInt = c 5862 v.AddArg(v0) 5863 v.AddArg(x) 5864 return true 5865 } 5866 // match: (Neq8 (Const8 [c]) (Const8 [d])) 5867 // cond: 5868 // result: (ConstBool [b2i(c != d)]) 5869 for { 5870 v_0 := v.Args[0] 5871 if v_0.Op != OpConst8 { 5872 break 5873 } 5874 c := v_0.AuxInt 5875 v_1 := v.Args[1] 5876 if v_1.Op != OpConst8 { 5877 break 5878 } 5879 d := v_1.AuxInt 5880 v.reset(OpConstBool) 5881 v.AuxInt = b2i(c != d) 5882 return true 5883 } 5884 return false 5885 } 5886 func rewriteValuegeneric_OpNeqB(v *Value, config *Config) bool { 5887 b := v.Block 5888 _ = b 5889 // match: (NeqB (ConstBool [c]) (ConstBool [d])) 5890 // cond: 5891 // result: (ConstBool [b2i(c != d)]) 5892 for { 5893 v_0 := v.Args[0] 5894 if v_0.Op != OpConstBool { 5895 break 5896 } 5897 c := v_0.AuxInt 5898 v_1 := v.Args[1] 5899 if v_1.Op != OpConstBool { 5900 break 5901 } 5902 d := v_1.AuxInt 5903 v.reset(OpConstBool) 5904 v.AuxInt = b2i(c != d) 5905 return true 5906 } 5907 // match: (NeqB (ConstBool [0]) x) 5908 // cond: 5909 // result: x 5910 for { 
5911 v_0 := v.Args[0] 5912 if v_0.Op != OpConstBool { 5913 break 5914 } 5915 if v_0.AuxInt != 0 { 5916 break 5917 } 5918 x := v.Args[1] 5919 v.reset(OpCopy) 5920 v.Type = x.Type 5921 v.AddArg(x) 5922 return true 5923 } 5924 // match: (NeqB (ConstBool [1]) x) 5925 // cond: 5926 // result: (Not x) 5927 for { 5928 v_0 := v.Args[0] 5929 if v_0.Op != OpConstBool { 5930 break 5931 } 5932 if v_0.AuxInt != 1 { 5933 break 5934 } 5935 x := v.Args[1] 5936 v.reset(OpNot) 5937 v.AddArg(x) 5938 return true 5939 } 5940 return false 5941 } 5942 func rewriteValuegeneric_OpNeqInter(v *Value, config *Config) bool { 5943 b := v.Block 5944 _ = b 5945 // match: (NeqInter x y) 5946 // cond: 5947 // result: (NeqPtr (ITab x) (ITab y)) 5948 for { 5949 x := v.Args[0] 5950 y := v.Args[1] 5951 v.reset(OpNeqPtr) 5952 v0 := b.NewValue0(v.Line, OpITab, config.fe.TypeBytePtr()) 5953 v0.AddArg(x) 5954 v.AddArg(v0) 5955 v1 := b.NewValue0(v.Line, OpITab, config.fe.TypeBytePtr()) 5956 v1.AddArg(y) 5957 v.AddArg(v1) 5958 return true 5959 } 5960 } 5961 func rewriteValuegeneric_OpNeqPtr(v *Value, config *Config) bool { 5962 b := v.Block 5963 _ = b 5964 // match: (NeqPtr p (ConstNil)) 5965 // cond: 5966 // result: (IsNonNil p) 5967 for { 5968 p := v.Args[0] 5969 v_1 := v.Args[1] 5970 if v_1.Op != OpConstNil { 5971 break 5972 } 5973 v.reset(OpIsNonNil) 5974 v.AddArg(p) 5975 return true 5976 } 5977 // match: (NeqPtr (ConstNil) p) 5978 // cond: 5979 // result: (IsNonNil p) 5980 for { 5981 v_0 := v.Args[0] 5982 if v_0.Op != OpConstNil { 5983 break 5984 } 5985 p := v.Args[1] 5986 v.reset(OpIsNonNil) 5987 v.AddArg(p) 5988 return true 5989 } 5990 return false 5991 } 5992 func rewriteValuegeneric_OpNeqSlice(v *Value, config *Config) bool { 5993 b := v.Block 5994 _ = b 5995 // match: (NeqSlice x y) 5996 // cond: 5997 // result: (NeqPtr (SlicePtr x) (SlicePtr y)) 5998 for { 5999 x := v.Args[0] 6000 y := v.Args[1] 6001 v.reset(OpNeqPtr) 6002 v0 := b.NewValue0(v.Line, OpSlicePtr, config.fe.TypeBytePtr()) 6003 
v0.AddArg(x) 6004 v.AddArg(v0) 6005 v1 := b.NewValue0(v.Line, OpSlicePtr, config.fe.TypeBytePtr()) 6006 v1.AddArg(y) 6007 v.AddArg(v1) 6008 return true 6009 } 6010 } 6011 func rewriteValuegeneric_OpOffPtr(v *Value, config *Config) bool { 6012 b := v.Block 6013 _ = b 6014 // match: (OffPtr (OffPtr p [b]) [a]) 6015 // cond: 6016 // result: (OffPtr p [a+b]) 6017 for { 6018 v_0 := v.Args[0] 6019 if v_0.Op != OpOffPtr { 6020 break 6021 } 6022 p := v_0.Args[0] 6023 b := v_0.AuxInt 6024 a := v.AuxInt 6025 v.reset(OpOffPtr) 6026 v.AddArg(p) 6027 v.AuxInt = a + b 6028 return true 6029 } 6030 // match: (OffPtr p [0]) 6031 // cond: v.Type.Compare(p.Type) == CMPeq 6032 // result: p 6033 for { 6034 p := v.Args[0] 6035 if v.AuxInt != 0 { 6036 break 6037 } 6038 if !(v.Type.Compare(p.Type) == CMPeq) { 6039 break 6040 } 6041 v.reset(OpCopy) 6042 v.Type = p.Type 6043 v.AddArg(p) 6044 return true 6045 } 6046 return false 6047 } 6048 func rewriteValuegeneric_OpOr16(v *Value, config *Config) bool { 6049 b := v.Block 6050 _ = b 6051 // match: (Or16 x (Const16 <t> [c])) 6052 // cond: x.Op != OpConst16 6053 // result: (Or16 (Const16 <t> [c]) x) 6054 for { 6055 x := v.Args[0] 6056 v_1 := v.Args[1] 6057 if v_1.Op != OpConst16 { 6058 break 6059 } 6060 t := v_1.Type 6061 c := v_1.AuxInt 6062 if !(x.Op != OpConst16) { 6063 break 6064 } 6065 v.reset(OpOr16) 6066 v0 := b.NewValue0(v.Line, OpConst16, t) 6067 v0.AuxInt = c 6068 v.AddArg(v0) 6069 v.AddArg(x) 6070 return true 6071 } 6072 // match: (Or16 x x) 6073 // cond: 6074 // result: x 6075 for { 6076 x := v.Args[0] 6077 if x != v.Args[1] { 6078 break 6079 } 6080 v.reset(OpCopy) 6081 v.Type = x.Type 6082 v.AddArg(x) 6083 return true 6084 } 6085 // match: (Or16 (Const16 [0]) x) 6086 // cond: 6087 // result: x 6088 for { 6089 v_0 := v.Args[0] 6090 if v_0.Op != OpConst16 { 6091 break 6092 } 6093 if v_0.AuxInt != 0 { 6094 break 6095 } 6096 x := v.Args[1] 6097 v.reset(OpCopy) 6098 v.Type = x.Type 6099 v.AddArg(x) 6100 return true 6101 } 6102 // 
match: (Or16 (Const16 [-1]) _) 6103 // cond: 6104 // result: (Const16 [-1]) 6105 for { 6106 v_0 := v.Args[0] 6107 if v_0.Op != OpConst16 { 6108 break 6109 } 6110 if v_0.AuxInt != -1 { 6111 break 6112 } 6113 v.reset(OpConst16) 6114 v.AuxInt = -1 6115 return true 6116 } 6117 // match: (Or16 x (Or16 x y)) 6118 // cond: 6119 // result: (Or16 x y) 6120 for { 6121 x := v.Args[0] 6122 v_1 := v.Args[1] 6123 if v_1.Op != OpOr16 { 6124 break 6125 } 6126 if x != v_1.Args[0] { 6127 break 6128 } 6129 y := v_1.Args[1] 6130 v.reset(OpOr16) 6131 v.AddArg(x) 6132 v.AddArg(y) 6133 return true 6134 } 6135 // match: (Or16 x (Or16 y x)) 6136 // cond: 6137 // result: (Or16 x y) 6138 for { 6139 x := v.Args[0] 6140 v_1 := v.Args[1] 6141 if v_1.Op != OpOr16 { 6142 break 6143 } 6144 y := v_1.Args[0] 6145 if x != v_1.Args[1] { 6146 break 6147 } 6148 v.reset(OpOr16) 6149 v.AddArg(x) 6150 v.AddArg(y) 6151 return true 6152 } 6153 // match: (Or16 (Or16 x y) x) 6154 // cond: 6155 // result: (Or16 x y) 6156 for { 6157 v_0 := v.Args[0] 6158 if v_0.Op != OpOr16 { 6159 break 6160 } 6161 x := v_0.Args[0] 6162 y := v_0.Args[1] 6163 if x != v.Args[1] { 6164 break 6165 } 6166 v.reset(OpOr16) 6167 v.AddArg(x) 6168 v.AddArg(y) 6169 return true 6170 } 6171 // match: (Or16 (Or16 x y) y) 6172 // cond: 6173 // result: (Or16 x y) 6174 for { 6175 v_0 := v.Args[0] 6176 if v_0.Op != OpOr16 { 6177 break 6178 } 6179 x := v_0.Args[0] 6180 y := v_0.Args[1] 6181 if y != v.Args[1] { 6182 break 6183 } 6184 v.reset(OpOr16) 6185 v.AddArg(x) 6186 v.AddArg(y) 6187 return true 6188 } 6189 return false 6190 } 6191 func rewriteValuegeneric_OpOr32(v *Value, config *Config) bool { 6192 b := v.Block 6193 _ = b 6194 // match: (Or32 x (Const32 <t> [c])) 6195 // cond: x.Op != OpConst32 6196 // result: (Or32 (Const32 <t> [c]) x) 6197 for { 6198 x := v.Args[0] 6199 v_1 := v.Args[1] 6200 if v_1.Op != OpConst32 { 6201 break 6202 } 6203 t := v_1.Type 6204 c := v_1.AuxInt 6205 if !(x.Op != OpConst32) { 6206 break 6207 } 6208 
v.reset(OpOr32) 6209 v0 := b.NewValue0(v.Line, OpConst32, t) 6210 v0.AuxInt = c 6211 v.AddArg(v0) 6212 v.AddArg(x) 6213 return true 6214 } 6215 // match: (Or32 x x) 6216 // cond: 6217 // result: x 6218 for { 6219 x := v.Args[0] 6220 if x != v.Args[1] { 6221 break 6222 } 6223 v.reset(OpCopy) 6224 v.Type = x.Type 6225 v.AddArg(x) 6226 return true 6227 } 6228 // match: (Or32 (Const32 [0]) x) 6229 // cond: 6230 // result: x 6231 for { 6232 v_0 := v.Args[0] 6233 if v_0.Op != OpConst32 { 6234 break 6235 } 6236 if v_0.AuxInt != 0 { 6237 break 6238 } 6239 x := v.Args[1] 6240 v.reset(OpCopy) 6241 v.Type = x.Type 6242 v.AddArg(x) 6243 return true 6244 } 6245 // match: (Or32 (Const32 [-1]) _) 6246 // cond: 6247 // result: (Const32 [-1]) 6248 for { 6249 v_0 := v.Args[0] 6250 if v_0.Op != OpConst32 { 6251 break 6252 } 6253 if v_0.AuxInt != -1 { 6254 break 6255 } 6256 v.reset(OpConst32) 6257 v.AuxInt = -1 6258 return true 6259 } 6260 // match: (Or32 x (Or32 x y)) 6261 // cond: 6262 // result: (Or32 x y) 6263 for { 6264 x := v.Args[0] 6265 v_1 := v.Args[1] 6266 if v_1.Op != OpOr32 { 6267 break 6268 } 6269 if x != v_1.Args[0] { 6270 break 6271 } 6272 y := v_1.Args[1] 6273 v.reset(OpOr32) 6274 v.AddArg(x) 6275 v.AddArg(y) 6276 return true 6277 } 6278 // match: (Or32 x (Or32 y x)) 6279 // cond: 6280 // result: (Or32 x y) 6281 for { 6282 x := v.Args[0] 6283 v_1 := v.Args[1] 6284 if v_1.Op != OpOr32 { 6285 break 6286 } 6287 y := v_1.Args[0] 6288 if x != v_1.Args[1] { 6289 break 6290 } 6291 v.reset(OpOr32) 6292 v.AddArg(x) 6293 v.AddArg(y) 6294 return true 6295 } 6296 // match: (Or32 (Or32 x y) x) 6297 // cond: 6298 // result: (Or32 x y) 6299 for { 6300 v_0 := v.Args[0] 6301 if v_0.Op != OpOr32 { 6302 break 6303 } 6304 x := v_0.Args[0] 6305 y := v_0.Args[1] 6306 if x != v.Args[1] { 6307 break 6308 } 6309 v.reset(OpOr32) 6310 v.AddArg(x) 6311 v.AddArg(y) 6312 return true 6313 } 6314 // match: (Or32 (Or32 x y) y) 6315 // cond: 6316 // result: (Or32 x y) 6317 for { 6318 v_0 := v.Args[0] 
6319 if v_0.Op != OpOr32 { 6320 break 6321 } 6322 x := v_0.Args[0] 6323 y := v_0.Args[1] 6324 if y != v.Args[1] { 6325 break 6326 } 6327 v.reset(OpOr32) 6328 v.AddArg(x) 6329 v.AddArg(y) 6330 return true 6331 } 6332 return false 6333 } 6334 func rewriteValuegeneric_OpOr64(v *Value, config *Config) bool { 6335 b := v.Block 6336 _ = b 6337 // match: (Or64 x (Const64 <t> [c])) 6338 // cond: x.Op != OpConst64 6339 // result: (Or64 (Const64 <t> [c]) x) 6340 for { 6341 x := v.Args[0] 6342 v_1 := v.Args[1] 6343 if v_1.Op != OpConst64 { 6344 break 6345 } 6346 t := v_1.Type 6347 c := v_1.AuxInt 6348 if !(x.Op != OpConst64) { 6349 break 6350 } 6351 v.reset(OpOr64) 6352 v0 := b.NewValue0(v.Line, OpConst64, t) 6353 v0.AuxInt = c 6354 v.AddArg(v0) 6355 v.AddArg(x) 6356 return true 6357 } 6358 // match: (Or64 x x) 6359 // cond: 6360 // result: x 6361 for { 6362 x := v.Args[0] 6363 if x != v.Args[1] { 6364 break 6365 } 6366 v.reset(OpCopy) 6367 v.Type = x.Type 6368 v.AddArg(x) 6369 return true 6370 } 6371 // match: (Or64 (Const64 [0]) x) 6372 // cond: 6373 // result: x 6374 for { 6375 v_0 := v.Args[0] 6376 if v_0.Op != OpConst64 { 6377 break 6378 } 6379 if v_0.AuxInt != 0 { 6380 break 6381 } 6382 x := v.Args[1] 6383 v.reset(OpCopy) 6384 v.Type = x.Type 6385 v.AddArg(x) 6386 return true 6387 } 6388 // match: (Or64 (Const64 [-1]) _) 6389 // cond: 6390 // result: (Const64 [-1]) 6391 for { 6392 v_0 := v.Args[0] 6393 if v_0.Op != OpConst64 { 6394 break 6395 } 6396 if v_0.AuxInt != -1 { 6397 break 6398 } 6399 v.reset(OpConst64) 6400 v.AuxInt = -1 6401 return true 6402 } 6403 // match: (Or64 x (Or64 x y)) 6404 // cond: 6405 // result: (Or64 x y) 6406 for { 6407 x := v.Args[0] 6408 v_1 := v.Args[1] 6409 if v_1.Op != OpOr64 { 6410 break 6411 } 6412 if x != v_1.Args[0] { 6413 break 6414 } 6415 y := v_1.Args[1] 6416 v.reset(OpOr64) 6417 v.AddArg(x) 6418 v.AddArg(y) 6419 return true 6420 } 6421 // match: (Or64 x (Or64 y x)) 6422 // cond: 6423 // result: (Or64 x y) 6424 for { 6425 x := 
v.Args[0] 6426 v_1 := v.Args[1] 6427 if v_1.Op != OpOr64 { 6428 break 6429 } 6430 y := v_1.Args[0] 6431 if x != v_1.Args[1] { 6432 break 6433 } 6434 v.reset(OpOr64) 6435 v.AddArg(x) 6436 v.AddArg(y) 6437 return true 6438 } 6439 // match: (Or64 (Or64 x y) x) 6440 // cond: 6441 // result: (Or64 x y) 6442 for { 6443 v_0 := v.Args[0] 6444 if v_0.Op != OpOr64 { 6445 break 6446 } 6447 x := v_0.Args[0] 6448 y := v_0.Args[1] 6449 if x != v.Args[1] { 6450 break 6451 } 6452 v.reset(OpOr64) 6453 v.AddArg(x) 6454 v.AddArg(y) 6455 return true 6456 } 6457 // match: (Or64 (Or64 x y) y) 6458 // cond: 6459 // result: (Or64 x y) 6460 for { 6461 v_0 := v.Args[0] 6462 if v_0.Op != OpOr64 { 6463 break 6464 } 6465 x := v_0.Args[0] 6466 y := v_0.Args[1] 6467 if y != v.Args[1] { 6468 break 6469 } 6470 v.reset(OpOr64) 6471 v.AddArg(x) 6472 v.AddArg(y) 6473 return true 6474 } 6475 return false 6476 } 6477 func rewriteValuegeneric_OpOr8(v *Value, config *Config) bool { 6478 b := v.Block 6479 _ = b 6480 // match: (Or8 x (Const8 <t> [c])) 6481 // cond: x.Op != OpConst8 6482 // result: (Or8 (Const8 <t> [c]) x) 6483 for { 6484 x := v.Args[0] 6485 v_1 := v.Args[1] 6486 if v_1.Op != OpConst8 { 6487 break 6488 } 6489 t := v_1.Type 6490 c := v_1.AuxInt 6491 if !(x.Op != OpConst8) { 6492 break 6493 } 6494 v.reset(OpOr8) 6495 v0 := b.NewValue0(v.Line, OpConst8, t) 6496 v0.AuxInt = c 6497 v.AddArg(v0) 6498 v.AddArg(x) 6499 return true 6500 } 6501 // match: (Or8 x x) 6502 // cond: 6503 // result: x 6504 for { 6505 x := v.Args[0] 6506 if x != v.Args[1] { 6507 break 6508 } 6509 v.reset(OpCopy) 6510 v.Type = x.Type 6511 v.AddArg(x) 6512 return true 6513 } 6514 // match: (Or8 (Const8 [0]) x) 6515 // cond: 6516 // result: x 6517 for { 6518 v_0 := v.Args[0] 6519 if v_0.Op != OpConst8 { 6520 break 6521 } 6522 if v_0.AuxInt != 0 { 6523 break 6524 } 6525 x := v.Args[1] 6526 v.reset(OpCopy) 6527 v.Type = x.Type 6528 v.AddArg(x) 6529 return true 6530 } 6531 // match: (Or8 (Const8 [-1]) _) 6532 // cond: 6533 // 
result: (Const8 [-1]) 6534 for { 6535 v_0 := v.Args[0] 6536 if v_0.Op != OpConst8 { 6537 break 6538 } 6539 if v_0.AuxInt != -1 { 6540 break 6541 } 6542 v.reset(OpConst8) 6543 v.AuxInt = -1 6544 return true 6545 } 6546 // match: (Or8 x (Or8 x y)) 6547 // cond: 6548 // result: (Or8 x y) 6549 for { 6550 x := v.Args[0] 6551 v_1 := v.Args[1] 6552 if v_1.Op != OpOr8 { 6553 break 6554 } 6555 if x != v_1.Args[0] { 6556 break 6557 } 6558 y := v_1.Args[1] 6559 v.reset(OpOr8) 6560 v.AddArg(x) 6561 v.AddArg(y) 6562 return true 6563 } 6564 // match: (Or8 x (Or8 y x)) 6565 // cond: 6566 // result: (Or8 x y) 6567 for { 6568 x := v.Args[0] 6569 v_1 := v.Args[1] 6570 if v_1.Op != OpOr8 { 6571 break 6572 } 6573 y := v_1.Args[0] 6574 if x != v_1.Args[1] { 6575 break 6576 } 6577 v.reset(OpOr8) 6578 v.AddArg(x) 6579 v.AddArg(y) 6580 return true 6581 } 6582 // match: (Or8 (Or8 x y) x) 6583 // cond: 6584 // result: (Or8 x y) 6585 for { 6586 v_0 := v.Args[0] 6587 if v_0.Op != OpOr8 { 6588 break 6589 } 6590 x := v_0.Args[0] 6591 y := v_0.Args[1] 6592 if x != v.Args[1] { 6593 break 6594 } 6595 v.reset(OpOr8) 6596 v.AddArg(x) 6597 v.AddArg(y) 6598 return true 6599 } 6600 // match: (Or8 (Or8 x y) y) 6601 // cond: 6602 // result: (Or8 x y) 6603 for { 6604 v_0 := v.Args[0] 6605 if v_0.Op != OpOr8 { 6606 break 6607 } 6608 x := v_0.Args[0] 6609 y := v_0.Args[1] 6610 if y != v.Args[1] { 6611 break 6612 } 6613 v.reset(OpOr8) 6614 v.AddArg(x) 6615 v.AddArg(y) 6616 return true 6617 } 6618 return false 6619 } 6620 func rewriteValuegeneric_OpPhi(v *Value, config *Config) bool { 6621 b := v.Block 6622 _ = b 6623 // match: (Phi (Const8 [c]) (Const8 [c])) 6624 // cond: 6625 // result: (Const8 [c]) 6626 for { 6627 v_0 := v.Args[0] 6628 if v_0.Op != OpConst8 { 6629 break 6630 } 6631 c := v_0.AuxInt 6632 v_1 := v.Args[1] 6633 if v_1.Op != OpConst8 { 6634 break 6635 } 6636 if v_1.AuxInt != c { 6637 break 6638 } 6639 if len(v.Args) != 2 { 6640 break 6641 } 6642 v.reset(OpConst8) 6643 v.AuxInt = c 6644 return 
true 6645 } 6646 // match: (Phi (Const16 [c]) (Const16 [c])) 6647 // cond: 6648 // result: (Const16 [c]) 6649 for { 6650 v_0 := v.Args[0] 6651 if v_0.Op != OpConst16 { 6652 break 6653 } 6654 c := v_0.AuxInt 6655 v_1 := v.Args[1] 6656 if v_1.Op != OpConst16 { 6657 break 6658 } 6659 if v_1.AuxInt != c { 6660 break 6661 } 6662 if len(v.Args) != 2 { 6663 break 6664 } 6665 v.reset(OpConst16) 6666 v.AuxInt = c 6667 return true 6668 } 6669 // match: (Phi (Const32 [c]) (Const32 [c])) 6670 // cond: 6671 // result: (Const32 [c]) 6672 for { 6673 v_0 := v.Args[0] 6674 if v_0.Op != OpConst32 { 6675 break 6676 } 6677 c := v_0.AuxInt 6678 v_1 := v.Args[1] 6679 if v_1.Op != OpConst32 { 6680 break 6681 } 6682 if v_1.AuxInt != c { 6683 break 6684 } 6685 if len(v.Args) != 2 { 6686 break 6687 } 6688 v.reset(OpConst32) 6689 v.AuxInt = c 6690 return true 6691 } 6692 // match: (Phi (Const64 [c]) (Const64 [c])) 6693 // cond: 6694 // result: (Const64 [c]) 6695 for { 6696 v_0 := v.Args[0] 6697 if v_0.Op != OpConst64 { 6698 break 6699 } 6700 c := v_0.AuxInt 6701 v_1 := v.Args[1] 6702 if v_1.Op != OpConst64 { 6703 break 6704 } 6705 if v_1.AuxInt != c { 6706 break 6707 } 6708 if len(v.Args) != 2 { 6709 break 6710 } 6711 v.reset(OpConst64) 6712 v.AuxInt = c 6713 return true 6714 } 6715 return false 6716 } 6717 func rewriteValuegeneric_OpPtrIndex(v *Value, config *Config) bool { 6718 b := v.Block 6719 _ = b 6720 // match: (PtrIndex <t> ptr idx) 6721 // cond: config.PtrSize == 4 6722 // result: (AddPtr ptr (Mul32 <config.fe.TypeInt()> idx (Const32 <config.fe.TypeInt()> [t.ElemType().Size()]))) 6723 for { 6724 t := v.Type 6725 ptr := v.Args[0] 6726 idx := v.Args[1] 6727 if !(config.PtrSize == 4) { 6728 break 6729 } 6730 v.reset(OpAddPtr) 6731 v.AddArg(ptr) 6732 v0 := b.NewValue0(v.Line, OpMul32, config.fe.TypeInt()) 6733 v0.AddArg(idx) 6734 v1 := b.NewValue0(v.Line, OpConst32, config.fe.TypeInt()) 6735 v1.AuxInt = t.ElemType().Size() 6736 v0.AddArg(v1) 6737 v.AddArg(v0) 6738 return true 6739 } 
6740 // match: (PtrIndex <t> ptr idx) 6741 // cond: config.PtrSize == 8 6742 // result: (AddPtr ptr (Mul64 <config.fe.TypeInt()> idx (Const64 <config.fe.TypeInt()> [t.ElemType().Size()]))) 6743 for { 6744 t := v.Type 6745 ptr := v.Args[0] 6746 idx := v.Args[1] 6747 if !(config.PtrSize == 8) { 6748 break 6749 } 6750 v.reset(OpAddPtr) 6751 v.AddArg(ptr) 6752 v0 := b.NewValue0(v.Line, OpMul64, config.fe.TypeInt()) 6753 v0.AddArg(idx) 6754 v1 := b.NewValue0(v.Line, OpConst64, config.fe.TypeInt()) 6755 v1.AuxInt = t.ElemType().Size() 6756 v0.AddArg(v1) 6757 v.AddArg(v0) 6758 return true 6759 } 6760 return false 6761 } 6762 func rewriteValuegeneric_OpRsh16Ux16(v *Value, config *Config) bool { 6763 b := v.Block 6764 _ = b 6765 // match: (Rsh16Ux16 <t> x (Const16 [c])) 6766 // cond: 6767 // result: (Rsh16Ux64 x (Const64 <t> [int64(uint16(c))])) 6768 for { 6769 t := v.Type 6770 x := v.Args[0] 6771 v_1 := v.Args[1] 6772 if v_1.Op != OpConst16 { 6773 break 6774 } 6775 c := v_1.AuxInt 6776 v.reset(OpRsh16Ux64) 6777 v.AddArg(x) 6778 v0 := b.NewValue0(v.Line, OpConst64, t) 6779 v0.AuxInt = int64(uint16(c)) 6780 v.AddArg(v0) 6781 return true 6782 } 6783 // match: (Rsh16Ux16 (Const16 [0]) _) 6784 // cond: 6785 // result: (Const16 [0]) 6786 for { 6787 v_0 := v.Args[0] 6788 if v_0.Op != OpConst16 { 6789 break 6790 } 6791 if v_0.AuxInt != 0 { 6792 break 6793 } 6794 v.reset(OpConst16) 6795 v.AuxInt = 0 6796 return true 6797 } 6798 return false 6799 } 6800 func rewriteValuegeneric_OpRsh16Ux32(v *Value, config *Config) bool { 6801 b := v.Block 6802 _ = b 6803 // match: (Rsh16Ux32 <t> x (Const32 [c])) 6804 // cond: 6805 // result: (Rsh16Ux64 x (Const64 <t> [int64(uint32(c))])) 6806 for { 6807 t := v.Type 6808 x := v.Args[0] 6809 v_1 := v.Args[1] 6810 if v_1.Op != OpConst32 { 6811 break 6812 } 6813 c := v_1.AuxInt 6814 v.reset(OpRsh16Ux64) 6815 v.AddArg(x) 6816 v0 := b.NewValue0(v.Line, OpConst64, t) 6817 v0.AuxInt = int64(uint32(c)) 6818 v.AddArg(v0) 6819 return true 6820 } 6821 // 
match: (Rsh16Ux32 (Const16 [0]) _) 6822 // cond: 6823 // result: (Const16 [0]) 6824 for { 6825 v_0 := v.Args[0] 6826 if v_0.Op != OpConst16 { 6827 break 6828 } 6829 if v_0.AuxInt != 0 { 6830 break 6831 } 6832 v.reset(OpConst16) 6833 v.AuxInt = 0 6834 return true 6835 } 6836 return false 6837 } 6838 func rewriteValuegeneric_OpRsh16Ux64(v *Value, config *Config) bool { 6839 b := v.Block 6840 _ = b 6841 // match: (Rsh16Ux64 (Const16 [c]) (Const64 [d])) 6842 // cond: 6843 // result: (Const16 [int64(int16(uint16(c) >> uint64(d)))]) 6844 for { 6845 v_0 := v.Args[0] 6846 if v_0.Op != OpConst16 { 6847 break 6848 } 6849 c := v_0.AuxInt 6850 v_1 := v.Args[1] 6851 if v_1.Op != OpConst64 { 6852 break 6853 } 6854 d := v_1.AuxInt 6855 v.reset(OpConst16) 6856 v.AuxInt = int64(int16(uint16(c) >> uint64(d))) 6857 return true 6858 } 6859 // match: (Rsh16Ux64 x (Const64 [0])) 6860 // cond: 6861 // result: x 6862 for { 6863 x := v.Args[0] 6864 v_1 := v.Args[1] 6865 if v_1.Op != OpConst64 { 6866 break 6867 } 6868 if v_1.AuxInt != 0 { 6869 break 6870 } 6871 v.reset(OpCopy) 6872 v.Type = x.Type 6873 v.AddArg(x) 6874 return true 6875 } 6876 // match: (Rsh16Ux64 (Const16 [0]) _) 6877 // cond: 6878 // result: (Const16 [0]) 6879 for { 6880 v_0 := v.Args[0] 6881 if v_0.Op != OpConst16 { 6882 break 6883 } 6884 if v_0.AuxInt != 0 { 6885 break 6886 } 6887 v.reset(OpConst16) 6888 v.AuxInt = 0 6889 return true 6890 } 6891 // match: (Rsh16Ux64 _ (Const64 [c])) 6892 // cond: uint64(c) >= 16 6893 // result: (Const16 [0]) 6894 for { 6895 v_1 := v.Args[1] 6896 if v_1.Op != OpConst64 { 6897 break 6898 } 6899 c := v_1.AuxInt 6900 if !(uint64(c) >= 16) { 6901 break 6902 } 6903 v.reset(OpConst16) 6904 v.AuxInt = 0 6905 return true 6906 } 6907 // match: (Rsh16Ux64 <t> (Rsh16Ux64 x (Const64 [c])) (Const64 [d])) 6908 // cond: !uaddOvf(c,d) 6909 // result: (Rsh16Ux64 x (Const64 <t> [c+d])) 6910 for { 6911 t := v.Type 6912 v_0 := v.Args[0] 6913 if v_0.Op != OpRsh16Ux64 { 6914 break 6915 } 6916 x := v_0.Args[0] 
6917 v_0_1 := v_0.Args[1] 6918 if v_0_1.Op != OpConst64 { 6919 break 6920 } 6921 c := v_0_1.AuxInt 6922 v_1 := v.Args[1] 6923 if v_1.Op != OpConst64 { 6924 break 6925 } 6926 d := v_1.AuxInt 6927 if !(!uaddOvf(c, d)) { 6928 break 6929 } 6930 v.reset(OpRsh16Ux64) 6931 v.AddArg(x) 6932 v0 := b.NewValue0(v.Line, OpConst64, t) 6933 v0.AuxInt = c + d 6934 v.AddArg(v0) 6935 return true 6936 } 6937 // match: (Rsh16Ux64 (Lsh16x64 (Rsh16Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) 6938 // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3) 6939 // result: (Rsh16Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3])) 6940 for { 6941 v_0 := v.Args[0] 6942 if v_0.Op != OpLsh16x64 { 6943 break 6944 } 6945 v_0_0 := v_0.Args[0] 6946 if v_0_0.Op != OpRsh16Ux64 { 6947 break 6948 } 6949 x := v_0_0.Args[0] 6950 v_0_0_1 := v_0_0.Args[1] 6951 if v_0_0_1.Op != OpConst64 { 6952 break 6953 } 6954 c1 := v_0_0_1.AuxInt 6955 v_0_1 := v_0.Args[1] 6956 if v_0_1.Op != OpConst64 { 6957 break 6958 } 6959 c2 := v_0_1.AuxInt 6960 v_1 := v.Args[1] 6961 if v_1.Op != OpConst64 { 6962 break 6963 } 6964 c3 := v_1.AuxInt 6965 if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { 6966 break 6967 } 6968 v.reset(OpRsh16Ux64) 6969 v.AddArg(x) 6970 v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64()) 6971 v0.AuxInt = c1 - c2 + c3 6972 v.AddArg(v0) 6973 return true 6974 } 6975 return false 6976 } 6977 func rewriteValuegeneric_OpRsh16Ux8(v *Value, config *Config) bool { 6978 b := v.Block 6979 _ = b 6980 // match: (Rsh16Ux8 <t> x (Const8 [c])) 6981 // cond: 6982 // result: (Rsh16Ux64 x (Const64 <t> [int64(uint8(c))])) 6983 for { 6984 t := v.Type 6985 x := v.Args[0] 6986 v_1 := v.Args[1] 6987 if v_1.Op != OpConst8 { 6988 break 6989 } 6990 c := v_1.AuxInt 6991 v.reset(OpRsh16Ux64) 6992 v.AddArg(x) 6993 v0 := b.NewValue0(v.Line, OpConst64, t) 6994 v0.AuxInt = int64(uint8(c)) 6995 v.AddArg(v0) 6996 return true 6997 } 6998 // match: (Rsh16Ux8 
(Const16 [0]) _) 6999 // cond: 7000 // result: (Const16 [0]) 7001 for { 7002 v_0 := v.Args[0] 7003 if v_0.Op != OpConst16 { 7004 break 7005 } 7006 if v_0.AuxInt != 0 { 7007 break 7008 } 7009 v.reset(OpConst16) 7010 v.AuxInt = 0 7011 return true 7012 } 7013 return false 7014 } 7015 func rewriteValuegeneric_OpRsh16x16(v *Value, config *Config) bool { 7016 b := v.Block 7017 _ = b 7018 // match: (Rsh16x16 <t> x (Const16 [c])) 7019 // cond: 7020 // result: (Rsh16x64 x (Const64 <t> [int64(uint16(c))])) 7021 for { 7022 t := v.Type 7023 x := v.Args[0] 7024 v_1 := v.Args[1] 7025 if v_1.Op != OpConst16 { 7026 break 7027 } 7028 c := v_1.AuxInt 7029 v.reset(OpRsh16x64) 7030 v.AddArg(x) 7031 v0 := b.NewValue0(v.Line, OpConst64, t) 7032 v0.AuxInt = int64(uint16(c)) 7033 v.AddArg(v0) 7034 return true 7035 } 7036 // match: (Rsh16x16 (Const16 [0]) _) 7037 // cond: 7038 // result: (Const16 [0]) 7039 for { 7040 v_0 := v.Args[0] 7041 if v_0.Op != OpConst16 { 7042 break 7043 } 7044 if v_0.AuxInt != 0 { 7045 break 7046 } 7047 v.reset(OpConst16) 7048 v.AuxInt = 0 7049 return true 7050 } 7051 return false 7052 } 7053 func rewriteValuegeneric_OpRsh16x32(v *Value, config *Config) bool { 7054 b := v.Block 7055 _ = b 7056 // match: (Rsh16x32 <t> x (Const32 [c])) 7057 // cond: 7058 // result: (Rsh16x64 x (Const64 <t> [int64(uint32(c))])) 7059 for { 7060 t := v.Type 7061 x := v.Args[0] 7062 v_1 := v.Args[1] 7063 if v_1.Op != OpConst32 { 7064 break 7065 } 7066 c := v_1.AuxInt 7067 v.reset(OpRsh16x64) 7068 v.AddArg(x) 7069 v0 := b.NewValue0(v.Line, OpConst64, t) 7070 v0.AuxInt = int64(uint32(c)) 7071 v.AddArg(v0) 7072 return true 7073 } 7074 // match: (Rsh16x32 (Const16 [0]) _) 7075 // cond: 7076 // result: (Const16 [0]) 7077 for { 7078 v_0 := v.Args[0] 7079 if v_0.Op != OpConst16 { 7080 break 7081 } 7082 if v_0.AuxInt != 0 { 7083 break 7084 } 7085 v.reset(OpConst16) 7086 v.AuxInt = 0 7087 return true 7088 } 7089 return false 7090 } 7091 func rewriteValuegeneric_OpRsh16x64(v *Value, config 
*Config) bool { 7092 b := v.Block 7093 _ = b 7094 // match: (Rsh16x64 (Const16 [c]) (Const64 [d])) 7095 // cond: 7096 // result: (Const16 [int64(int16(c) >> uint64(d))]) 7097 for { 7098 v_0 := v.Args[0] 7099 if v_0.Op != OpConst16 { 7100 break 7101 } 7102 c := v_0.AuxInt 7103 v_1 := v.Args[1] 7104 if v_1.Op != OpConst64 { 7105 break 7106 } 7107 d := v_1.AuxInt 7108 v.reset(OpConst16) 7109 v.AuxInt = int64(int16(c) >> uint64(d)) 7110 return true 7111 } 7112 // match: (Rsh16x64 x (Const64 [0])) 7113 // cond: 7114 // result: x 7115 for { 7116 x := v.Args[0] 7117 v_1 := v.Args[1] 7118 if v_1.Op != OpConst64 { 7119 break 7120 } 7121 if v_1.AuxInt != 0 { 7122 break 7123 } 7124 v.reset(OpCopy) 7125 v.Type = x.Type 7126 v.AddArg(x) 7127 return true 7128 } 7129 // match: (Rsh16x64 (Const16 [0]) _) 7130 // cond: 7131 // result: (Const16 [0]) 7132 for { 7133 v_0 := v.Args[0] 7134 if v_0.Op != OpConst16 { 7135 break 7136 } 7137 if v_0.AuxInt != 0 { 7138 break 7139 } 7140 v.reset(OpConst16) 7141 v.AuxInt = 0 7142 return true 7143 } 7144 // match: (Rsh16x64 <t> (Rsh16x64 x (Const64 [c])) (Const64 [d])) 7145 // cond: !uaddOvf(c,d) 7146 // result: (Rsh16x64 x (Const64 <t> [c+d])) 7147 for { 7148 t := v.Type 7149 v_0 := v.Args[0] 7150 if v_0.Op != OpRsh16x64 { 7151 break 7152 } 7153 x := v_0.Args[0] 7154 v_0_1 := v_0.Args[1] 7155 if v_0_1.Op != OpConst64 { 7156 break 7157 } 7158 c := v_0_1.AuxInt 7159 v_1 := v.Args[1] 7160 if v_1.Op != OpConst64 { 7161 break 7162 } 7163 d := v_1.AuxInt 7164 if !(!uaddOvf(c, d)) { 7165 break 7166 } 7167 v.reset(OpRsh16x64) 7168 v.AddArg(x) 7169 v0 := b.NewValue0(v.Line, OpConst64, t) 7170 v0.AuxInt = c + d 7171 v.AddArg(v0) 7172 return true 7173 } 7174 return false 7175 } 7176 func rewriteValuegeneric_OpRsh16x8(v *Value, config *Config) bool { 7177 b := v.Block 7178 _ = b 7179 // match: (Rsh16x8 <t> x (Const8 [c])) 7180 // cond: 7181 // result: (Rsh16x64 x (Const64 <t> [int64(uint8(c))])) 7182 for { 7183 t := v.Type 7184 x := v.Args[0] 7185 v_1 := 
v.Args[1] 7186 if v_1.Op != OpConst8 { 7187 break 7188 } 7189 c := v_1.AuxInt 7190 v.reset(OpRsh16x64) 7191 v.AddArg(x) 7192 v0 := b.NewValue0(v.Line, OpConst64, t) 7193 v0.AuxInt = int64(uint8(c)) 7194 v.AddArg(v0) 7195 return true 7196 } 7197 // match: (Rsh16x8 (Const16 [0]) _) 7198 // cond: 7199 // result: (Const16 [0]) 7200 for { 7201 v_0 := v.Args[0] 7202 if v_0.Op != OpConst16 { 7203 break 7204 } 7205 if v_0.AuxInt != 0 { 7206 break 7207 } 7208 v.reset(OpConst16) 7209 v.AuxInt = 0 7210 return true 7211 } 7212 return false 7213 } 7214 func rewriteValuegeneric_OpRsh32Ux16(v *Value, config *Config) bool { 7215 b := v.Block 7216 _ = b 7217 // match: (Rsh32Ux16 <t> x (Const16 [c])) 7218 // cond: 7219 // result: (Rsh32Ux64 x (Const64 <t> [int64(uint16(c))])) 7220 for { 7221 t := v.Type 7222 x := v.Args[0] 7223 v_1 := v.Args[1] 7224 if v_1.Op != OpConst16 { 7225 break 7226 } 7227 c := v_1.AuxInt 7228 v.reset(OpRsh32Ux64) 7229 v.AddArg(x) 7230 v0 := b.NewValue0(v.Line, OpConst64, t) 7231 v0.AuxInt = int64(uint16(c)) 7232 v.AddArg(v0) 7233 return true 7234 } 7235 // match: (Rsh32Ux16 (Const32 [0]) _) 7236 // cond: 7237 // result: (Const32 [0]) 7238 for { 7239 v_0 := v.Args[0] 7240 if v_0.Op != OpConst32 { 7241 break 7242 } 7243 if v_0.AuxInt != 0 { 7244 break 7245 } 7246 v.reset(OpConst32) 7247 v.AuxInt = 0 7248 return true 7249 } 7250 return false 7251 } 7252 func rewriteValuegeneric_OpRsh32Ux32(v *Value, config *Config) bool { 7253 b := v.Block 7254 _ = b 7255 // match: (Rsh32Ux32 <t> x (Const32 [c])) 7256 // cond: 7257 // result: (Rsh32Ux64 x (Const64 <t> [int64(uint32(c))])) 7258 for { 7259 t := v.Type 7260 x := v.Args[0] 7261 v_1 := v.Args[1] 7262 if v_1.Op != OpConst32 { 7263 break 7264 } 7265 c := v_1.AuxInt 7266 v.reset(OpRsh32Ux64) 7267 v.AddArg(x) 7268 v0 := b.NewValue0(v.Line, OpConst64, t) 7269 v0.AuxInt = int64(uint32(c)) 7270 v.AddArg(v0) 7271 return true 7272 } 7273 // match: (Rsh32Ux32 (Const32 [0]) _) 7274 // cond: 7275 // result: (Const32 [0]) 7276 
for { 7277 v_0 := v.Args[0] 7278 if v_0.Op != OpConst32 { 7279 break 7280 } 7281 if v_0.AuxInt != 0 { 7282 break 7283 } 7284 v.reset(OpConst32) 7285 v.AuxInt = 0 7286 return true 7287 } 7288 return false 7289 } 7290 func rewriteValuegeneric_OpRsh32Ux64(v *Value, config *Config) bool { 7291 b := v.Block 7292 _ = b 7293 // match: (Rsh32Ux64 (Const32 [c]) (Const64 [d])) 7294 // cond: 7295 // result: (Const32 [int64(int32(uint32(c) >> uint64(d)))]) 7296 for { 7297 v_0 := v.Args[0] 7298 if v_0.Op != OpConst32 { 7299 break 7300 } 7301 c := v_0.AuxInt 7302 v_1 := v.Args[1] 7303 if v_1.Op != OpConst64 { 7304 break 7305 } 7306 d := v_1.AuxInt 7307 v.reset(OpConst32) 7308 v.AuxInt = int64(int32(uint32(c) >> uint64(d))) 7309 return true 7310 } 7311 // match: (Rsh32Ux64 x (Const64 [0])) 7312 // cond: 7313 // result: x 7314 for { 7315 x := v.Args[0] 7316 v_1 := v.Args[1] 7317 if v_1.Op != OpConst64 { 7318 break 7319 } 7320 if v_1.AuxInt != 0 { 7321 break 7322 } 7323 v.reset(OpCopy) 7324 v.Type = x.Type 7325 v.AddArg(x) 7326 return true 7327 } 7328 // match: (Rsh32Ux64 (Const32 [0]) _) 7329 // cond: 7330 // result: (Const32 [0]) 7331 for { 7332 v_0 := v.Args[0] 7333 if v_0.Op != OpConst32 { 7334 break 7335 } 7336 if v_0.AuxInt != 0 { 7337 break 7338 } 7339 v.reset(OpConst32) 7340 v.AuxInt = 0 7341 return true 7342 } 7343 // match: (Rsh32Ux64 _ (Const64 [c])) 7344 // cond: uint64(c) >= 32 7345 // result: (Const32 [0]) 7346 for { 7347 v_1 := v.Args[1] 7348 if v_1.Op != OpConst64 { 7349 break 7350 } 7351 c := v_1.AuxInt 7352 if !(uint64(c) >= 32) { 7353 break 7354 } 7355 v.reset(OpConst32) 7356 v.AuxInt = 0 7357 return true 7358 } 7359 // match: (Rsh32Ux64 <t> (Rsh32Ux64 x (Const64 [c])) (Const64 [d])) 7360 // cond: !uaddOvf(c,d) 7361 // result: (Rsh32Ux64 x (Const64 <t> [c+d])) 7362 for { 7363 t := v.Type 7364 v_0 := v.Args[0] 7365 if v_0.Op != OpRsh32Ux64 { 7366 break 7367 } 7368 x := v_0.Args[0] 7369 v_0_1 := v_0.Args[1] 7370 if v_0_1.Op != OpConst64 { 7371 break 7372 } 7373 c 
:= v_0_1.AuxInt 7374 v_1 := v.Args[1] 7375 if v_1.Op != OpConst64 { 7376 break 7377 } 7378 d := v_1.AuxInt 7379 if !(!uaddOvf(c, d)) { 7380 break 7381 } 7382 v.reset(OpRsh32Ux64) 7383 v.AddArg(x) 7384 v0 := b.NewValue0(v.Line, OpConst64, t) 7385 v0.AuxInt = c + d 7386 v.AddArg(v0) 7387 return true 7388 } 7389 // match: (Rsh32Ux64 (Lsh32x64 (Rsh32Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) 7390 // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3) 7391 // result: (Rsh32Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3])) 7392 for { 7393 v_0 := v.Args[0] 7394 if v_0.Op != OpLsh32x64 { 7395 break 7396 } 7397 v_0_0 := v_0.Args[0] 7398 if v_0_0.Op != OpRsh32Ux64 { 7399 break 7400 } 7401 x := v_0_0.Args[0] 7402 v_0_0_1 := v_0_0.Args[1] 7403 if v_0_0_1.Op != OpConst64 { 7404 break 7405 } 7406 c1 := v_0_0_1.AuxInt 7407 v_0_1 := v_0.Args[1] 7408 if v_0_1.Op != OpConst64 { 7409 break 7410 } 7411 c2 := v_0_1.AuxInt 7412 v_1 := v.Args[1] 7413 if v_1.Op != OpConst64 { 7414 break 7415 } 7416 c3 := v_1.AuxInt 7417 if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { 7418 break 7419 } 7420 v.reset(OpRsh32Ux64) 7421 v.AddArg(x) 7422 v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64()) 7423 v0.AuxInt = c1 - c2 + c3 7424 v.AddArg(v0) 7425 return true 7426 } 7427 return false 7428 } 7429 func rewriteValuegeneric_OpRsh32Ux8(v *Value, config *Config) bool { 7430 b := v.Block 7431 _ = b 7432 // match: (Rsh32Ux8 <t> x (Const8 [c])) 7433 // cond: 7434 // result: (Rsh32Ux64 x (Const64 <t> [int64(uint8(c))])) 7435 for { 7436 t := v.Type 7437 x := v.Args[0] 7438 v_1 := v.Args[1] 7439 if v_1.Op != OpConst8 { 7440 break 7441 } 7442 c := v_1.AuxInt 7443 v.reset(OpRsh32Ux64) 7444 v.AddArg(x) 7445 v0 := b.NewValue0(v.Line, OpConst64, t) 7446 v0.AuxInt = int64(uint8(c)) 7447 v.AddArg(v0) 7448 return true 7449 } 7450 // match: (Rsh32Ux8 (Const32 [0]) _) 7451 // cond: 7452 // result: (Const32 [0]) 7453 for { 7454 v_0 := 
v.Args[0] 7455 if v_0.Op != OpConst32 { 7456 break 7457 } 7458 if v_0.AuxInt != 0 { 7459 break 7460 } 7461 v.reset(OpConst32) 7462 v.AuxInt = 0 7463 return true 7464 } 7465 return false 7466 } 7467 func rewriteValuegeneric_OpRsh32x16(v *Value, config *Config) bool { 7468 b := v.Block 7469 _ = b 7470 // match: (Rsh32x16 <t> x (Const16 [c])) 7471 // cond: 7472 // result: (Rsh32x64 x (Const64 <t> [int64(uint16(c))])) 7473 for { 7474 t := v.Type 7475 x := v.Args[0] 7476 v_1 := v.Args[1] 7477 if v_1.Op != OpConst16 { 7478 break 7479 } 7480 c := v_1.AuxInt 7481 v.reset(OpRsh32x64) 7482 v.AddArg(x) 7483 v0 := b.NewValue0(v.Line, OpConst64, t) 7484 v0.AuxInt = int64(uint16(c)) 7485 v.AddArg(v0) 7486 return true 7487 } 7488 // match: (Rsh32x16 (Const32 [0]) _) 7489 // cond: 7490 // result: (Const32 [0]) 7491 for { 7492 v_0 := v.Args[0] 7493 if v_0.Op != OpConst32 { 7494 break 7495 } 7496 if v_0.AuxInt != 0 { 7497 break 7498 } 7499 v.reset(OpConst32) 7500 v.AuxInt = 0 7501 return true 7502 } 7503 return false 7504 } 7505 func rewriteValuegeneric_OpRsh32x32(v *Value, config *Config) bool { 7506 b := v.Block 7507 _ = b 7508 // match: (Rsh32x32 <t> x (Const32 [c])) 7509 // cond: 7510 // result: (Rsh32x64 x (Const64 <t> [int64(uint32(c))])) 7511 for { 7512 t := v.Type 7513 x := v.Args[0] 7514 v_1 := v.Args[1] 7515 if v_1.Op != OpConst32 { 7516 break 7517 } 7518 c := v_1.AuxInt 7519 v.reset(OpRsh32x64) 7520 v.AddArg(x) 7521 v0 := b.NewValue0(v.Line, OpConst64, t) 7522 v0.AuxInt = int64(uint32(c)) 7523 v.AddArg(v0) 7524 return true 7525 } 7526 // match: (Rsh32x32 (Const32 [0]) _) 7527 // cond: 7528 // result: (Const32 [0]) 7529 for { 7530 v_0 := v.Args[0] 7531 if v_0.Op != OpConst32 { 7532 break 7533 } 7534 if v_0.AuxInt != 0 { 7535 break 7536 } 7537 v.reset(OpConst32) 7538 v.AuxInt = 0 7539 return true 7540 } 7541 return false 7542 } 7543 func rewriteValuegeneric_OpRsh32x64(v *Value, config *Config) bool { 7544 b := v.Block 7545 _ = b 7546 // match: (Rsh32x64 (Const32 [c]) 
(Const64 [d])) 7547 // cond: 7548 // result: (Const32 [int64(int32(c) >> uint64(d))]) 7549 for { 7550 v_0 := v.Args[0] 7551 if v_0.Op != OpConst32 { 7552 break 7553 } 7554 c := v_0.AuxInt 7555 v_1 := v.Args[1] 7556 if v_1.Op != OpConst64 { 7557 break 7558 } 7559 d := v_1.AuxInt 7560 v.reset(OpConst32) 7561 v.AuxInt = int64(int32(c) >> uint64(d)) 7562 return true 7563 } 7564 // match: (Rsh32x64 x (Const64 [0])) 7565 // cond: 7566 // result: x 7567 for { 7568 x := v.Args[0] 7569 v_1 := v.Args[1] 7570 if v_1.Op != OpConst64 { 7571 break 7572 } 7573 if v_1.AuxInt != 0 { 7574 break 7575 } 7576 v.reset(OpCopy) 7577 v.Type = x.Type 7578 v.AddArg(x) 7579 return true 7580 } 7581 // match: (Rsh32x64 (Const32 [0]) _) 7582 // cond: 7583 // result: (Const32 [0]) 7584 for { 7585 v_0 := v.Args[0] 7586 if v_0.Op != OpConst32 { 7587 break 7588 } 7589 if v_0.AuxInt != 0 { 7590 break 7591 } 7592 v.reset(OpConst32) 7593 v.AuxInt = 0 7594 return true 7595 } 7596 // match: (Rsh32x64 <t> (Rsh32x64 x (Const64 [c])) (Const64 [d])) 7597 // cond: !uaddOvf(c,d) 7598 // result: (Rsh32x64 x (Const64 <t> [c+d])) 7599 for { 7600 t := v.Type 7601 v_0 := v.Args[0] 7602 if v_0.Op != OpRsh32x64 { 7603 break 7604 } 7605 x := v_0.Args[0] 7606 v_0_1 := v_0.Args[1] 7607 if v_0_1.Op != OpConst64 { 7608 break 7609 } 7610 c := v_0_1.AuxInt 7611 v_1 := v.Args[1] 7612 if v_1.Op != OpConst64 { 7613 break 7614 } 7615 d := v_1.AuxInt 7616 if !(!uaddOvf(c, d)) { 7617 break 7618 } 7619 v.reset(OpRsh32x64) 7620 v.AddArg(x) 7621 v0 := b.NewValue0(v.Line, OpConst64, t) 7622 v0.AuxInt = c + d 7623 v.AddArg(v0) 7624 return true 7625 } 7626 return false 7627 } 7628 func rewriteValuegeneric_OpRsh32x8(v *Value, config *Config) bool { 7629 b := v.Block 7630 _ = b 7631 // match: (Rsh32x8 <t> x (Const8 [c])) 7632 // cond: 7633 // result: (Rsh32x64 x (Const64 <t> [int64(uint8(c))])) 7634 for { 7635 t := v.Type 7636 x := v.Args[0] 7637 v_1 := v.Args[1] 7638 if v_1.Op != OpConst8 { 7639 break 7640 } 7641 c := v_1.AuxInt 7642 
v.reset(OpRsh32x64) 7643 v.AddArg(x) 7644 v0 := b.NewValue0(v.Line, OpConst64, t) 7645 v0.AuxInt = int64(uint8(c)) 7646 v.AddArg(v0) 7647 return true 7648 } 7649 // match: (Rsh32x8 (Const32 [0]) _) 7650 // cond: 7651 // result: (Const32 [0]) 7652 for { 7653 v_0 := v.Args[0] 7654 if v_0.Op != OpConst32 { 7655 break 7656 } 7657 if v_0.AuxInt != 0 { 7658 break 7659 } 7660 v.reset(OpConst32) 7661 v.AuxInt = 0 7662 return true 7663 } 7664 return false 7665 } 7666 func rewriteValuegeneric_OpRsh64Ux16(v *Value, config *Config) bool { 7667 b := v.Block 7668 _ = b 7669 // match: (Rsh64Ux16 <t> x (Const16 [c])) 7670 // cond: 7671 // result: (Rsh64Ux64 x (Const64 <t> [int64(uint16(c))])) 7672 for { 7673 t := v.Type 7674 x := v.Args[0] 7675 v_1 := v.Args[1] 7676 if v_1.Op != OpConst16 { 7677 break 7678 } 7679 c := v_1.AuxInt 7680 v.reset(OpRsh64Ux64) 7681 v.AddArg(x) 7682 v0 := b.NewValue0(v.Line, OpConst64, t) 7683 v0.AuxInt = int64(uint16(c)) 7684 v.AddArg(v0) 7685 return true 7686 } 7687 // match: (Rsh64Ux16 (Const64 [0]) _) 7688 // cond: 7689 // result: (Const64 [0]) 7690 for { 7691 v_0 := v.Args[0] 7692 if v_0.Op != OpConst64 { 7693 break 7694 } 7695 if v_0.AuxInt != 0 { 7696 break 7697 } 7698 v.reset(OpConst64) 7699 v.AuxInt = 0 7700 return true 7701 } 7702 return false 7703 } 7704 func rewriteValuegeneric_OpRsh64Ux32(v *Value, config *Config) bool { 7705 b := v.Block 7706 _ = b 7707 // match: (Rsh64Ux32 <t> x (Const32 [c])) 7708 // cond: 7709 // result: (Rsh64Ux64 x (Const64 <t> [int64(uint32(c))])) 7710 for { 7711 t := v.Type 7712 x := v.Args[0] 7713 v_1 := v.Args[1] 7714 if v_1.Op != OpConst32 { 7715 break 7716 } 7717 c := v_1.AuxInt 7718 v.reset(OpRsh64Ux64) 7719 v.AddArg(x) 7720 v0 := b.NewValue0(v.Line, OpConst64, t) 7721 v0.AuxInt = int64(uint32(c)) 7722 v.AddArg(v0) 7723 return true 7724 } 7725 // match: (Rsh64Ux32 (Const64 [0]) _) 7726 // cond: 7727 // result: (Const64 [0]) 7728 for { 7729 v_0 := v.Args[0] 7730 if v_0.Op != OpConst64 { 7731 break 7732 } 7733 if 
v_0.AuxInt != 0 { 7734 break 7735 } 7736 v.reset(OpConst64) 7737 v.AuxInt = 0 7738 return true 7739 } 7740 return false 7741 } 7742 func rewriteValuegeneric_OpRsh64Ux64(v *Value, config *Config) bool { 7743 b := v.Block 7744 _ = b 7745 // match: (Rsh64Ux64 (Const64 [c]) (Const64 [d])) 7746 // cond: 7747 // result: (Const64 [int64(uint64(c) >> uint64(d))]) 7748 for { 7749 v_0 := v.Args[0] 7750 if v_0.Op != OpConst64 { 7751 break 7752 } 7753 c := v_0.AuxInt 7754 v_1 := v.Args[1] 7755 if v_1.Op != OpConst64 { 7756 break 7757 } 7758 d := v_1.AuxInt 7759 v.reset(OpConst64) 7760 v.AuxInt = int64(uint64(c) >> uint64(d)) 7761 return true 7762 } 7763 // match: (Rsh64Ux64 x (Const64 [0])) 7764 // cond: 7765 // result: x 7766 for { 7767 x := v.Args[0] 7768 v_1 := v.Args[1] 7769 if v_1.Op != OpConst64 { 7770 break 7771 } 7772 if v_1.AuxInt != 0 { 7773 break 7774 } 7775 v.reset(OpCopy) 7776 v.Type = x.Type 7777 v.AddArg(x) 7778 return true 7779 } 7780 // match: (Rsh64Ux64 (Const64 [0]) _) 7781 // cond: 7782 // result: (Const64 [0]) 7783 for { 7784 v_0 := v.Args[0] 7785 if v_0.Op != OpConst64 { 7786 break 7787 } 7788 if v_0.AuxInt != 0 { 7789 break 7790 } 7791 v.reset(OpConst64) 7792 v.AuxInt = 0 7793 return true 7794 } 7795 // match: (Rsh64Ux64 _ (Const64 [c])) 7796 // cond: uint64(c) >= 64 7797 // result: (Const64 [0]) 7798 for { 7799 v_1 := v.Args[1] 7800 if v_1.Op != OpConst64 { 7801 break 7802 } 7803 c := v_1.AuxInt 7804 if !(uint64(c) >= 64) { 7805 break 7806 } 7807 v.reset(OpConst64) 7808 v.AuxInt = 0 7809 return true 7810 } 7811 // match: (Rsh64Ux64 <t> (Rsh64Ux64 x (Const64 [c])) (Const64 [d])) 7812 // cond: !uaddOvf(c,d) 7813 // result: (Rsh64Ux64 x (Const64 <t> [c+d])) 7814 for { 7815 t := v.Type 7816 v_0 := v.Args[0] 7817 if v_0.Op != OpRsh64Ux64 { 7818 break 7819 } 7820 x := v_0.Args[0] 7821 v_0_1 := v_0.Args[1] 7822 if v_0_1.Op != OpConst64 { 7823 break 7824 } 7825 c := v_0_1.AuxInt 7826 v_1 := v.Args[1] 7827 if v_1.Op != OpConst64 { 7828 break 7829 } 7830 d := 
v_1.AuxInt 7831 if !(!uaddOvf(c, d)) { 7832 break 7833 } 7834 v.reset(OpRsh64Ux64) 7835 v.AddArg(x) 7836 v0 := b.NewValue0(v.Line, OpConst64, t) 7837 v0.AuxInt = c + d 7838 v.AddArg(v0) 7839 return true 7840 } 7841 // match: (Rsh64Ux64 (Lsh64x64 (Rsh64Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) 7842 // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3) 7843 // result: (Rsh64Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3])) 7844 for { 7845 v_0 := v.Args[0] 7846 if v_0.Op != OpLsh64x64 { 7847 break 7848 } 7849 v_0_0 := v_0.Args[0] 7850 if v_0_0.Op != OpRsh64Ux64 { 7851 break 7852 } 7853 x := v_0_0.Args[0] 7854 v_0_0_1 := v_0_0.Args[1] 7855 if v_0_0_1.Op != OpConst64 { 7856 break 7857 } 7858 c1 := v_0_0_1.AuxInt 7859 v_0_1 := v_0.Args[1] 7860 if v_0_1.Op != OpConst64 { 7861 break 7862 } 7863 c2 := v_0_1.AuxInt 7864 v_1 := v.Args[1] 7865 if v_1.Op != OpConst64 { 7866 break 7867 } 7868 c3 := v_1.AuxInt 7869 if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { 7870 break 7871 } 7872 v.reset(OpRsh64Ux64) 7873 v.AddArg(x) 7874 v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64()) 7875 v0.AuxInt = c1 - c2 + c3 7876 v.AddArg(v0) 7877 return true 7878 } 7879 return false 7880 } 7881 func rewriteValuegeneric_OpRsh64Ux8(v *Value, config *Config) bool { 7882 b := v.Block 7883 _ = b 7884 // match: (Rsh64Ux8 <t> x (Const8 [c])) 7885 // cond: 7886 // result: (Rsh64Ux64 x (Const64 <t> [int64(uint8(c))])) 7887 for { 7888 t := v.Type 7889 x := v.Args[0] 7890 v_1 := v.Args[1] 7891 if v_1.Op != OpConst8 { 7892 break 7893 } 7894 c := v_1.AuxInt 7895 v.reset(OpRsh64Ux64) 7896 v.AddArg(x) 7897 v0 := b.NewValue0(v.Line, OpConst64, t) 7898 v0.AuxInt = int64(uint8(c)) 7899 v.AddArg(v0) 7900 return true 7901 } 7902 // match: (Rsh64Ux8 (Const64 [0]) _) 7903 // cond: 7904 // result: (Const64 [0]) 7905 for { 7906 v_0 := v.Args[0] 7907 if v_0.Op != OpConst64 { 7908 break 7909 } 7910 if v_0.AuxInt != 0 { 7911 break 
7912 } 7913 v.reset(OpConst64) 7914 v.AuxInt = 0 7915 return true 7916 } 7917 return false 7918 } 7919 func rewriteValuegeneric_OpRsh64x16(v *Value, config *Config) bool { 7920 b := v.Block 7921 _ = b 7922 // match: (Rsh64x16 <t> x (Const16 [c])) 7923 // cond: 7924 // result: (Rsh64x64 x (Const64 <t> [int64(uint16(c))])) 7925 for { 7926 t := v.Type 7927 x := v.Args[0] 7928 v_1 := v.Args[1] 7929 if v_1.Op != OpConst16 { 7930 break 7931 } 7932 c := v_1.AuxInt 7933 v.reset(OpRsh64x64) 7934 v.AddArg(x) 7935 v0 := b.NewValue0(v.Line, OpConst64, t) 7936 v0.AuxInt = int64(uint16(c)) 7937 v.AddArg(v0) 7938 return true 7939 } 7940 // match: (Rsh64x16 (Const64 [0]) _) 7941 // cond: 7942 // result: (Const64 [0]) 7943 for { 7944 v_0 := v.Args[0] 7945 if v_0.Op != OpConst64 { 7946 break 7947 } 7948 if v_0.AuxInt != 0 { 7949 break 7950 } 7951 v.reset(OpConst64) 7952 v.AuxInt = 0 7953 return true 7954 } 7955 return false 7956 } 7957 func rewriteValuegeneric_OpRsh64x32(v *Value, config *Config) bool { 7958 b := v.Block 7959 _ = b 7960 // match: (Rsh64x32 <t> x (Const32 [c])) 7961 // cond: 7962 // result: (Rsh64x64 x (Const64 <t> [int64(uint32(c))])) 7963 for { 7964 t := v.Type 7965 x := v.Args[0] 7966 v_1 := v.Args[1] 7967 if v_1.Op != OpConst32 { 7968 break 7969 } 7970 c := v_1.AuxInt 7971 v.reset(OpRsh64x64) 7972 v.AddArg(x) 7973 v0 := b.NewValue0(v.Line, OpConst64, t) 7974 v0.AuxInt = int64(uint32(c)) 7975 v.AddArg(v0) 7976 return true 7977 } 7978 // match: (Rsh64x32 (Const64 [0]) _) 7979 // cond: 7980 // result: (Const64 [0]) 7981 for { 7982 v_0 := v.Args[0] 7983 if v_0.Op != OpConst64 { 7984 break 7985 } 7986 if v_0.AuxInt != 0 { 7987 break 7988 } 7989 v.reset(OpConst64) 7990 v.AuxInt = 0 7991 return true 7992 } 7993 return false 7994 } 7995 func rewriteValuegeneric_OpRsh64x64(v *Value, config *Config) bool { 7996 b := v.Block 7997 _ = b 7998 // match: (Rsh64x64 (Const64 [c]) (Const64 [d])) 7999 // cond: 8000 // result: (Const64 [c >> uint64(d)]) 8001 for { 8002 v_0 := 
v.Args[0] 8003 if v_0.Op != OpConst64 { 8004 break 8005 } 8006 c := v_0.AuxInt 8007 v_1 := v.Args[1] 8008 if v_1.Op != OpConst64 { 8009 break 8010 } 8011 d := v_1.AuxInt 8012 v.reset(OpConst64) 8013 v.AuxInt = c >> uint64(d) 8014 return true 8015 } 8016 // match: (Rsh64x64 x (Const64 [0])) 8017 // cond: 8018 // result: x 8019 for { 8020 x := v.Args[0] 8021 v_1 := v.Args[1] 8022 if v_1.Op != OpConst64 { 8023 break 8024 } 8025 if v_1.AuxInt != 0 { 8026 break 8027 } 8028 v.reset(OpCopy) 8029 v.Type = x.Type 8030 v.AddArg(x) 8031 return true 8032 } 8033 // match: (Rsh64x64 (Const64 [0]) _) 8034 // cond: 8035 // result: (Const64 [0]) 8036 for { 8037 v_0 := v.Args[0] 8038 if v_0.Op != OpConst64 { 8039 break 8040 } 8041 if v_0.AuxInt != 0 { 8042 break 8043 } 8044 v.reset(OpConst64) 8045 v.AuxInt = 0 8046 return true 8047 } 8048 // match: (Rsh64x64 <t> (Rsh64x64 x (Const64 [c])) (Const64 [d])) 8049 // cond: !uaddOvf(c,d) 8050 // result: (Rsh64x64 x (Const64 <t> [c+d])) 8051 for { 8052 t := v.Type 8053 v_0 := v.Args[0] 8054 if v_0.Op != OpRsh64x64 { 8055 break 8056 } 8057 x := v_0.Args[0] 8058 v_0_1 := v_0.Args[1] 8059 if v_0_1.Op != OpConst64 { 8060 break 8061 } 8062 c := v_0_1.AuxInt 8063 v_1 := v.Args[1] 8064 if v_1.Op != OpConst64 { 8065 break 8066 } 8067 d := v_1.AuxInt 8068 if !(!uaddOvf(c, d)) { 8069 break 8070 } 8071 v.reset(OpRsh64x64) 8072 v.AddArg(x) 8073 v0 := b.NewValue0(v.Line, OpConst64, t) 8074 v0.AuxInt = c + d 8075 v.AddArg(v0) 8076 return true 8077 } 8078 return false 8079 } 8080 func rewriteValuegeneric_OpRsh64x8(v *Value, config *Config) bool { 8081 b := v.Block 8082 _ = b 8083 // match: (Rsh64x8 <t> x (Const8 [c])) 8084 // cond: 8085 // result: (Rsh64x64 x (Const64 <t> [int64(uint8(c))])) 8086 for { 8087 t := v.Type 8088 x := v.Args[0] 8089 v_1 := v.Args[1] 8090 if v_1.Op != OpConst8 { 8091 break 8092 } 8093 c := v_1.AuxInt 8094 v.reset(OpRsh64x64) 8095 v.AddArg(x) 8096 v0 := b.NewValue0(v.Line, OpConst64, t) 8097 v0.AuxInt = int64(uint8(c)) 8098 
v.AddArg(v0) 8099 return true 8100 } 8101 // match: (Rsh64x8 (Const64 [0]) _) 8102 // cond: 8103 // result: (Const64 [0]) 8104 for { 8105 v_0 := v.Args[0] 8106 if v_0.Op != OpConst64 { 8107 break 8108 } 8109 if v_0.AuxInt != 0 { 8110 break 8111 } 8112 v.reset(OpConst64) 8113 v.AuxInt = 0 8114 return true 8115 } 8116 return false 8117 } 8118 func rewriteValuegeneric_OpRsh8Ux16(v *Value, config *Config) bool { 8119 b := v.Block 8120 _ = b 8121 // match: (Rsh8Ux16 <t> x (Const16 [c])) 8122 // cond: 8123 // result: (Rsh8Ux64 x (Const64 <t> [int64(uint16(c))])) 8124 for { 8125 t := v.Type 8126 x := v.Args[0] 8127 v_1 := v.Args[1] 8128 if v_1.Op != OpConst16 { 8129 break 8130 } 8131 c := v_1.AuxInt 8132 v.reset(OpRsh8Ux64) 8133 v.AddArg(x) 8134 v0 := b.NewValue0(v.Line, OpConst64, t) 8135 v0.AuxInt = int64(uint16(c)) 8136 v.AddArg(v0) 8137 return true 8138 } 8139 // match: (Rsh8Ux16 (Const8 [0]) _) 8140 // cond: 8141 // result: (Const8 [0]) 8142 for { 8143 v_0 := v.Args[0] 8144 if v_0.Op != OpConst8 { 8145 break 8146 } 8147 if v_0.AuxInt != 0 { 8148 break 8149 } 8150 v.reset(OpConst8) 8151 v.AuxInt = 0 8152 return true 8153 } 8154 return false 8155 } 8156 func rewriteValuegeneric_OpRsh8Ux32(v *Value, config *Config) bool { 8157 b := v.Block 8158 _ = b 8159 // match: (Rsh8Ux32 <t> x (Const32 [c])) 8160 // cond: 8161 // result: (Rsh8Ux64 x (Const64 <t> [int64(uint32(c))])) 8162 for { 8163 t := v.Type 8164 x := v.Args[0] 8165 v_1 := v.Args[1] 8166 if v_1.Op != OpConst32 { 8167 break 8168 } 8169 c := v_1.AuxInt 8170 v.reset(OpRsh8Ux64) 8171 v.AddArg(x) 8172 v0 := b.NewValue0(v.Line, OpConst64, t) 8173 v0.AuxInt = int64(uint32(c)) 8174 v.AddArg(v0) 8175 return true 8176 } 8177 // match: (Rsh8Ux32 (Const8 [0]) _) 8178 // cond: 8179 // result: (Const8 [0]) 8180 for { 8181 v_0 := v.Args[0] 8182 if v_0.Op != OpConst8 { 8183 break 8184 } 8185 if v_0.AuxInt != 0 { 8186 break 8187 } 8188 v.reset(OpConst8) 8189 v.AuxInt = 0 8190 return true 8191 } 8192 return false 8193 } 8194 func 
rewriteValuegeneric_OpRsh8Ux64(v *Value, config *Config) bool { 8195 b := v.Block 8196 _ = b 8197 // match: (Rsh8Ux64 (Const8 [c]) (Const64 [d])) 8198 // cond: 8199 // result: (Const8 [int64(int8(uint8(c) >> uint64(d)))]) 8200 for { 8201 v_0 := v.Args[0] 8202 if v_0.Op != OpConst8 { 8203 break 8204 } 8205 c := v_0.AuxInt 8206 v_1 := v.Args[1] 8207 if v_1.Op != OpConst64 { 8208 break 8209 } 8210 d := v_1.AuxInt 8211 v.reset(OpConst8) 8212 v.AuxInt = int64(int8(uint8(c) >> uint64(d))) 8213 return true 8214 } 8215 // match: (Rsh8Ux64 x (Const64 [0])) 8216 // cond: 8217 // result: x 8218 for { 8219 x := v.Args[0] 8220 v_1 := v.Args[1] 8221 if v_1.Op != OpConst64 { 8222 break 8223 } 8224 if v_1.AuxInt != 0 { 8225 break 8226 } 8227 v.reset(OpCopy) 8228 v.Type = x.Type 8229 v.AddArg(x) 8230 return true 8231 } 8232 // match: (Rsh8Ux64 (Const8 [0]) _) 8233 // cond: 8234 // result: (Const8 [0]) 8235 for { 8236 v_0 := v.Args[0] 8237 if v_0.Op != OpConst8 { 8238 break 8239 } 8240 if v_0.AuxInt != 0 { 8241 break 8242 } 8243 v.reset(OpConst8) 8244 v.AuxInt = 0 8245 return true 8246 } 8247 // match: (Rsh8Ux64 _ (Const64 [c])) 8248 // cond: uint64(c) >= 8 8249 // result: (Const8 [0]) 8250 for { 8251 v_1 := v.Args[1] 8252 if v_1.Op != OpConst64 { 8253 break 8254 } 8255 c := v_1.AuxInt 8256 if !(uint64(c) >= 8) { 8257 break 8258 } 8259 v.reset(OpConst8) 8260 v.AuxInt = 0 8261 return true 8262 } 8263 // match: (Rsh8Ux64 <t> (Rsh8Ux64 x (Const64 [c])) (Const64 [d])) 8264 // cond: !uaddOvf(c,d) 8265 // result: (Rsh8Ux64 x (Const64 <t> [c+d])) 8266 for { 8267 t := v.Type 8268 v_0 := v.Args[0] 8269 if v_0.Op != OpRsh8Ux64 { 8270 break 8271 } 8272 x := v_0.Args[0] 8273 v_0_1 := v_0.Args[1] 8274 if v_0_1.Op != OpConst64 { 8275 break 8276 } 8277 c := v_0_1.AuxInt 8278 v_1 := v.Args[1] 8279 if v_1.Op != OpConst64 { 8280 break 8281 } 8282 d := v_1.AuxInt 8283 if !(!uaddOvf(c, d)) { 8284 break 8285 } 8286 v.reset(OpRsh8Ux64) 8287 v.AddArg(x) 8288 v0 := b.NewValue0(v.Line, OpConst64, t) 8289 
v0.AuxInt = c + d 8290 v.AddArg(v0) 8291 return true 8292 } 8293 // match: (Rsh8Ux64 (Lsh8x64 (Rsh8Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) 8294 // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3) 8295 // result: (Rsh8Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3])) 8296 for { 8297 v_0 := v.Args[0] 8298 if v_0.Op != OpLsh8x64 { 8299 break 8300 } 8301 v_0_0 := v_0.Args[0] 8302 if v_0_0.Op != OpRsh8Ux64 { 8303 break 8304 } 8305 x := v_0_0.Args[0] 8306 v_0_0_1 := v_0_0.Args[1] 8307 if v_0_0_1.Op != OpConst64 { 8308 break 8309 } 8310 c1 := v_0_0_1.AuxInt 8311 v_0_1 := v_0.Args[1] 8312 if v_0_1.Op != OpConst64 { 8313 break 8314 } 8315 c2 := v_0_1.AuxInt 8316 v_1 := v.Args[1] 8317 if v_1.Op != OpConst64 { 8318 break 8319 } 8320 c3 := v_1.AuxInt 8321 if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { 8322 break 8323 } 8324 v.reset(OpRsh8Ux64) 8325 v.AddArg(x) 8326 v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64()) 8327 v0.AuxInt = c1 - c2 + c3 8328 v.AddArg(v0) 8329 return true 8330 } 8331 return false 8332 } 8333 func rewriteValuegeneric_OpRsh8Ux8(v *Value, config *Config) bool { 8334 b := v.Block 8335 _ = b 8336 // match: (Rsh8Ux8 <t> x (Const8 [c])) 8337 // cond: 8338 // result: (Rsh8Ux64 x (Const64 <t> [int64(uint8(c))])) 8339 for { 8340 t := v.Type 8341 x := v.Args[0] 8342 v_1 := v.Args[1] 8343 if v_1.Op != OpConst8 { 8344 break 8345 } 8346 c := v_1.AuxInt 8347 v.reset(OpRsh8Ux64) 8348 v.AddArg(x) 8349 v0 := b.NewValue0(v.Line, OpConst64, t) 8350 v0.AuxInt = int64(uint8(c)) 8351 v.AddArg(v0) 8352 return true 8353 } 8354 // match: (Rsh8Ux8 (Const8 [0]) _) 8355 // cond: 8356 // result: (Const8 [0]) 8357 for { 8358 v_0 := v.Args[0] 8359 if v_0.Op != OpConst8 { 8360 break 8361 } 8362 if v_0.AuxInt != 0 { 8363 break 8364 } 8365 v.reset(OpConst8) 8366 v.AuxInt = 0 8367 return true 8368 } 8369 return false 8370 } 8371 func rewriteValuegeneric_OpRsh8x16(v *Value, config *Config) 
bool { 8372 b := v.Block 8373 _ = b 8374 // match: (Rsh8x16 <t> x (Const16 [c])) 8375 // cond: 8376 // result: (Rsh8x64 x (Const64 <t> [int64(uint16(c))])) 8377 for { 8378 t := v.Type 8379 x := v.Args[0] 8380 v_1 := v.Args[1] 8381 if v_1.Op != OpConst16 { 8382 break 8383 } 8384 c := v_1.AuxInt 8385 v.reset(OpRsh8x64) 8386 v.AddArg(x) 8387 v0 := b.NewValue0(v.Line, OpConst64, t) 8388 v0.AuxInt = int64(uint16(c)) 8389 v.AddArg(v0) 8390 return true 8391 } 8392 // match: (Rsh8x16 (Const8 [0]) _) 8393 // cond: 8394 // result: (Const8 [0]) 8395 for { 8396 v_0 := v.Args[0] 8397 if v_0.Op != OpConst8 { 8398 break 8399 } 8400 if v_0.AuxInt != 0 { 8401 break 8402 } 8403 v.reset(OpConst8) 8404 v.AuxInt = 0 8405 return true 8406 } 8407 return false 8408 } 8409 func rewriteValuegeneric_OpRsh8x32(v *Value, config *Config) bool { 8410 b := v.Block 8411 _ = b 8412 // match: (Rsh8x32 <t> x (Const32 [c])) 8413 // cond: 8414 // result: (Rsh8x64 x (Const64 <t> [int64(uint32(c))])) 8415 for { 8416 t := v.Type 8417 x := v.Args[0] 8418 v_1 := v.Args[1] 8419 if v_1.Op != OpConst32 { 8420 break 8421 } 8422 c := v_1.AuxInt 8423 v.reset(OpRsh8x64) 8424 v.AddArg(x) 8425 v0 := b.NewValue0(v.Line, OpConst64, t) 8426 v0.AuxInt = int64(uint32(c)) 8427 v.AddArg(v0) 8428 return true 8429 } 8430 // match: (Rsh8x32 (Const8 [0]) _) 8431 // cond: 8432 // result: (Const8 [0]) 8433 for { 8434 v_0 := v.Args[0] 8435 if v_0.Op != OpConst8 { 8436 break 8437 } 8438 if v_0.AuxInt != 0 { 8439 break 8440 } 8441 v.reset(OpConst8) 8442 v.AuxInt = 0 8443 return true 8444 } 8445 return false 8446 } 8447 func rewriteValuegeneric_OpRsh8x64(v *Value, config *Config) bool { 8448 b := v.Block 8449 _ = b 8450 // match: (Rsh8x64 (Const8 [c]) (Const64 [d])) 8451 // cond: 8452 // result: (Const8 [int64(int8(c) >> uint64(d))]) 8453 for { 8454 v_0 := v.Args[0] 8455 if v_0.Op != OpConst8 { 8456 break 8457 } 8458 c := v_0.AuxInt 8459 v_1 := v.Args[1] 8460 if v_1.Op != OpConst64 { 8461 break 8462 } 8463 d := v_1.AuxInt 8464 
v.reset(OpConst8) 8465 v.AuxInt = int64(int8(c) >> uint64(d)) 8466 return true 8467 } 8468 // match: (Rsh8x64 x (Const64 [0])) 8469 // cond: 8470 // result: x 8471 for { 8472 x := v.Args[0] 8473 v_1 := v.Args[1] 8474 if v_1.Op != OpConst64 { 8475 break 8476 } 8477 if v_1.AuxInt != 0 { 8478 break 8479 } 8480 v.reset(OpCopy) 8481 v.Type = x.Type 8482 v.AddArg(x) 8483 return true 8484 } 8485 // match: (Rsh8x64 (Const8 [0]) _) 8486 // cond: 8487 // result: (Const8 [0]) 8488 for { 8489 v_0 := v.Args[0] 8490 if v_0.Op != OpConst8 { 8491 break 8492 } 8493 if v_0.AuxInt != 0 { 8494 break 8495 } 8496 v.reset(OpConst8) 8497 v.AuxInt = 0 8498 return true 8499 } 8500 // match: (Rsh8x64 <t> (Rsh8x64 x (Const64 [c])) (Const64 [d])) 8501 // cond: !uaddOvf(c,d) 8502 // result: (Rsh8x64 x (Const64 <t> [c+d])) 8503 for { 8504 t := v.Type 8505 v_0 := v.Args[0] 8506 if v_0.Op != OpRsh8x64 { 8507 break 8508 } 8509 x := v_0.Args[0] 8510 v_0_1 := v_0.Args[1] 8511 if v_0_1.Op != OpConst64 { 8512 break 8513 } 8514 c := v_0_1.AuxInt 8515 v_1 := v.Args[1] 8516 if v_1.Op != OpConst64 { 8517 break 8518 } 8519 d := v_1.AuxInt 8520 if !(!uaddOvf(c, d)) { 8521 break 8522 } 8523 v.reset(OpRsh8x64) 8524 v.AddArg(x) 8525 v0 := b.NewValue0(v.Line, OpConst64, t) 8526 v0.AuxInt = c + d 8527 v.AddArg(v0) 8528 return true 8529 } 8530 return false 8531 } 8532 func rewriteValuegeneric_OpRsh8x8(v *Value, config *Config) bool { 8533 b := v.Block 8534 _ = b 8535 // match: (Rsh8x8 <t> x (Const8 [c])) 8536 // cond: 8537 // result: (Rsh8x64 x (Const64 <t> [int64(uint8(c))])) 8538 for { 8539 t := v.Type 8540 x := v.Args[0] 8541 v_1 := v.Args[1] 8542 if v_1.Op != OpConst8 { 8543 break 8544 } 8545 c := v_1.AuxInt 8546 v.reset(OpRsh8x64) 8547 v.AddArg(x) 8548 v0 := b.NewValue0(v.Line, OpConst64, t) 8549 v0.AuxInt = int64(uint8(c)) 8550 v.AddArg(v0) 8551 return true 8552 } 8553 // match: (Rsh8x8 (Const8 [0]) _) 8554 // cond: 8555 // result: (Const8 [0]) 8556 for { 8557 v_0 := v.Args[0] 8558 if v_0.Op != OpConst8 { 
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpSliceCap simplifies SliceCap values: it folds
// SliceCap of a SliceMake whose cap component is a constant, and collapses
// SliceCap/SliceLen chains through SliceMake. Reports whether v was
// rewritten in place.
func rewriteValuegeneric_OpSliceCap(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (SliceCap (SliceMake _ _ (Const64 <t> [c])))
	// cond:
	// result: (Const64 <t> [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpSliceMake {
			break
		}
		v_0_2 := v_0.Args[2]
		if v_0_2.Op != OpConst64 {
			break
		}
		t := v_0_2.Type
		c := v_0_2.AuxInt
		v.reset(OpConst64)
		v.Type = t
		v.AuxInt = c
		return true
	}
	// match: (SliceCap (SliceMake _ _ (SliceCap x)))
	// cond:
	// result: (SliceCap x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpSliceMake {
			break
		}
		v_0_2 := v_0.Args[2]
		if v_0_2.Op != OpSliceCap {
			break
		}
		x := v_0_2.Args[0]
		v.reset(OpSliceCap)
		v.AddArg(x)
		return true
	}
	// match: (SliceCap (SliceMake _ _ (SliceLen x)))
	// cond:
	// result: (SliceLen x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpSliceMake {
			break
		}
		v_0_2 := v_0.Args[2]
		if v_0_2.Op != OpSliceLen {
			break
		}
		x := v_0_2.Args[0]
		v.reset(OpSliceLen)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpSliceLen simplifies SliceLen of a SliceMake whose
// len component is a constant or another SliceLen. Reports whether v was
// rewritten in place.
func rewriteValuegeneric_OpSliceLen(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (SliceLen (SliceMake _ (Const64 <t> [c]) _))
	// cond:
	// result: (Const64 <t> [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpSliceMake {
			break
		}
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		t := v_0_1.Type
		c := v_0_1.AuxInt
		v.reset(OpConst64)
		v.Type = t
		v.AuxInt = c
		return true
	}
	// match: (SliceLen (SliceMake _ (SliceLen x) _))
	// cond:
	// result: (SliceLen x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpSliceMake {
			break
		}
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpSliceLen {
			break
		}
		x := v_0_1.Args[0]
		v.reset(OpSliceLen)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpSlicePtr collapses SlicePtr of a SliceMake whose
// pointer component is itself a SlicePtr. Reports whether v was rewritten
// in place.
func rewriteValuegeneric_OpSlicePtr(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (SlicePtr (SliceMake (SlicePtr x) _ _))
	// cond:
	// result: (SlicePtr x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpSliceMake {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpSlicePtr {
			break
		}
		x := v_0_0.Args[0]
		v.reset(OpSlicePtr)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpStore decomposes stores of StructMakeN values into
// per-field stores (offset via OffPtr, chained through the memory argument)
// and, further below, turns Store-of-Load into Move for types the frontend
// cannot SSA. Reports whether v was rewritten in place.
func rewriteValuegeneric_OpStore(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Store _ (StructMake0) mem)
	// cond:
	// result: mem
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpStructMake0 {
			break
		}
		mem := v.Args[2]
		v.reset(OpCopy)
		v.Type = mem.Type
		v.AddArg(mem)
		return true
	}
	// match: (Store dst (StructMake1 <t> f0) mem)
	// cond:
	// result: (Store [t.FieldType(0).Size()] dst f0 mem)
	for {
		dst := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpStructMake1 {
			break
		}
		t := v_1.Type
		f0 := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpStore)
		v.AuxInt = t.FieldType(0).Size()
		v.AddArg(dst)
		v.AddArg(f0)
		v.AddArg(mem)
		return true
	}
	// match: (Store dst (StructMake2 <t> f0 f1) mem)
	// cond:
	// result: (Store [t.FieldType(1).Size()] (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst) f1 (Store [t.FieldType(0).Size()] dst f0 mem))
	for {
		dst := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpStructMake2 {
			break
		}
		t := v_1.Type
		f0 := v_1.Args[0]
		f1 := v_1.Args[1]
		mem := v.Args[2]
		v.reset(OpStore)
		v.AuxInt = t.FieldType(1).Size()
8742 v0 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(1).PtrTo()) 8743 v0.AuxInt = t.FieldOff(1) 8744 v0.AddArg(dst) 8745 v.AddArg(v0) 8746 v.AddArg(f1) 8747 v1 := b.NewValue0(v.Line, OpStore, TypeMem) 8748 v1.AuxInt = t.FieldType(0).Size() 8749 v1.AddArg(dst) 8750 v1.AddArg(f0) 8751 v1.AddArg(mem) 8752 v.AddArg(v1) 8753 return true 8754 } 8755 // match: (Store dst (StructMake3 <t> f0 f1 f2) mem) 8756 // cond: 8757 // result: (Store [t.FieldType(2).Size()] (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] dst) f2 (Store [t.FieldType(1).Size()] (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst) f1 (Store [t.FieldType(0).Size()] dst f0 mem))) 8758 for { 8759 dst := v.Args[0] 8760 v_1 := v.Args[1] 8761 if v_1.Op != OpStructMake3 { 8762 break 8763 } 8764 t := v_1.Type 8765 f0 := v_1.Args[0] 8766 f1 := v_1.Args[1] 8767 f2 := v_1.Args[2] 8768 mem := v.Args[2] 8769 v.reset(OpStore) 8770 v.AuxInt = t.FieldType(2).Size() 8771 v0 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(2).PtrTo()) 8772 v0.AuxInt = t.FieldOff(2) 8773 v0.AddArg(dst) 8774 v.AddArg(v0) 8775 v.AddArg(f2) 8776 v1 := b.NewValue0(v.Line, OpStore, TypeMem) 8777 v1.AuxInt = t.FieldType(1).Size() 8778 v2 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(1).PtrTo()) 8779 v2.AuxInt = t.FieldOff(1) 8780 v2.AddArg(dst) 8781 v1.AddArg(v2) 8782 v1.AddArg(f1) 8783 v3 := b.NewValue0(v.Line, OpStore, TypeMem) 8784 v3.AuxInt = t.FieldType(0).Size() 8785 v3.AddArg(dst) 8786 v3.AddArg(f0) 8787 v3.AddArg(mem) 8788 v1.AddArg(v3) 8789 v.AddArg(v1) 8790 return true 8791 } 8792 // match: (Store dst (StructMake4 <t> f0 f1 f2 f3) mem) 8793 // cond: 8794 // result: (Store [t.FieldType(3).Size()] (OffPtr <t.FieldType(3).PtrTo()> [t.FieldOff(3)] dst) f3 (Store [t.FieldType(2).Size()] (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] dst) f2 (Store [t.FieldType(1).Size()] (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst) f1 (Store [t.FieldType(0).Size()] dst f0 mem)))) 8795 for { 8796 dst := v.Args[0] 8797 v_1 := v.Args[1] 8798 if v_1.Op 
!= OpStructMake4 { 8799 break 8800 } 8801 t := v_1.Type 8802 f0 := v_1.Args[0] 8803 f1 := v_1.Args[1] 8804 f2 := v_1.Args[2] 8805 f3 := v_1.Args[3] 8806 mem := v.Args[2] 8807 v.reset(OpStore) 8808 v.AuxInt = t.FieldType(3).Size() 8809 v0 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(3).PtrTo()) 8810 v0.AuxInt = t.FieldOff(3) 8811 v0.AddArg(dst) 8812 v.AddArg(v0) 8813 v.AddArg(f3) 8814 v1 := b.NewValue0(v.Line, OpStore, TypeMem) 8815 v1.AuxInt = t.FieldType(2).Size() 8816 v2 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(2).PtrTo()) 8817 v2.AuxInt = t.FieldOff(2) 8818 v2.AddArg(dst) 8819 v1.AddArg(v2) 8820 v1.AddArg(f2) 8821 v3 := b.NewValue0(v.Line, OpStore, TypeMem) 8822 v3.AuxInt = t.FieldType(1).Size() 8823 v4 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(1).PtrTo()) 8824 v4.AuxInt = t.FieldOff(1) 8825 v4.AddArg(dst) 8826 v3.AddArg(v4) 8827 v3.AddArg(f1) 8828 v5 := b.NewValue0(v.Line, OpStore, TypeMem) 8829 v5.AuxInt = t.FieldType(0).Size() 8830 v5.AddArg(dst) 8831 v5.AddArg(f0) 8832 v5.AddArg(mem) 8833 v3.AddArg(v5) 8834 v1.AddArg(v3) 8835 v.AddArg(v1) 8836 return true 8837 } 8838 // match: (Store [size] dst (Load <t> src mem) mem) 8839 // cond: !config.fe.CanSSA(t) 8840 // result: (Move [size] dst src mem) 8841 for { 8842 size := v.AuxInt 8843 dst := v.Args[0] 8844 v_1 := v.Args[1] 8845 if v_1.Op != OpLoad { 8846 break 8847 } 8848 t := v_1.Type 8849 src := v_1.Args[0] 8850 mem := v_1.Args[1] 8851 if mem != v.Args[2] { 8852 break 8853 } 8854 if !(!config.fe.CanSSA(t)) { 8855 break 8856 } 8857 v.reset(OpMove) 8858 v.AuxInt = size 8859 v.AddArg(dst) 8860 v.AddArg(src) 8861 v.AddArg(mem) 8862 return true 8863 } 8864 // match: (Store [size] dst (Load <t> src mem) (VarDef {x} mem)) 8865 // cond: !config.fe.CanSSA(t) 8866 // result: (Move [size] dst src (VarDef {x} mem)) 8867 for { 8868 size := v.AuxInt 8869 dst := v.Args[0] 8870 v_1 := v.Args[1] 8871 if v_1.Op != OpLoad { 8872 break 8873 } 8874 t := v_1.Type 8875 src := v_1.Args[0] 8876 mem := v_1.Args[1] 8877 v_2 := 
v.Args[2]
		if v_2.Op != OpVarDef {
			break
		}
		x := v_2.Aux
		if mem != v_2.Args[0] {
			break
		}
		if !(!config.fe.CanSSA(t)) {
			break
		}
		v.reset(OpMove)
		v.AuxInt = size
		v.AddArg(dst)
		v.AddArg(src)
		v0 := b.NewValue0(v.Line, OpVarDef, TypeMem)
		v0.Aux = x
		v0.AddArg(mem)
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValuegeneric_OpStringLen folds StringLen of a StringMake whose
// length component is a constant. Reports whether v was rewritten in place.
func rewriteValuegeneric_OpStringLen(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (StringLen (StringMake _ (Const64 <t> [c])))
	// cond:
	// result: (Const64 <t> [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpStringMake {
			break
		}
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		t := v_0_1.Type
		c := v_0_1.AuxInt
		v.reset(OpConst64)
		v.Type = t
		v.AuxInt = c
		return true
	}
	return false
}

// rewriteValuegeneric_OpStringPtr folds StringPtr of a StringMake whose
// pointer component is a constant. Reports whether v was rewritten in place.
func rewriteValuegeneric_OpStringPtr(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (StringPtr (StringMake (Const64 <t> [c]) _))
	// cond:
	// result: (Const64 <t> [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpStringMake {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst64 {
			break
		}
		t := v_0_0.Type
		c := v_0_0.AuxInt
		v.reset(OpConst64)
		v.Type = t
		v.AuxInt = c
		return true
	}
	return false
}

// rewriteValuegeneric_OpStructSelect extracts the selected field directly
// from a StructMakeN argument (indexed by v.AuxInt) and, further below,
// rewrites selects of structs loaded from memory that cannot be SSA'd into
// a load of just the selected field. Reports whether v was rewritten.
func rewriteValuegeneric_OpStructSelect(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (StructSelect (StructMake1 x))
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpStructMake1 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (StructSelect [0] (StructMake2 x _))
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		v_0 := v.Args[0]
		if
v_0.Op != OpStructMake2 { 8974 break 8975 } 8976 x := v_0.Args[0] 8977 v.reset(OpCopy) 8978 v.Type = x.Type 8979 v.AddArg(x) 8980 return true 8981 } 8982 // match: (StructSelect [1] (StructMake2 _ x)) 8983 // cond: 8984 // result: x 8985 for { 8986 if v.AuxInt != 1 { 8987 break 8988 } 8989 v_0 := v.Args[0] 8990 if v_0.Op != OpStructMake2 { 8991 break 8992 } 8993 x := v_0.Args[1] 8994 v.reset(OpCopy) 8995 v.Type = x.Type 8996 v.AddArg(x) 8997 return true 8998 } 8999 // match: (StructSelect [0] (StructMake3 x _ _)) 9000 // cond: 9001 // result: x 9002 for { 9003 if v.AuxInt != 0 { 9004 break 9005 } 9006 v_0 := v.Args[0] 9007 if v_0.Op != OpStructMake3 { 9008 break 9009 } 9010 x := v_0.Args[0] 9011 v.reset(OpCopy) 9012 v.Type = x.Type 9013 v.AddArg(x) 9014 return true 9015 } 9016 // match: (StructSelect [1] (StructMake3 _ x _)) 9017 // cond: 9018 // result: x 9019 for { 9020 if v.AuxInt != 1 { 9021 break 9022 } 9023 v_0 := v.Args[0] 9024 if v_0.Op != OpStructMake3 { 9025 break 9026 } 9027 x := v_0.Args[1] 9028 v.reset(OpCopy) 9029 v.Type = x.Type 9030 v.AddArg(x) 9031 return true 9032 } 9033 // match: (StructSelect [2] (StructMake3 _ _ x)) 9034 // cond: 9035 // result: x 9036 for { 9037 if v.AuxInt != 2 { 9038 break 9039 } 9040 v_0 := v.Args[0] 9041 if v_0.Op != OpStructMake3 { 9042 break 9043 } 9044 x := v_0.Args[2] 9045 v.reset(OpCopy) 9046 v.Type = x.Type 9047 v.AddArg(x) 9048 return true 9049 } 9050 // match: (StructSelect [0] (StructMake4 x _ _ _)) 9051 // cond: 9052 // result: x 9053 for { 9054 if v.AuxInt != 0 { 9055 break 9056 } 9057 v_0 := v.Args[0] 9058 if v_0.Op != OpStructMake4 { 9059 break 9060 } 9061 x := v_0.Args[0] 9062 v.reset(OpCopy) 9063 v.Type = x.Type 9064 v.AddArg(x) 9065 return true 9066 } 9067 // match: (StructSelect [1] (StructMake4 _ x _ _)) 9068 // cond: 9069 // result: x 9070 for { 9071 if v.AuxInt != 1 { 9072 break 9073 } 9074 v_0 := v.Args[0] 9075 if v_0.Op != OpStructMake4 { 9076 break 9077 } 9078 x := v_0.Args[1] 9079 v.reset(OpCopy) 
9080 v.Type = x.Type 9081 v.AddArg(x) 9082 return true 9083 } 9084 // match: (StructSelect [2] (StructMake4 _ _ x _)) 9085 // cond: 9086 // result: x 9087 for { 9088 if v.AuxInt != 2 { 9089 break 9090 } 9091 v_0 := v.Args[0] 9092 if v_0.Op != OpStructMake4 { 9093 break 9094 } 9095 x := v_0.Args[2] 9096 v.reset(OpCopy) 9097 v.Type = x.Type 9098 v.AddArg(x) 9099 return true 9100 } 9101 // match: (StructSelect [3] (StructMake4 _ _ _ x)) 9102 // cond: 9103 // result: x 9104 for { 9105 if v.AuxInt != 3 { 9106 break 9107 } 9108 v_0 := v.Args[0] 9109 if v_0.Op != OpStructMake4 { 9110 break 9111 } 9112 x := v_0.Args[3] 9113 v.reset(OpCopy) 9114 v.Type = x.Type 9115 v.AddArg(x) 9116 return true 9117 } 9118 // match: (StructSelect [i] x:(Load <t> ptr mem)) 9119 // cond: !config.fe.CanSSA(t) 9120 // result: @x.Block (Load <v.Type> (OffPtr <v.Type.PtrTo()> [t.FieldOff(int(i))] ptr) mem) 9121 for { 9122 i := v.AuxInt 9123 x := v.Args[0] 9124 if x.Op != OpLoad { 9125 break 9126 } 9127 t := x.Type 9128 ptr := x.Args[0] 9129 mem := x.Args[1] 9130 if !(!config.fe.CanSSA(t)) { 9131 break 9132 } 9133 b = x.Block 9134 v0 := b.NewValue0(v.Line, OpLoad, v.Type) 9135 v.reset(OpCopy) 9136 v.AddArg(v0) 9137 v1 := b.NewValue0(v.Line, OpOffPtr, v.Type.PtrTo()) 9138 v1.AuxInt = t.FieldOff(int(i)) 9139 v1.AddArg(ptr) 9140 v0.AddArg(v1) 9141 v0.AddArg(mem) 9142 return true 9143 } 9144 return false 9145 } 9146 func rewriteValuegeneric_OpSub16(v *Value, config *Config) bool { 9147 b := v.Block 9148 _ = b 9149 // match: (Sub16 (Const16 [c]) (Const16 [d])) 9150 // cond: 9151 // result: (Const16 [int64(int16(c-d))]) 9152 for { 9153 v_0 := v.Args[0] 9154 if v_0.Op != OpConst16 { 9155 break 9156 } 9157 c := v_0.AuxInt 9158 v_1 := v.Args[1] 9159 if v_1.Op != OpConst16 { 9160 break 9161 } 9162 d := v_1.AuxInt 9163 v.reset(OpConst16) 9164 v.AuxInt = int64(int16(c - d)) 9165 return true 9166 } 9167 // match: (Sub16 x (Const16 <t> [c])) 9168 // cond: x.Op != OpConst16 9169 // result: (Add16 (Const16 <t> 
[int64(int16(-c))]) x) 9170 for { 9171 x := v.Args[0] 9172 v_1 := v.Args[1] 9173 if v_1.Op != OpConst16 { 9174 break 9175 } 9176 t := v_1.Type 9177 c := v_1.AuxInt 9178 if !(x.Op != OpConst16) { 9179 break 9180 } 9181 v.reset(OpAdd16) 9182 v0 := b.NewValue0(v.Line, OpConst16, t) 9183 v0.AuxInt = int64(int16(-c)) 9184 v.AddArg(v0) 9185 v.AddArg(x) 9186 return true 9187 } 9188 // match: (Sub16 x x) 9189 // cond: 9190 // result: (Const16 [0]) 9191 for { 9192 x := v.Args[0] 9193 if x != v.Args[1] { 9194 break 9195 } 9196 v.reset(OpConst16) 9197 v.AuxInt = 0 9198 return true 9199 } 9200 // match: (Sub16 (Add16 x y) x) 9201 // cond: 9202 // result: y 9203 for { 9204 v_0 := v.Args[0] 9205 if v_0.Op != OpAdd16 { 9206 break 9207 } 9208 x := v_0.Args[0] 9209 y := v_0.Args[1] 9210 if x != v.Args[1] { 9211 break 9212 } 9213 v.reset(OpCopy) 9214 v.Type = y.Type 9215 v.AddArg(y) 9216 return true 9217 } 9218 // match: (Sub16 (Add16 x y) y) 9219 // cond: 9220 // result: x 9221 for { 9222 v_0 := v.Args[0] 9223 if v_0.Op != OpAdd16 { 9224 break 9225 } 9226 x := v_0.Args[0] 9227 y := v_0.Args[1] 9228 if y != v.Args[1] { 9229 break 9230 } 9231 v.reset(OpCopy) 9232 v.Type = x.Type 9233 v.AddArg(x) 9234 return true 9235 } 9236 return false 9237 } 9238 func rewriteValuegeneric_OpSub32(v *Value, config *Config) bool { 9239 b := v.Block 9240 _ = b 9241 // match: (Sub32 (Const32 [c]) (Const32 [d])) 9242 // cond: 9243 // result: (Const32 [int64(int32(c-d))]) 9244 for { 9245 v_0 := v.Args[0] 9246 if v_0.Op != OpConst32 { 9247 break 9248 } 9249 c := v_0.AuxInt 9250 v_1 := v.Args[1] 9251 if v_1.Op != OpConst32 { 9252 break 9253 } 9254 d := v_1.AuxInt 9255 v.reset(OpConst32) 9256 v.AuxInt = int64(int32(c - d)) 9257 return true 9258 } 9259 // match: (Sub32 x (Const32 <t> [c])) 9260 // cond: x.Op != OpConst32 9261 // result: (Add32 (Const32 <t> [int64(int32(-c))]) x) 9262 for { 9263 x := v.Args[0] 9264 v_1 := v.Args[1] 9265 if v_1.Op != OpConst32 { 9266 break 9267 } 9268 t := v_1.Type 9269 c := 
v_1.AuxInt 9270 if !(x.Op != OpConst32) { 9271 break 9272 } 9273 v.reset(OpAdd32) 9274 v0 := b.NewValue0(v.Line, OpConst32, t) 9275 v0.AuxInt = int64(int32(-c)) 9276 v.AddArg(v0) 9277 v.AddArg(x) 9278 return true 9279 } 9280 // match: (Sub32 x x) 9281 // cond: 9282 // result: (Const32 [0]) 9283 for { 9284 x := v.Args[0] 9285 if x != v.Args[1] { 9286 break 9287 } 9288 v.reset(OpConst32) 9289 v.AuxInt = 0 9290 return true 9291 } 9292 // match: (Sub32 (Add32 x y) x) 9293 // cond: 9294 // result: y 9295 for { 9296 v_0 := v.Args[0] 9297 if v_0.Op != OpAdd32 { 9298 break 9299 } 9300 x := v_0.Args[0] 9301 y := v_0.Args[1] 9302 if x != v.Args[1] { 9303 break 9304 } 9305 v.reset(OpCopy) 9306 v.Type = y.Type 9307 v.AddArg(y) 9308 return true 9309 } 9310 // match: (Sub32 (Add32 x y) y) 9311 // cond: 9312 // result: x 9313 for { 9314 v_0 := v.Args[0] 9315 if v_0.Op != OpAdd32 { 9316 break 9317 } 9318 x := v_0.Args[0] 9319 y := v_0.Args[1] 9320 if y != v.Args[1] { 9321 break 9322 } 9323 v.reset(OpCopy) 9324 v.Type = x.Type 9325 v.AddArg(x) 9326 return true 9327 } 9328 return false 9329 } 9330 func rewriteValuegeneric_OpSub32F(v *Value, config *Config) bool { 9331 b := v.Block 9332 _ = b 9333 // match: (Sub32F (Const32F [c]) (Const32F [d])) 9334 // cond: 9335 // result: (Const32F [f2i(float64(i2f32(c) - i2f32(d)))]) 9336 for { 9337 v_0 := v.Args[0] 9338 if v_0.Op != OpConst32F { 9339 break 9340 } 9341 c := v_0.AuxInt 9342 v_1 := v.Args[1] 9343 if v_1.Op != OpConst32F { 9344 break 9345 } 9346 d := v_1.AuxInt 9347 v.reset(OpConst32F) 9348 v.AuxInt = f2i(float64(i2f32(c) - i2f32(d))) 9349 return true 9350 } 9351 return false 9352 } 9353 func rewriteValuegeneric_OpSub64(v *Value, config *Config) bool { 9354 b := v.Block 9355 _ = b 9356 // match: (Sub64 (Const64 [c]) (Const64 [d])) 9357 // cond: 9358 // result: (Const64 [c-d]) 9359 for { 9360 v_0 := v.Args[0] 9361 if v_0.Op != OpConst64 { 9362 break 9363 } 9364 c := v_0.AuxInt 9365 v_1 := v.Args[1] 9366 if v_1.Op != OpConst64 { 9367 
break 9368 } 9369 d := v_1.AuxInt 9370 v.reset(OpConst64) 9371 v.AuxInt = c - d 9372 return true 9373 } 9374 // match: (Sub64 x (Const64 <t> [c])) 9375 // cond: x.Op != OpConst64 9376 // result: (Add64 (Const64 <t> [-c]) x) 9377 for { 9378 x := v.Args[0] 9379 v_1 := v.Args[1] 9380 if v_1.Op != OpConst64 { 9381 break 9382 } 9383 t := v_1.Type 9384 c := v_1.AuxInt 9385 if !(x.Op != OpConst64) { 9386 break 9387 } 9388 v.reset(OpAdd64) 9389 v0 := b.NewValue0(v.Line, OpConst64, t) 9390 v0.AuxInt = -c 9391 v.AddArg(v0) 9392 v.AddArg(x) 9393 return true 9394 } 9395 // match: (Sub64 x x) 9396 // cond: 9397 // result: (Const64 [0]) 9398 for { 9399 x := v.Args[0] 9400 if x != v.Args[1] { 9401 break 9402 } 9403 v.reset(OpConst64) 9404 v.AuxInt = 0 9405 return true 9406 } 9407 // match: (Sub64 (Add64 x y) x) 9408 // cond: 9409 // result: y 9410 for { 9411 v_0 := v.Args[0] 9412 if v_0.Op != OpAdd64 { 9413 break 9414 } 9415 x := v_0.Args[0] 9416 y := v_0.Args[1] 9417 if x != v.Args[1] { 9418 break 9419 } 9420 v.reset(OpCopy) 9421 v.Type = y.Type 9422 v.AddArg(y) 9423 return true 9424 } 9425 // match: (Sub64 (Add64 x y) y) 9426 // cond: 9427 // result: x 9428 for { 9429 v_0 := v.Args[0] 9430 if v_0.Op != OpAdd64 { 9431 break 9432 } 9433 x := v_0.Args[0] 9434 y := v_0.Args[1] 9435 if y != v.Args[1] { 9436 break 9437 } 9438 v.reset(OpCopy) 9439 v.Type = x.Type 9440 v.AddArg(x) 9441 return true 9442 } 9443 return false 9444 } 9445 func rewriteValuegeneric_OpSub64F(v *Value, config *Config) bool { 9446 b := v.Block 9447 _ = b 9448 // match: (Sub64F (Const64F [c]) (Const64F [d])) 9449 // cond: 9450 // result: (Const64F [f2i(i2f(c) - i2f(d))]) 9451 for { 9452 v_0 := v.Args[0] 9453 if v_0.Op != OpConst64F { 9454 break 9455 } 9456 c := v_0.AuxInt 9457 v_1 := v.Args[1] 9458 if v_1.Op != OpConst64F { 9459 break 9460 } 9461 d := v_1.AuxInt 9462 v.reset(OpConst64F) 9463 v.AuxInt = f2i(i2f(c) - i2f(d)) 9464 return true 9465 } 9466 return false 9467 } 9468 func rewriteValuegeneric_OpSub8(v 
*Value, config *Config) bool { 9469 b := v.Block 9470 _ = b 9471 // match: (Sub8 (Const8 [c]) (Const8 [d])) 9472 // cond: 9473 // result: (Const8 [int64(int8(c-d))]) 9474 for { 9475 v_0 := v.Args[0] 9476 if v_0.Op != OpConst8 { 9477 break 9478 } 9479 c := v_0.AuxInt 9480 v_1 := v.Args[1] 9481 if v_1.Op != OpConst8 { 9482 break 9483 } 9484 d := v_1.AuxInt 9485 v.reset(OpConst8) 9486 v.AuxInt = int64(int8(c - d)) 9487 return true 9488 } 9489 // match: (Sub8 x (Const8 <t> [c])) 9490 // cond: x.Op != OpConst8 9491 // result: (Add8 (Const8 <t> [int64(int8(-c))]) x) 9492 for { 9493 x := v.Args[0] 9494 v_1 := v.Args[1] 9495 if v_1.Op != OpConst8 { 9496 break 9497 } 9498 t := v_1.Type 9499 c := v_1.AuxInt 9500 if !(x.Op != OpConst8) { 9501 break 9502 } 9503 v.reset(OpAdd8) 9504 v0 := b.NewValue0(v.Line, OpConst8, t) 9505 v0.AuxInt = int64(int8(-c)) 9506 v.AddArg(v0) 9507 v.AddArg(x) 9508 return true 9509 } 9510 // match: (Sub8 x x) 9511 // cond: 9512 // result: (Const8 [0]) 9513 for { 9514 x := v.Args[0] 9515 if x != v.Args[1] { 9516 break 9517 } 9518 v.reset(OpConst8) 9519 v.AuxInt = 0 9520 return true 9521 } 9522 // match: (Sub8 (Add8 x y) x) 9523 // cond: 9524 // result: y 9525 for { 9526 v_0 := v.Args[0] 9527 if v_0.Op != OpAdd8 { 9528 break 9529 } 9530 x := v_0.Args[0] 9531 y := v_0.Args[1] 9532 if x != v.Args[1] { 9533 break 9534 } 9535 v.reset(OpCopy) 9536 v.Type = y.Type 9537 v.AddArg(y) 9538 return true 9539 } 9540 // match: (Sub8 (Add8 x y) y) 9541 // cond: 9542 // result: x 9543 for { 9544 v_0 := v.Args[0] 9545 if v_0.Op != OpAdd8 { 9546 break 9547 } 9548 x := v_0.Args[0] 9549 y := v_0.Args[1] 9550 if y != v.Args[1] { 9551 break 9552 } 9553 v.reset(OpCopy) 9554 v.Type = x.Type 9555 v.AddArg(x) 9556 return true 9557 } 9558 return false 9559 } 9560 func rewriteValuegeneric_OpTrunc16to8(v *Value, config *Config) bool { 9561 b := v.Block 9562 _ = b 9563 // match: (Trunc16to8 (Const16 [c])) 9564 // cond: 9565 // result: (Const8 [int64(int8(c))]) 9566 for { 9567 v_0 
:= v.Args[0] 9568 if v_0.Op != OpConst16 { 9569 break 9570 } 9571 c := v_0.AuxInt 9572 v.reset(OpConst8) 9573 v.AuxInt = int64(int8(c)) 9574 return true 9575 } 9576 // match: (Trunc16to8 (And16 (Const16 [y]) x)) 9577 // cond: y&0xFF == 0xFF 9578 // result: (Trunc16to8 x) 9579 for { 9580 v_0 := v.Args[0] 9581 if v_0.Op != OpAnd16 { 9582 break 9583 } 9584 v_0_0 := v_0.Args[0] 9585 if v_0_0.Op != OpConst16 { 9586 break 9587 } 9588 y := v_0_0.AuxInt 9589 x := v_0.Args[1] 9590 if !(y&0xFF == 0xFF) { 9591 break 9592 } 9593 v.reset(OpTrunc16to8) 9594 v.AddArg(x) 9595 return true 9596 } 9597 return false 9598 } 9599 func rewriteValuegeneric_OpTrunc32to16(v *Value, config *Config) bool { 9600 b := v.Block 9601 _ = b 9602 // match: (Trunc32to16 (Const32 [c])) 9603 // cond: 9604 // result: (Const16 [int64(int16(c))]) 9605 for { 9606 v_0 := v.Args[0] 9607 if v_0.Op != OpConst32 { 9608 break 9609 } 9610 c := v_0.AuxInt 9611 v.reset(OpConst16) 9612 v.AuxInt = int64(int16(c)) 9613 return true 9614 } 9615 // match: (Trunc32to16 (And32 (Const32 [y]) x)) 9616 // cond: y&0xFFFF == 0xFFFF 9617 // result: (Trunc32to16 x) 9618 for { 9619 v_0 := v.Args[0] 9620 if v_0.Op != OpAnd32 { 9621 break 9622 } 9623 v_0_0 := v_0.Args[0] 9624 if v_0_0.Op != OpConst32 { 9625 break 9626 } 9627 y := v_0_0.AuxInt 9628 x := v_0.Args[1] 9629 if !(y&0xFFFF == 0xFFFF) { 9630 break 9631 } 9632 v.reset(OpTrunc32to16) 9633 v.AddArg(x) 9634 return true 9635 } 9636 return false 9637 } 9638 func rewriteValuegeneric_OpTrunc32to8(v *Value, config *Config) bool { 9639 b := v.Block 9640 _ = b 9641 // match: (Trunc32to8 (Const32 [c])) 9642 // cond: 9643 // result: (Const8 [int64(int8(c))]) 9644 for { 9645 v_0 := v.Args[0] 9646 if v_0.Op != OpConst32 { 9647 break 9648 } 9649 c := v_0.AuxInt 9650 v.reset(OpConst8) 9651 v.AuxInt = int64(int8(c)) 9652 return true 9653 } 9654 // match: (Trunc32to8 (And32 (Const32 [y]) x)) 9655 // cond: y&0xFF == 0xFF 9656 // result: (Trunc32to8 x) 9657 for { 9658 v_0 := v.Args[0] 9659 if 
v_0.Op != OpAnd32 { 9660 break 9661 } 9662 v_0_0 := v_0.Args[0] 9663 if v_0_0.Op != OpConst32 { 9664 break 9665 } 9666 y := v_0_0.AuxInt 9667 x := v_0.Args[1] 9668 if !(y&0xFF == 0xFF) { 9669 break 9670 } 9671 v.reset(OpTrunc32to8) 9672 v.AddArg(x) 9673 return true 9674 } 9675 return false 9676 } 9677 func rewriteValuegeneric_OpTrunc64to16(v *Value, config *Config) bool { 9678 b := v.Block 9679 _ = b 9680 // match: (Trunc64to16 (Const64 [c])) 9681 // cond: 9682 // result: (Const16 [int64(int16(c))]) 9683 for { 9684 v_0 := v.Args[0] 9685 if v_0.Op != OpConst64 { 9686 break 9687 } 9688 c := v_0.AuxInt 9689 v.reset(OpConst16) 9690 v.AuxInt = int64(int16(c)) 9691 return true 9692 } 9693 // match: (Trunc64to16 (And64 (Const64 [y]) x)) 9694 // cond: y&0xFFFF == 0xFFFF 9695 // result: (Trunc64to16 x) 9696 for { 9697 v_0 := v.Args[0] 9698 if v_0.Op != OpAnd64 { 9699 break 9700 } 9701 v_0_0 := v_0.Args[0] 9702 if v_0_0.Op != OpConst64 { 9703 break 9704 } 9705 y := v_0_0.AuxInt 9706 x := v_0.Args[1] 9707 if !(y&0xFFFF == 0xFFFF) { 9708 break 9709 } 9710 v.reset(OpTrunc64to16) 9711 v.AddArg(x) 9712 return true 9713 } 9714 return false 9715 } 9716 func rewriteValuegeneric_OpTrunc64to32(v *Value, config *Config) bool { 9717 b := v.Block 9718 _ = b 9719 // match: (Trunc64to32 (Const64 [c])) 9720 // cond: 9721 // result: (Const32 [int64(int32(c))]) 9722 for { 9723 v_0 := v.Args[0] 9724 if v_0.Op != OpConst64 { 9725 break 9726 } 9727 c := v_0.AuxInt 9728 v.reset(OpConst32) 9729 v.AuxInt = int64(int32(c)) 9730 return true 9731 } 9732 // match: (Trunc64to32 (And64 (Const64 [y]) x)) 9733 // cond: y&0xFFFFFFFF == 0xFFFFFFFF 9734 // result: (Trunc64to32 x) 9735 for { 9736 v_0 := v.Args[0] 9737 if v_0.Op != OpAnd64 { 9738 break 9739 } 9740 v_0_0 := v_0.Args[0] 9741 if v_0_0.Op != OpConst64 { 9742 break 9743 } 9744 y := v_0_0.AuxInt 9745 x := v_0.Args[1] 9746 if !(y&0xFFFFFFFF == 0xFFFFFFFF) { 9747 break 9748 } 9749 v.reset(OpTrunc64to32) 9750 v.AddArg(x) 9751 return true 9752 } 9753 
return false 9754 } 9755 func rewriteValuegeneric_OpTrunc64to8(v *Value, config *Config) bool { 9756 b := v.Block 9757 _ = b 9758 // match: (Trunc64to8 (Const64 [c])) 9759 // cond: 9760 // result: (Const8 [int64(int8(c))]) 9761 for { 9762 v_0 := v.Args[0] 9763 if v_0.Op != OpConst64 { 9764 break 9765 } 9766 c := v_0.AuxInt 9767 v.reset(OpConst8) 9768 v.AuxInt = int64(int8(c)) 9769 return true 9770 } 9771 // match: (Trunc64to8 (And64 (Const64 [y]) x)) 9772 // cond: y&0xFF == 0xFF 9773 // result: (Trunc64to8 x) 9774 for { 9775 v_0 := v.Args[0] 9776 if v_0.Op != OpAnd64 { 9777 break 9778 } 9779 v_0_0 := v_0.Args[0] 9780 if v_0_0.Op != OpConst64 { 9781 break 9782 } 9783 y := v_0_0.AuxInt 9784 x := v_0.Args[1] 9785 if !(y&0xFF == 0xFF) { 9786 break 9787 } 9788 v.reset(OpTrunc64to8) 9789 v.AddArg(x) 9790 return true 9791 } 9792 return false 9793 } 9794 func rewriteValuegeneric_OpXor16(v *Value, config *Config) bool { 9795 b := v.Block 9796 _ = b 9797 // match: (Xor16 x (Const16 <t> [c])) 9798 // cond: x.Op != OpConst16 9799 // result: (Xor16 (Const16 <t> [c]) x) 9800 for { 9801 x := v.Args[0] 9802 v_1 := v.Args[1] 9803 if v_1.Op != OpConst16 { 9804 break 9805 } 9806 t := v_1.Type 9807 c := v_1.AuxInt 9808 if !(x.Op != OpConst16) { 9809 break 9810 } 9811 v.reset(OpXor16) 9812 v0 := b.NewValue0(v.Line, OpConst16, t) 9813 v0.AuxInt = c 9814 v.AddArg(v0) 9815 v.AddArg(x) 9816 return true 9817 } 9818 // match: (Xor16 x x) 9819 // cond: 9820 // result: (Const16 [0]) 9821 for { 9822 x := v.Args[0] 9823 if x != v.Args[1] { 9824 break 9825 } 9826 v.reset(OpConst16) 9827 v.AuxInt = 0 9828 return true 9829 } 9830 // match: (Xor16 (Const16 [0]) x) 9831 // cond: 9832 // result: x 9833 for { 9834 v_0 := v.Args[0] 9835 if v_0.Op != OpConst16 { 9836 break 9837 } 9838 if v_0.AuxInt != 0 { 9839 break 9840 } 9841 x := v.Args[1] 9842 v.reset(OpCopy) 9843 v.Type = x.Type 9844 v.AddArg(x) 9845 return true 9846 } 9847 // match: (Xor16 x (Xor16 x y)) 9848 // cond: 9849 // result: y 9850 for { 
9851 x := v.Args[0] 9852 v_1 := v.Args[1] 9853 if v_1.Op != OpXor16 { 9854 break 9855 } 9856 if x != v_1.Args[0] { 9857 break 9858 } 9859 y := v_1.Args[1] 9860 v.reset(OpCopy) 9861 v.Type = y.Type 9862 v.AddArg(y) 9863 return true 9864 } 9865 // match: (Xor16 x (Xor16 y x)) 9866 // cond: 9867 // result: y 9868 for { 9869 x := v.Args[0] 9870 v_1 := v.Args[1] 9871 if v_1.Op != OpXor16 { 9872 break 9873 } 9874 y := v_1.Args[0] 9875 if x != v_1.Args[1] { 9876 break 9877 } 9878 v.reset(OpCopy) 9879 v.Type = y.Type 9880 v.AddArg(y) 9881 return true 9882 } 9883 // match: (Xor16 (Xor16 x y) x) 9884 // cond: 9885 // result: y 9886 for { 9887 v_0 := v.Args[0] 9888 if v_0.Op != OpXor16 { 9889 break 9890 } 9891 x := v_0.Args[0] 9892 y := v_0.Args[1] 9893 if x != v.Args[1] { 9894 break 9895 } 9896 v.reset(OpCopy) 9897 v.Type = y.Type 9898 v.AddArg(y) 9899 return true 9900 } 9901 // match: (Xor16 (Xor16 x y) y) 9902 // cond: 9903 // result: x 9904 for { 9905 v_0 := v.Args[0] 9906 if v_0.Op != OpXor16 { 9907 break 9908 } 9909 x := v_0.Args[0] 9910 y := v_0.Args[1] 9911 if y != v.Args[1] { 9912 break 9913 } 9914 v.reset(OpCopy) 9915 v.Type = x.Type 9916 v.AddArg(x) 9917 return true 9918 } 9919 return false 9920 } 9921 func rewriteValuegeneric_OpXor32(v *Value, config *Config) bool { 9922 b := v.Block 9923 _ = b 9924 // match: (Xor32 x (Const32 <t> [c])) 9925 // cond: x.Op != OpConst32 9926 // result: (Xor32 (Const32 <t> [c]) x) 9927 for { 9928 x := v.Args[0] 9929 v_1 := v.Args[1] 9930 if v_1.Op != OpConst32 { 9931 break 9932 } 9933 t := v_1.Type 9934 c := v_1.AuxInt 9935 if !(x.Op != OpConst32) { 9936 break 9937 } 9938 v.reset(OpXor32) 9939 v0 := b.NewValue0(v.Line, OpConst32, t) 9940 v0.AuxInt = c 9941 v.AddArg(v0) 9942 v.AddArg(x) 9943 return true 9944 } 9945 // match: (Xor32 x x) 9946 // cond: 9947 // result: (Const32 [0]) 9948 for { 9949 x := v.Args[0] 9950 if x != v.Args[1] { 9951 break 9952 } 9953 v.reset(OpConst32) 9954 v.AuxInt = 0 9955 return true 9956 } 9957 // match: 
(Xor32 (Const32 [0]) x) 9958 // cond: 9959 // result: x 9960 for { 9961 v_0 := v.Args[0] 9962 if v_0.Op != OpConst32 { 9963 break 9964 } 9965 if v_0.AuxInt != 0 { 9966 break 9967 } 9968 x := v.Args[1] 9969 v.reset(OpCopy) 9970 v.Type = x.Type 9971 v.AddArg(x) 9972 return true 9973 } 9974 // match: (Xor32 x (Xor32 x y)) 9975 // cond: 9976 // result: y 9977 for { 9978 x := v.Args[0] 9979 v_1 := v.Args[1] 9980 if v_1.Op != OpXor32 { 9981 break 9982 } 9983 if x != v_1.Args[0] { 9984 break 9985 } 9986 y := v_1.Args[1] 9987 v.reset(OpCopy) 9988 v.Type = y.Type 9989 v.AddArg(y) 9990 return true 9991 } 9992 // match: (Xor32 x (Xor32 y x)) 9993 // cond: 9994 // result: y 9995 for { 9996 x := v.Args[0] 9997 v_1 := v.Args[1] 9998 if v_1.Op != OpXor32 { 9999 break 10000 } 10001 y := v_1.Args[0] 10002 if x != v_1.Args[1] { 10003 break 10004 } 10005 v.reset(OpCopy) 10006 v.Type = y.Type 10007 v.AddArg(y) 10008 return true 10009 } 10010 // match: (Xor32 (Xor32 x y) x) 10011 // cond: 10012 // result: y 10013 for { 10014 v_0 := v.Args[0] 10015 if v_0.Op != OpXor32 { 10016 break 10017 } 10018 x := v_0.Args[0] 10019 y := v_0.Args[1] 10020 if x != v.Args[1] { 10021 break 10022 } 10023 v.reset(OpCopy) 10024 v.Type = y.Type 10025 v.AddArg(y) 10026 return true 10027 } 10028 // match: (Xor32 (Xor32 x y) y) 10029 // cond: 10030 // result: x 10031 for { 10032 v_0 := v.Args[0] 10033 if v_0.Op != OpXor32 { 10034 break 10035 } 10036 x := v_0.Args[0] 10037 y := v_0.Args[1] 10038 if y != v.Args[1] { 10039 break 10040 } 10041 v.reset(OpCopy) 10042 v.Type = x.Type 10043 v.AddArg(x) 10044 return true 10045 } 10046 return false 10047 } 10048 func rewriteValuegeneric_OpXor64(v *Value, config *Config) bool { 10049 b := v.Block 10050 _ = b 10051 // match: (Xor64 x (Const64 <t> [c])) 10052 // cond: x.Op != OpConst64 10053 // result: (Xor64 (Const64 <t> [c]) x) 10054 for { 10055 x := v.Args[0] 10056 v_1 := v.Args[1] 10057 if v_1.Op != OpConst64 { 10058 break 10059 } 10060 t := v_1.Type 10061 c := 
v_1.AuxInt 10062 if !(x.Op != OpConst64) { 10063 break 10064 } 10065 v.reset(OpXor64) 10066 v0 := b.NewValue0(v.Line, OpConst64, t) 10067 v0.AuxInt = c 10068 v.AddArg(v0) 10069 v.AddArg(x) 10070 return true 10071 } 10072 // match: (Xor64 x x) 10073 // cond: 10074 // result: (Const64 [0]) 10075 for { 10076 x := v.Args[0] 10077 if x != v.Args[1] { 10078 break 10079 } 10080 v.reset(OpConst64) 10081 v.AuxInt = 0 10082 return true 10083 } 10084 // match: (Xor64 (Const64 [0]) x) 10085 // cond: 10086 // result: x 10087 for { 10088 v_0 := v.Args[0] 10089 if v_0.Op != OpConst64 { 10090 break 10091 } 10092 if v_0.AuxInt != 0 { 10093 break 10094 } 10095 x := v.Args[1] 10096 v.reset(OpCopy) 10097 v.Type = x.Type 10098 v.AddArg(x) 10099 return true 10100 } 10101 // match: (Xor64 x (Xor64 x y)) 10102 // cond: 10103 // result: y 10104 for { 10105 x := v.Args[0] 10106 v_1 := v.Args[1] 10107 if v_1.Op != OpXor64 { 10108 break 10109 } 10110 if x != v_1.Args[0] { 10111 break 10112 } 10113 y := v_1.Args[1] 10114 v.reset(OpCopy) 10115 v.Type = y.Type 10116 v.AddArg(y) 10117 return true 10118 } 10119 // match: (Xor64 x (Xor64 y x)) 10120 // cond: 10121 // result: y 10122 for { 10123 x := v.Args[0] 10124 v_1 := v.Args[1] 10125 if v_1.Op != OpXor64 { 10126 break 10127 } 10128 y := v_1.Args[0] 10129 if x != v_1.Args[1] { 10130 break 10131 } 10132 v.reset(OpCopy) 10133 v.Type = y.Type 10134 v.AddArg(y) 10135 return true 10136 } 10137 // match: (Xor64 (Xor64 x y) x) 10138 // cond: 10139 // result: y 10140 for { 10141 v_0 := v.Args[0] 10142 if v_0.Op != OpXor64 { 10143 break 10144 } 10145 x := v_0.Args[0] 10146 y := v_0.Args[1] 10147 if x != v.Args[1] { 10148 break 10149 } 10150 v.reset(OpCopy) 10151 v.Type = y.Type 10152 v.AddArg(y) 10153 return true 10154 } 10155 // match: (Xor64 (Xor64 x y) y) 10156 // cond: 10157 // result: x 10158 for { 10159 v_0 := v.Args[0] 10160 if v_0.Op != OpXor64 { 10161 break 10162 } 10163 x := v_0.Args[0] 10164 y := v_0.Args[1] 10165 if y != v.Args[1] { 10166 
break 10167 } 10168 v.reset(OpCopy) 10169 v.Type = x.Type 10170 v.AddArg(x) 10171 return true 10172 } 10173 return false 10174 } 10175 func rewriteValuegeneric_OpXor8(v *Value, config *Config) bool { 10176 b := v.Block 10177 _ = b 10178 // match: (Xor8 x (Const8 <t> [c])) 10179 // cond: x.Op != OpConst8 10180 // result: (Xor8 (Const8 <t> [c]) x) 10181 for { 10182 x := v.Args[0] 10183 v_1 := v.Args[1] 10184 if v_1.Op != OpConst8 { 10185 break 10186 } 10187 t := v_1.Type 10188 c := v_1.AuxInt 10189 if !(x.Op != OpConst8) { 10190 break 10191 } 10192 v.reset(OpXor8) 10193 v0 := b.NewValue0(v.Line, OpConst8, t) 10194 v0.AuxInt = c 10195 v.AddArg(v0) 10196 v.AddArg(x) 10197 return true 10198 } 10199 // match: (Xor8 x x) 10200 // cond: 10201 // result: (Const8 [0]) 10202 for { 10203 x := v.Args[0] 10204 if x != v.Args[1] { 10205 break 10206 } 10207 v.reset(OpConst8) 10208 v.AuxInt = 0 10209 return true 10210 } 10211 // match: (Xor8 (Const8 [0]) x) 10212 // cond: 10213 // result: x 10214 for { 10215 v_0 := v.Args[0] 10216 if v_0.Op != OpConst8 { 10217 break 10218 } 10219 if v_0.AuxInt != 0 { 10220 break 10221 } 10222 x := v.Args[1] 10223 v.reset(OpCopy) 10224 v.Type = x.Type 10225 v.AddArg(x) 10226 return true 10227 } 10228 // match: (Xor8 x (Xor8 x y)) 10229 // cond: 10230 // result: y 10231 for { 10232 x := v.Args[0] 10233 v_1 := v.Args[1] 10234 if v_1.Op != OpXor8 { 10235 break 10236 } 10237 if x != v_1.Args[0] { 10238 break 10239 } 10240 y := v_1.Args[1] 10241 v.reset(OpCopy) 10242 v.Type = y.Type 10243 v.AddArg(y) 10244 return true 10245 } 10246 // match: (Xor8 x (Xor8 y x)) 10247 // cond: 10248 // result: y 10249 for { 10250 x := v.Args[0] 10251 v_1 := v.Args[1] 10252 if v_1.Op != OpXor8 { 10253 break 10254 } 10255 y := v_1.Args[0] 10256 if x != v_1.Args[1] { 10257 break 10258 } 10259 v.reset(OpCopy) 10260 v.Type = y.Type 10261 v.AddArg(y) 10262 return true 10263 } 10264 // match: (Xor8 (Xor8 x y) x) 10265 // cond: 10266 // result: y 10267 for { 10268 v_0 := 
v.Args[0] 10269 if v_0.Op != OpXor8 { 10270 break 10271 } 10272 x := v_0.Args[0] 10273 y := v_0.Args[1] 10274 if x != v.Args[1] { 10275 break 10276 } 10277 v.reset(OpCopy) 10278 v.Type = y.Type 10279 v.AddArg(y) 10280 return true 10281 } 10282 // match: (Xor8 (Xor8 x y) y) 10283 // cond: 10284 // result: x 10285 for { 10286 v_0 := v.Args[0] 10287 if v_0.Op != OpXor8 { 10288 break 10289 } 10290 x := v_0.Args[0] 10291 y := v_0.Args[1] 10292 if y != v.Args[1] { 10293 break 10294 } 10295 v.reset(OpCopy) 10296 v.Type = x.Type 10297 v.AddArg(x) 10298 return true 10299 } 10300 return false 10301 } 10302 func rewriteBlockgeneric(b *Block) bool { 10303 switch b.Kind { 10304 case BlockCheck: 10305 // match: (Check (NilCheck (GetG _) _) next) 10306 // cond: 10307 // result: (Plain nil next) 10308 for { 10309 v := b.Control 10310 if v.Op != OpNilCheck { 10311 break 10312 } 10313 v_0 := v.Args[0] 10314 if v_0.Op != OpGetG { 10315 break 10316 } 10317 next := b.Succs[0] 10318 b.Kind = BlockPlain 10319 b.SetControl(nil) 10320 _ = next 10321 return true 10322 } 10323 case BlockIf: 10324 // match: (If (Not cond) yes no) 10325 // cond: 10326 // result: (If cond no yes) 10327 for { 10328 v := b.Control 10329 if v.Op != OpNot { 10330 break 10331 } 10332 cond := v.Args[0] 10333 yes := b.Succs[0] 10334 no := b.Succs[1] 10335 b.Kind = BlockIf 10336 b.SetControl(cond) 10337 b.swapSuccessors() 10338 _ = no 10339 _ = yes 10340 return true 10341 } 10342 // match: (If (ConstBool [c]) yes no) 10343 // cond: c == 1 10344 // result: (First nil yes no) 10345 for { 10346 v := b.Control 10347 if v.Op != OpConstBool { 10348 break 10349 } 10350 c := v.AuxInt 10351 yes := b.Succs[0] 10352 no := b.Succs[1] 10353 if !(c == 1) { 10354 break 10355 } 10356 b.Kind = BlockFirst 10357 b.SetControl(nil) 10358 _ = yes 10359 _ = no 10360 return true 10361 } 10362 // match: (If (ConstBool [c]) yes no) 10363 // cond: c == 0 10364 // result: (First nil no yes) 10365 for { 10366 v := b.Control 10367 if v.Op != 
OpConstBool { 10368 break 10369 } 10370 c := v.AuxInt 10371 yes := b.Succs[0] 10372 no := b.Succs[1] 10373 if !(c == 0) { 10374 break 10375 } 10376 b.Kind = BlockFirst 10377 b.SetControl(nil) 10378 b.swapSuccessors() 10379 _ = no 10380 _ = yes 10381 return true 10382 } 10383 } 10384 return false 10385 }