github.com/miolini/go@v0.0.0-20160405192216-fca68c8cb408/src/cmd/compile/internal/ssa/rewritegeneric.go (about) 1 // autogenerated from gen/generic.rules: do not edit! 2 // generated with: cd gen; go run *.go 3 4 package ssa 5 6 import "math" 7 8 var _ = math.MinInt8 // in case not otherwise used 9 func rewriteValuegeneric(v *Value, config *Config) bool { 10 switch v.Op { 11 case OpAdd16: 12 return rewriteValuegeneric_OpAdd16(v, config) 13 case OpAdd32: 14 return rewriteValuegeneric_OpAdd32(v, config) 15 case OpAdd32F: 16 return rewriteValuegeneric_OpAdd32F(v, config) 17 case OpAdd64: 18 return rewriteValuegeneric_OpAdd64(v, config) 19 case OpAdd64F: 20 return rewriteValuegeneric_OpAdd64F(v, config) 21 case OpAdd8: 22 return rewriteValuegeneric_OpAdd8(v, config) 23 case OpAddPtr: 24 return rewriteValuegeneric_OpAddPtr(v, config) 25 case OpAnd16: 26 return rewriteValuegeneric_OpAnd16(v, config) 27 case OpAnd32: 28 return rewriteValuegeneric_OpAnd32(v, config) 29 case OpAnd64: 30 return rewriteValuegeneric_OpAnd64(v, config) 31 case OpAnd8: 32 return rewriteValuegeneric_OpAnd8(v, config) 33 case OpArg: 34 return rewriteValuegeneric_OpArg(v, config) 35 case OpArrayIndex: 36 return rewriteValuegeneric_OpArrayIndex(v, config) 37 case OpCom16: 38 return rewriteValuegeneric_OpCom16(v, config) 39 case OpCom32: 40 return rewriteValuegeneric_OpCom32(v, config) 41 case OpCom64: 42 return rewriteValuegeneric_OpCom64(v, config) 43 case OpCom8: 44 return rewriteValuegeneric_OpCom8(v, config) 45 case OpConstInterface: 46 return rewriteValuegeneric_OpConstInterface(v, config) 47 case OpConstSlice: 48 return rewriteValuegeneric_OpConstSlice(v, config) 49 case OpConstString: 50 return rewriteValuegeneric_OpConstString(v, config) 51 case OpConvert: 52 return rewriteValuegeneric_OpConvert(v, config) 53 case OpCvt32Fto64F: 54 return rewriteValuegeneric_OpCvt32Fto64F(v, config) 55 case OpCvt64Fto32F: 56 return rewriteValuegeneric_OpCvt64Fto32F(v, config) 57 case OpDiv64: 58 return 
rewriteValuegeneric_OpDiv64(v, config) 59 case OpDiv64u: 60 return rewriteValuegeneric_OpDiv64u(v, config) 61 case OpEq16: 62 return rewriteValuegeneric_OpEq16(v, config) 63 case OpEq32: 64 return rewriteValuegeneric_OpEq32(v, config) 65 case OpEq64: 66 return rewriteValuegeneric_OpEq64(v, config) 67 case OpEq8: 68 return rewriteValuegeneric_OpEq8(v, config) 69 case OpEqInter: 70 return rewriteValuegeneric_OpEqInter(v, config) 71 case OpEqPtr: 72 return rewriteValuegeneric_OpEqPtr(v, config) 73 case OpEqSlice: 74 return rewriteValuegeneric_OpEqSlice(v, config) 75 case OpGeq16: 76 return rewriteValuegeneric_OpGeq16(v, config) 77 case OpGeq16U: 78 return rewriteValuegeneric_OpGeq16U(v, config) 79 case OpGeq32: 80 return rewriteValuegeneric_OpGeq32(v, config) 81 case OpGeq32U: 82 return rewriteValuegeneric_OpGeq32U(v, config) 83 case OpGeq64: 84 return rewriteValuegeneric_OpGeq64(v, config) 85 case OpGeq64U: 86 return rewriteValuegeneric_OpGeq64U(v, config) 87 case OpGeq8: 88 return rewriteValuegeneric_OpGeq8(v, config) 89 case OpGeq8U: 90 return rewriteValuegeneric_OpGeq8U(v, config) 91 case OpGreater16: 92 return rewriteValuegeneric_OpGreater16(v, config) 93 case OpGreater16U: 94 return rewriteValuegeneric_OpGreater16U(v, config) 95 case OpGreater32: 96 return rewriteValuegeneric_OpGreater32(v, config) 97 case OpGreater32U: 98 return rewriteValuegeneric_OpGreater32U(v, config) 99 case OpGreater64: 100 return rewriteValuegeneric_OpGreater64(v, config) 101 case OpGreater64U: 102 return rewriteValuegeneric_OpGreater64U(v, config) 103 case OpGreater8: 104 return rewriteValuegeneric_OpGreater8(v, config) 105 case OpGreater8U: 106 return rewriteValuegeneric_OpGreater8U(v, config) 107 case OpIsInBounds: 108 return rewriteValuegeneric_OpIsInBounds(v, config) 109 case OpIsSliceInBounds: 110 return rewriteValuegeneric_OpIsSliceInBounds(v, config) 111 case OpLeq16: 112 return rewriteValuegeneric_OpLeq16(v, config) 113 case OpLeq16U: 114 return rewriteValuegeneric_OpLeq16U(v, 
config) 115 case OpLeq32: 116 return rewriteValuegeneric_OpLeq32(v, config) 117 case OpLeq32U: 118 return rewriteValuegeneric_OpLeq32U(v, config) 119 case OpLeq64: 120 return rewriteValuegeneric_OpLeq64(v, config) 121 case OpLeq64U: 122 return rewriteValuegeneric_OpLeq64U(v, config) 123 case OpLeq8: 124 return rewriteValuegeneric_OpLeq8(v, config) 125 case OpLeq8U: 126 return rewriteValuegeneric_OpLeq8U(v, config) 127 case OpLess16: 128 return rewriteValuegeneric_OpLess16(v, config) 129 case OpLess16U: 130 return rewriteValuegeneric_OpLess16U(v, config) 131 case OpLess32: 132 return rewriteValuegeneric_OpLess32(v, config) 133 case OpLess32U: 134 return rewriteValuegeneric_OpLess32U(v, config) 135 case OpLess64: 136 return rewriteValuegeneric_OpLess64(v, config) 137 case OpLess64U: 138 return rewriteValuegeneric_OpLess64U(v, config) 139 case OpLess8: 140 return rewriteValuegeneric_OpLess8(v, config) 141 case OpLess8U: 142 return rewriteValuegeneric_OpLess8U(v, config) 143 case OpLoad: 144 return rewriteValuegeneric_OpLoad(v, config) 145 case OpLsh16x16: 146 return rewriteValuegeneric_OpLsh16x16(v, config) 147 case OpLsh16x32: 148 return rewriteValuegeneric_OpLsh16x32(v, config) 149 case OpLsh16x64: 150 return rewriteValuegeneric_OpLsh16x64(v, config) 151 case OpLsh16x8: 152 return rewriteValuegeneric_OpLsh16x8(v, config) 153 case OpLsh32x16: 154 return rewriteValuegeneric_OpLsh32x16(v, config) 155 case OpLsh32x32: 156 return rewriteValuegeneric_OpLsh32x32(v, config) 157 case OpLsh32x64: 158 return rewriteValuegeneric_OpLsh32x64(v, config) 159 case OpLsh32x8: 160 return rewriteValuegeneric_OpLsh32x8(v, config) 161 case OpLsh64x16: 162 return rewriteValuegeneric_OpLsh64x16(v, config) 163 case OpLsh64x32: 164 return rewriteValuegeneric_OpLsh64x32(v, config) 165 case OpLsh64x64: 166 return rewriteValuegeneric_OpLsh64x64(v, config) 167 case OpLsh64x8: 168 return rewriteValuegeneric_OpLsh64x8(v, config) 169 case OpLsh8x16: 170 return rewriteValuegeneric_OpLsh8x16(v, 
config) 171 case OpLsh8x32: 172 return rewriteValuegeneric_OpLsh8x32(v, config) 173 case OpLsh8x64: 174 return rewriteValuegeneric_OpLsh8x64(v, config) 175 case OpLsh8x8: 176 return rewriteValuegeneric_OpLsh8x8(v, config) 177 case OpMod16: 178 return rewriteValuegeneric_OpMod16(v, config) 179 case OpMod16u: 180 return rewriteValuegeneric_OpMod16u(v, config) 181 case OpMod32: 182 return rewriteValuegeneric_OpMod32(v, config) 183 case OpMod32u: 184 return rewriteValuegeneric_OpMod32u(v, config) 185 case OpMod64: 186 return rewriteValuegeneric_OpMod64(v, config) 187 case OpMod64u: 188 return rewriteValuegeneric_OpMod64u(v, config) 189 case OpMod8: 190 return rewriteValuegeneric_OpMod8(v, config) 191 case OpMod8u: 192 return rewriteValuegeneric_OpMod8u(v, config) 193 case OpMul16: 194 return rewriteValuegeneric_OpMul16(v, config) 195 case OpMul32: 196 return rewriteValuegeneric_OpMul32(v, config) 197 case OpMul32F: 198 return rewriteValuegeneric_OpMul32F(v, config) 199 case OpMul64: 200 return rewriteValuegeneric_OpMul64(v, config) 201 case OpMul64F: 202 return rewriteValuegeneric_OpMul64F(v, config) 203 case OpMul8: 204 return rewriteValuegeneric_OpMul8(v, config) 205 case OpNeg16: 206 return rewriteValuegeneric_OpNeg16(v, config) 207 case OpNeg32: 208 return rewriteValuegeneric_OpNeg32(v, config) 209 case OpNeg64: 210 return rewriteValuegeneric_OpNeg64(v, config) 211 case OpNeg8: 212 return rewriteValuegeneric_OpNeg8(v, config) 213 case OpNeq16: 214 return rewriteValuegeneric_OpNeq16(v, config) 215 case OpNeq32: 216 return rewriteValuegeneric_OpNeq32(v, config) 217 case OpNeq64: 218 return rewriteValuegeneric_OpNeq64(v, config) 219 case OpNeq8: 220 return rewriteValuegeneric_OpNeq8(v, config) 221 case OpNeqInter: 222 return rewriteValuegeneric_OpNeqInter(v, config) 223 case OpNeqPtr: 224 return rewriteValuegeneric_OpNeqPtr(v, config) 225 case OpNeqSlice: 226 return rewriteValuegeneric_OpNeqSlice(v, config) 227 case OpOffPtr: 228 return rewriteValuegeneric_OpOffPtr(v, 
config) 229 case OpOr16: 230 return rewriteValuegeneric_OpOr16(v, config) 231 case OpOr32: 232 return rewriteValuegeneric_OpOr32(v, config) 233 case OpOr64: 234 return rewriteValuegeneric_OpOr64(v, config) 235 case OpOr8: 236 return rewriteValuegeneric_OpOr8(v, config) 237 case OpPhi: 238 return rewriteValuegeneric_OpPhi(v, config) 239 case OpPtrIndex: 240 return rewriteValuegeneric_OpPtrIndex(v, config) 241 case OpRsh16Ux16: 242 return rewriteValuegeneric_OpRsh16Ux16(v, config) 243 case OpRsh16Ux32: 244 return rewriteValuegeneric_OpRsh16Ux32(v, config) 245 case OpRsh16Ux64: 246 return rewriteValuegeneric_OpRsh16Ux64(v, config) 247 case OpRsh16Ux8: 248 return rewriteValuegeneric_OpRsh16Ux8(v, config) 249 case OpRsh16x16: 250 return rewriteValuegeneric_OpRsh16x16(v, config) 251 case OpRsh16x32: 252 return rewriteValuegeneric_OpRsh16x32(v, config) 253 case OpRsh16x64: 254 return rewriteValuegeneric_OpRsh16x64(v, config) 255 case OpRsh16x8: 256 return rewriteValuegeneric_OpRsh16x8(v, config) 257 case OpRsh32Ux16: 258 return rewriteValuegeneric_OpRsh32Ux16(v, config) 259 case OpRsh32Ux32: 260 return rewriteValuegeneric_OpRsh32Ux32(v, config) 261 case OpRsh32Ux64: 262 return rewriteValuegeneric_OpRsh32Ux64(v, config) 263 case OpRsh32Ux8: 264 return rewriteValuegeneric_OpRsh32Ux8(v, config) 265 case OpRsh32x16: 266 return rewriteValuegeneric_OpRsh32x16(v, config) 267 case OpRsh32x32: 268 return rewriteValuegeneric_OpRsh32x32(v, config) 269 case OpRsh32x64: 270 return rewriteValuegeneric_OpRsh32x64(v, config) 271 case OpRsh32x8: 272 return rewriteValuegeneric_OpRsh32x8(v, config) 273 case OpRsh64Ux16: 274 return rewriteValuegeneric_OpRsh64Ux16(v, config) 275 case OpRsh64Ux32: 276 return rewriteValuegeneric_OpRsh64Ux32(v, config) 277 case OpRsh64Ux64: 278 return rewriteValuegeneric_OpRsh64Ux64(v, config) 279 case OpRsh64Ux8: 280 return rewriteValuegeneric_OpRsh64Ux8(v, config) 281 case OpRsh64x16: 282 return rewriteValuegeneric_OpRsh64x16(v, config) 283 case OpRsh64x32: 
284 return rewriteValuegeneric_OpRsh64x32(v, config) 285 case OpRsh64x64: 286 return rewriteValuegeneric_OpRsh64x64(v, config) 287 case OpRsh64x8: 288 return rewriteValuegeneric_OpRsh64x8(v, config) 289 case OpRsh8Ux16: 290 return rewriteValuegeneric_OpRsh8Ux16(v, config) 291 case OpRsh8Ux32: 292 return rewriteValuegeneric_OpRsh8Ux32(v, config) 293 case OpRsh8Ux64: 294 return rewriteValuegeneric_OpRsh8Ux64(v, config) 295 case OpRsh8Ux8: 296 return rewriteValuegeneric_OpRsh8Ux8(v, config) 297 case OpRsh8x16: 298 return rewriteValuegeneric_OpRsh8x16(v, config) 299 case OpRsh8x32: 300 return rewriteValuegeneric_OpRsh8x32(v, config) 301 case OpRsh8x64: 302 return rewriteValuegeneric_OpRsh8x64(v, config) 303 case OpRsh8x8: 304 return rewriteValuegeneric_OpRsh8x8(v, config) 305 case OpSliceCap: 306 return rewriteValuegeneric_OpSliceCap(v, config) 307 case OpSliceLen: 308 return rewriteValuegeneric_OpSliceLen(v, config) 309 case OpSlicePtr: 310 return rewriteValuegeneric_OpSlicePtr(v, config) 311 case OpStore: 312 return rewriteValuegeneric_OpStore(v, config) 313 case OpStringLen: 314 return rewriteValuegeneric_OpStringLen(v, config) 315 case OpStringPtr: 316 return rewriteValuegeneric_OpStringPtr(v, config) 317 case OpStructSelect: 318 return rewriteValuegeneric_OpStructSelect(v, config) 319 case OpSub16: 320 return rewriteValuegeneric_OpSub16(v, config) 321 case OpSub32: 322 return rewriteValuegeneric_OpSub32(v, config) 323 case OpSub32F: 324 return rewriteValuegeneric_OpSub32F(v, config) 325 case OpSub64: 326 return rewriteValuegeneric_OpSub64(v, config) 327 case OpSub64F: 328 return rewriteValuegeneric_OpSub64F(v, config) 329 case OpSub8: 330 return rewriteValuegeneric_OpSub8(v, config) 331 case OpTrunc16to8: 332 return rewriteValuegeneric_OpTrunc16to8(v, config) 333 case OpTrunc32to16: 334 return rewriteValuegeneric_OpTrunc32to16(v, config) 335 case OpTrunc32to8: 336 return rewriteValuegeneric_OpTrunc32to8(v, config) 337 case OpTrunc64to16: 338 return 
rewriteValuegeneric_OpTrunc64to16(v, config) 339 case OpTrunc64to32: 340 return rewriteValuegeneric_OpTrunc64to32(v, config) 341 case OpTrunc64to8: 342 return rewriteValuegeneric_OpTrunc64to8(v, config) 343 case OpXor16: 344 return rewriteValuegeneric_OpXor16(v, config) 345 case OpXor32: 346 return rewriteValuegeneric_OpXor32(v, config) 347 case OpXor64: 348 return rewriteValuegeneric_OpXor64(v, config) 349 case OpXor8: 350 return rewriteValuegeneric_OpXor8(v, config) 351 } 352 return false 353 } 354 func rewriteValuegeneric_OpAdd16(v *Value, config *Config) bool { 355 b := v.Block 356 _ = b 357 // match: (Add16 (Const16 [c]) (Const16 [d])) 358 // cond: 359 // result: (Const16 [int64(int16(c+d))]) 360 for { 361 v_0 := v.Args[0] 362 if v_0.Op != OpConst16 { 363 break 364 } 365 c := v_0.AuxInt 366 v_1 := v.Args[1] 367 if v_1.Op != OpConst16 { 368 break 369 } 370 d := v_1.AuxInt 371 v.reset(OpConst16) 372 v.AuxInt = int64(int16(c + d)) 373 return true 374 } 375 // match: (Add16 x (Const16 <t> [c])) 376 // cond: x.Op != OpConst16 377 // result: (Add16 (Const16 <t> [c]) x) 378 for { 379 x := v.Args[0] 380 v_1 := v.Args[1] 381 if v_1.Op != OpConst16 { 382 break 383 } 384 t := v_1.Type 385 c := v_1.AuxInt 386 if !(x.Op != OpConst16) { 387 break 388 } 389 v.reset(OpAdd16) 390 v0 := b.NewValue0(v.Line, OpConst16, t) 391 v0.AuxInt = c 392 v.AddArg(v0) 393 v.AddArg(x) 394 return true 395 } 396 // match: (Add16 (Const16 [0]) x) 397 // cond: 398 // result: x 399 for { 400 v_0 := v.Args[0] 401 if v_0.Op != OpConst16 { 402 break 403 } 404 if v_0.AuxInt != 0 { 405 break 406 } 407 x := v.Args[1] 408 v.reset(OpCopy) 409 v.Type = x.Type 410 v.AddArg(x) 411 return true 412 } 413 return false 414 } 415 func rewriteValuegeneric_OpAdd32(v *Value, config *Config) bool { 416 b := v.Block 417 _ = b 418 // match: (Add32 (Const32 [c]) (Const32 [d])) 419 // cond: 420 // result: (Const32 [int64(int32(c+d))]) 421 for { 422 v_0 := v.Args[0] 423 if v_0.Op != OpConst32 { 424 break 425 } 426 c := 
v_0.AuxInt 427 v_1 := v.Args[1] 428 if v_1.Op != OpConst32 { 429 break 430 } 431 d := v_1.AuxInt 432 v.reset(OpConst32) 433 v.AuxInt = int64(int32(c + d)) 434 return true 435 } 436 // match: (Add32 x (Const32 <t> [c])) 437 // cond: x.Op != OpConst32 438 // result: (Add32 (Const32 <t> [c]) x) 439 for { 440 x := v.Args[0] 441 v_1 := v.Args[1] 442 if v_1.Op != OpConst32 { 443 break 444 } 445 t := v_1.Type 446 c := v_1.AuxInt 447 if !(x.Op != OpConst32) { 448 break 449 } 450 v.reset(OpAdd32) 451 v0 := b.NewValue0(v.Line, OpConst32, t) 452 v0.AuxInt = c 453 v.AddArg(v0) 454 v.AddArg(x) 455 return true 456 } 457 // match: (Add32 (Const32 [0]) x) 458 // cond: 459 // result: x 460 for { 461 v_0 := v.Args[0] 462 if v_0.Op != OpConst32 { 463 break 464 } 465 if v_0.AuxInt != 0 { 466 break 467 } 468 x := v.Args[1] 469 v.reset(OpCopy) 470 v.Type = x.Type 471 v.AddArg(x) 472 return true 473 } 474 return false 475 } 476 func rewriteValuegeneric_OpAdd32F(v *Value, config *Config) bool { 477 b := v.Block 478 _ = b 479 // match: (Add32F (Const32F [c]) (Const32F [d])) 480 // cond: 481 // result: (Const32F [f2i(float64(i2f32(c) + i2f32(d)))]) 482 for { 483 v_0 := v.Args[0] 484 if v_0.Op != OpConst32F { 485 break 486 } 487 c := v_0.AuxInt 488 v_1 := v.Args[1] 489 if v_1.Op != OpConst32F { 490 break 491 } 492 d := v_1.AuxInt 493 v.reset(OpConst32F) 494 v.AuxInt = f2i(float64(i2f32(c) + i2f32(d))) 495 return true 496 } 497 return false 498 } 499 func rewriteValuegeneric_OpAdd64(v *Value, config *Config) bool { 500 b := v.Block 501 _ = b 502 // match: (Add64 (Const64 [c]) (Const64 [d])) 503 // cond: 504 // result: (Const64 [c+d]) 505 for { 506 v_0 := v.Args[0] 507 if v_0.Op != OpConst64 { 508 break 509 } 510 c := v_0.AuxInt 511 v_1 := v.Args[1] 512 if v_1.Op != OpConst64 { 513 break 514 } 515 d := v_1.AuxInt 516 v.reset(OpConst64) 517 v.AuxInt = c + d 518 return true 519 } 520 // match: (Add64 x (Const64 <t> [c])) 521 // cond: x.Op != OpConst64 522 // result: (Add64 (Const64 <t> [c]) x) 
523 for { 524 x := v.Args[0] 525 v_1 := v.Args[1] 526 if v_1.Op != OpConst64 { 527 break 528 } 529 t := v_1.Type 530 c := v_1.AuxInt 531 if !(x.Op != OpConst64) { 532 break 533 } 534 v.reset(OpAdd64) 535 v0 := b.NewValue0(v.Line, OpConst64, t) 536 v0.AuxInt = c 537 v.AddArg(v0) 538 v.AddArg(x) 539 return true 540 } 541 // match: (Add64 (Const64 [0]) x) 542 // cond: 543 // result: x 544 for { 545 v_0 := v.Args[0] 546 if v_0.Op != OpConst64 { 547 break 548 } 549 if v_0.AuxInt != 0 { 550 break 551 } 552 x := v.Args[1] 553 v.reset(OpCopy) 554 v.Type = x.Type 555 v.AddArg(x) 556 return true 557 } 558 return false 559 } 560 func rewriteValuegeneric_OpAdd64F(v *Value, config *Config) bool { 561 b := v.Block 562 _ = b 563 // match: (Add64F (Const64F [c]) (Const64F [d])) 564 // cond: 565 // result: (Const64F [f2i(i2f(c) + i2f(d))]) 566 for { 567 v_0 := v.Args[0] 568 if v_0.Op != OpConst64F { 569 break 570 } 571 c := v_0.AuxInt 572 v_1 := v.Args[1] 573 if v_1.Op != OpConst64F { 574 break 575 } 576 d := v_1.AuxInt 577 v.reset(OpConst64F) 578 v.AuxInt = f2i(i2f(c) + i2f(d)) 579 return true 580 } 581 return false 582 } 583 func rewriteValuegeneric_OpAdd8(v *Value, config *Config) bool { 584 b := v.Block 585 _ = b 586 // match: (Add8 (Const8 [c]) (Const8 [d])) 587 // cond: 588 // result: (Const8 [int64(int8(c+d))]) 589 for { 590 v_0 := v.Args[0] 591 if v_0.Op != OpConst8 { 592 break 593 } 594 c := v_0.AuxInt 595 v_1 := v.Args[1] 596 if v_1.Op != OpConst8 { 597 break 598 } 599 d := v_1.AuxInt 600 v.reset(OpConst8) 601 v.AuxInt = int64(int8(c + d)) 602 return true 603 } 604 // match: (Add8 x (Const8 <t> [c])) 605 // cond: x.Op != OpConst8 606 // result: (Add8 (Const8 <t> [c]) x) 607 for { 608 x := v.Args[0] 609 v_1 := v.Args[1] 610 if v_1.Op != OpConst8 { 611 break 612 } 613 t := v_1.Type 614 c := v_1.AuxInt 615 if !(x.Op != OpConst8) { 616 break 617 } 618 v.reset(OpAdd8) 619 v0 := b.NewValue0(v.Line, OpConst8, t) 620 v0.AuxInt = c 621 v.AddArg(v0) 622 v.AddArg(x) 623 return true 
624 } 625 // match: (Add8 (Const8 [0]) x) 626 // cond: 627 // result: x 628 for { 629 v_0 := v.Args[0] 630 if v_0.Op != OpConst8 { 631 break 632 } 633 if v_0.AuxInt != 0 { 634 break 635 } 636 x := v.Args[1] 637 v.reset(OpCopy) 638 v.Type = x.Type 639 v.AddArg(x) 640 return true 641 } 642 return false 643 } 644 func rewriteValuegeneric_OpAddPtr(v *Value, config *Config) bool { 645 b := v.Block 646 _ = b 647 // match: (AddPtr <t> x (Const64 [c])) 648 // cond: 649 // result: (OffPtr <t> x [c]) 650 for { 651 t := v.Type 652 x := v.Args[0] 653 v_1 := v.Args[1] 654 if v_1.Op != OpConst64 { 655 break 656 } 657 c := v_1.AuxInt 658 v.reset(OpOffPtr) 659 v.Type = t 660 v.AddArg(x) 661 v.AuxInt = c 662 return true 663 } 664 return false 665 } 666 func rewriteValuegeneric_OpAnd16(v *Value, config *Config) bool { 667 b := v.Block 668 _ = b 669 // match: (And16 x (Const16 <t> [c])) 670 // cond: x.Op != OpConst16 671 // result: (And16 (Const16 <t> [c]) x) 672 for { 673 x := v.Args[0] 674 v_1 := v.Args[1] 675 if v_1.Op != OpConst16 { 676 break 677 } 678 t := v_1.Type 679 c := v_1.AuxInt 680 if !(x.Op != OpConst16) { 681 break 682 } 683 v.reset(OpAnd16) 684 v0 := b.NewValue0(v.Line, OpConst16, t) 685 v0.AuxInt = c 686 v.AddArg(v0) 687 v.AddArg(x) 688 return true 689 } 690 // match: (And16 x x) 691 // cond: 692 // result: x 693 for { 694 x := v.Args[0] 695 if x != v.Args[1] { 696 break 697 } 698 v.reset(OpCopy) 699 v.Type = x.Type 700 v.AddArg(x) 701 return true 702 } 703 // match: (And16 (Const16 [-1]) x) 704 // cond: 705 // result: x 706 for { 707 v_0 := v.Args[0] 708 if v_0.Op != OpConst16 { 709 break 710 } 711 if v_0.AuxInt != -1 { 712 break 713 } 714 x := v.Args[1] 715 v.reset(OpCopy) 716 v.Type = x.Type 717 v.AddArg(x) 718 return true 719 } 720 // match: (And16 (Const16 [0]) _) 721 // cond: 722 // result: (Const16 [0]) 723 for { 724 v_0 := v.Args[0] 725 if v_0.Op != OpConst16 { 726 break 727 } 728 if v_0.AuxInt != 0 { 729 break 730 } 731 v.reset(OpConst16) 732 v.AuxInt = 0 733 
return true 734 } 735 return false 736 } 737 func rewriteValuegeneric_OpAnd32(v *Value, config *Config) bool { 738 b := v.Block 739 _ = b 740 // match: (And32 x (Const32 <t> [c])) 741 // cond: x.Op != OpConst32 742 // result: (And32 (Const32 <t> [c]) x) 743 for { 744 x := v.Args[0] 745 v_1 := v.Args[1] 746 if v_1.Op != OpConst32 { 747 break 748 } 749 t := v_1.Type 750 c := v_1.AuxInt 751 if !(x.Op != OpConst32) { 752 break 753 } 754 v.reset(OpAnd32) 755 v0 := b.NewValue0(v.Line, OpConst32, t) 756 v0.AuxInt = c 757 v.AddArg(v0) 758 v.AddArg(x) 759 return true 760 } 761 // match: (And32 x x) 762 // cond: 763 // result: x 764 for { 765 x := v.Args[0] 766 if x != v.Args[1] { 767 break 768 } 769 v.reset(OpCopy) 770 v.Type = x.Type 771 v.AddArg(x) 772 return true 773 } 774 // match: (And32 (Const32 [-1]) x) 775 // cond: 776 // result: x 777 for { 778 v_0 := v.Args[0] 779 if v_0.Op != OpConst32 { 780 break 781 } 782 if v_0.AuxInt != -1 { 783 break 784 } 785 x := v.Args[1] 786 v.reset(OpCopy) 787 v.Type = x.Type 788 v.AddArg(x) 789 return true 790 } 791 // match: (And32 (Const32 [0]) _) 792 // cond: 793 // result: (Const32 [0]) 794 for { 795 v_0 := v.Args[0] 796 if v_0.Op != OpConst32 { 797 break 798 } 799 if v_0.AuxInt != 0 { 800 break 801 } 802 v.reset(OpConst32) 803 v.AuxInt = 0 804 return true 805 } 806 return false 807 } 808 func rewriteValuegeneric_OpAnd64(v *Value, config *Config) bool { 809 b := v.Block 810 _ = b 811 // match: (And64 x (Const64 <t> [c])) 812 // cond: x.Op != OpConst64 813 // result: (And64 (Const64 <t> [c]) x) 814 for { 815 x := v.Args[0] 816 v_1 := v.Args[1] 817 if v_1.Op != OpConst64 { 818 break 819 } 820 t := v_1.Type 821 c := v_1.AuxInt 822 if !(x.Op != OpConst64) { 823 break 824 } 825 v.reset(OpAnd64) 826 v0 := b.NewValue0(v.Line, OpConst64, t) 827 v0.AuxInt = c 828 v.AddArg(v0) 829 v.AddArg(x) 830 return true 831 } 832 // match: (And64 x x) 833 // cond: 834 // result: x 835 for { 836 x := v.Args[0] 837 if x != v.Args[1] { 838 break 839 } 840 
v.reset(OpCopy) 841 v.Type = x.Type 842 v.AddArg(x) 843 return true 844 } 845 // match: (And64 (Const64 [-1]) x) 846 // cond: 847 // result: x 848 for { 849 v_0 := v.Args[0] 850 if v_0.Op != OpConst64 { 851 break 852 } 853 if v_0.AuxInt != -1 { 854 break 855 } 856 x := v.Args[1] 857 v.reset(OpCopy) 858 v.Type = x.Type 859 v.AddArg(x) 860 return true 861 } 862 // match: (And64 (Const64 [0]) _) 863 // cond: 864 // result: (Const64 [0]) 865 for { 866 v_0 := v.Args[0] 867 if v_0.Op != OpConst64 { 868 break 869 } 870 if v_0.AuxInt != 0 { 871 break 872 } 873 v.reset(OpConst64) 874 v.AuxInt = 0 875 return true 876 } 877 // match: (And64 <t> (Const64 [y]) x) 878 // cond: nlz(y) + nto(y) == 64 && nto(y) >= 32 879 // result: (Rsh64Ux64 (Lsh64x64 <t> x (Const64 <t> [nlz(y)])) (Const64 <t> [nlz(y)])) 880 for { 881 t := v.Type 882 v_0 := v.Args[0] 883 if v_0.Op != OpConst64 { 884 break 885 } 886 y := v_0.AuxInt 887 x := v.Args[1] 888 if !(nlz(y)+nto(y) == 64 && nto(y) >= 32) { 889 break 890 } 891 v.reset(OpRsh64Ux64) 892 v0 := b.NewValue0(v.Line, OpLsh64x64, t) 893 v0.AddArg(x) 894 v1 := b.NewValue0(v.Line, OpConst64, t) 895 v1.AuxInt = nlz(y) 896 v0.AddArg(v1) 897 v.AddArg(v0) 898 v2 := b.NewValue0(v.Line, OpConst64, t) 899 v2.AuxInt = nlz(y) 900 v.AddArg(v2) 901 return true 902 } 903 // match: (And64 <t> (Const64 [y]) x) 904 // cond: nlo(y) + ntz(y) == 64 && ntz(y) >= 32 905 // result: (Lsh64x64 (Rsh64Ux64 <t> x (Const64 <t> [ntz(y)])) (Const64 <t> [ntz(y)])) 906 for { 907 t := v.Type 908 v_0 := v.Args[0] 909 if v_0.Op != OpConst64 { 910 break 911 } 912 y := v_0.AuxInt 913 x := v.Args[1] 914 if !(nlo(y)+ntz(y) == 64 && ntz(y) >= 32) { 915 break 916 } 917 v.reset(OpLsh64x64) 918 v0 := b.NewValue0(v.Line, OpRsh64Ux64, t) 919 v0.AddArg(x) 920 v1 := b.NewValue0(v.Line, OpConst64, t) 921 v1.AuxInt = ntz(y) 922 v0.AddArg(v1) 923 v.AddArg(v0) 924 v2 := b.NewValue0(v.Line, OpConst64, t) 925 v2.AuxInt = ntz(y) 926 v.AddArg(v2) 927 return true 928 } 929 return false 930 } 931 func 
rewriteValuegeneric_OpAnd8(v *Value, config *Config) bool { 932 b := v.Block 933 _ = b 934 // match: (And8 x (Const8 <t> [c])) 935 // cond: x.Op != OpConst8 936 // result: (And8 (Const8 <t> [c]) x) 937 for { 938 x := v.Args[0] 939 v_1 := v.Args[1] 940 if v_1.Op != OpConst8 { 941 break 942 } 943 t := v_1.Type 944 c := v_1.AuxInt 945 if !(x.Op != OpConst8) { 946 break 947 } 948 v.reset(OpAnd8) 949 v0 := b.NewValue0(v.Line, OpConst8, t) 950 v0.AuxInt = c 951 v.AddArg(v0) 952 v.AddArg(x) 953 return true 954 } 955 // match: (And8 x x) 956 // cond: 957 // result: x 958 for { 959 x := v.Args[0] 960 if x != v.Args[1] { 961 break 962 } 963 v.reset(OpCopy) 964 v.Type = x.Type 965 v.AddArg(x) 966 return true 967 } 968 // match: (And8 (Const8 [-1]) x) 969 // cond: 970 // result: x 971 for { 972 v_0 := v.Args[0] 973 if v_0.Op != OpConst8 { 974 break 975 } 976 if v_0.AuxInt != -1 { 977 break 978 } 979 x := v.Args[1] 980 v.reset(OpCopy) 981 v.Type = x.Type 982 v.AddArg(x) 983 return true 984 } 985 // match: (And8 (Const8 [0]) _) 986 // cond: 987 // result: (Const8 [0]) 988 for { 989 v_0 := v.Args[0] 990 if v_0.Op != OpConst8 { 991 break 992 } 993 if v_0.AuxInt != 0 { 994 break 995 } 996 v.reset(OpConst8) 997 v.AuxInt = 0 998 return true 999 } 1000 return false 1001 } 1002 func rewriteValuegeneric_OpArg(v *Value, config *Config) bool { 1003 b := v.Block 1004 _ = b 1005 // match: (Arg {n} [off]) 1006 // cond: v.Type.IsString() 1007 // result: (StringMake (Arg <config.fe.TypeBytePtr()> {n} [off]) (Arg <config.fe.TypeInt()> {n} [off+config.PtrSize])) 1008 for { 1009 n := v.Aux 1010 off := v.AuxInt 1011 if !(v.Type.IsString()) { 1012 break 1013 } 1014 v.reset(OpStringMake) 1015 v0 := b.NewValue0(v.Line, OpArg, config.fe.TypeBytePtr()) 1016 v0.Aux = n 1017 v0.AuxInt = off 1018 v.AddArg(v0) 1019 v1 := b.NewValue0(v.Line, OpArg, config.fe.TypeInt()) 1020 v1.Aux = n 1021 v1.AuxInt = off + config.PtrSize 1022 v.AddArg(v1) 1023 return true 1024 } 1025 // match: (Arg {n} [off]) 1026 // cond: 
v.Type.IsSlice() 1027 // result: (SliceMake (Arg <v.Type.ElemType().PtrTo()> {n} [off]) (Arg <config.fe.TypeInt()> {n} [off+config.PtrSize]) (Arg <config.fe.TypeInt()> {n} [off+2*config.PtrSize])) 1028 for { 1029 n := v.Aux 1030 off := v.AuxInt 1031 if !(v.Type.IsSlice()) { 1032 break 1033 } 1034 v.reset(OpSliceMake) 1035 v0 := b.NewValue0(v.Line, OpArg, v.Type.ElemType().PtrTo()) 1036 v0.Aux = n 1037 v0.AuxInt = off 1038 v.AddArg(v0) 1039 v1 := b.NewValue0(v.Line, OpArg, config.fe.TypeInt()) 1040 v1.Aux = n 1041 v1.AuxInt = off + config.PtrSize 1042 v.AddArg(v1) 1043 v2 := b.NewValue0(v.Line, OpArg, config.fe.TypeInt()) 1044 v2.Aux = n 1045 v2.AuxInt = off + 2*config.PtrSize 1046 v.AddArg(v2) 1047 return true 1048 } 1049 // match: (Arg {n} [off]) 1050 // cond: v.Type.IsInterface() 1051 // result: (IMake (Arg <config.fe.TypeBytePtr()> {n} [off]) (Arg <config.fe.TypeBytePtr()> {n} [off+config.PtrSize])) 1052 for { 1053 n := v.Aux 1054 off := v.AuxInt 1055 if !(v.Type.IsInterface()) { 1056 break 1057 } 1058 v.reset(OpIMake) 1059 v0 := b.NewValue0(v.Line, OpArg, config.fe.TypeBytePtr()) 1060 v0.Aux = n 1061 v0.AuxInt = off 1062 v.AddArg(v0) 1063 v1 := b.NewValue0(v.Line, OpArg, config.fe.TypeBytePtr()) 1064 v1.Aux = n 1065 v1.AuxInt = off + config.PtrSize 1066 v.AddArg(v1) 1067 return true 1068 } 1069 // match: (Arg {n} [off]) 1070 // cond: v.Type.IsComplex() && v.Type.Size() == 16 1071 // result: (ComplexMake (Arg <config.fe.TypeFloat64()> {n} [off]) (Arg <config.fe.TypeFloat64()> {n} [off+8])) 1072 for { 1073 n := v.Aux 1074 off := v.AuxInt 1075 if !(v.Type.IsComplex() && v.Type.Size() == 16) { 1076 break 1077 } 1078 v.reset(OpComplexMake) 1079 v0 := b.NewValue0(v.Line, OpArg, config.fe.TypeFloat64()) 1080 v0.Aux = n 1081 v0.AuxInt = off 1082 v.AddArg(v0) 1083 v1 := b.NewValue0(v.Line, OpArg, config.fe.TypeFloat64()) 1084 v1.Aux = n 1085 v1.AuxInt = off + 8 1086 v.AddArg(v1) 1087 return true 1088 } 1089 // match: (Arg {n} [off]) 1090 // cond: v.Type.IsComplex() && 
v.Type.Size() == 8 1091 // result: (ComplexMake (Arg <config.fe.TypeFloat32()> {n} [off]) (Arg <config.fe.TypeFloat32()> {n} [off+4])) 1092 for { 1093 n := v.Aux 1094 off := v.AuxInt 1095 if !(v.Type.IsComplex() && v.Type.Size() == 8) { 1096 break 1097 } 1098 v.reset(OpComplexMake) 1099 v0 := b.NewValue0(v.Line, OpArg, config.fe.TypeFloat32()) 1100 v0.Aux = n 1101 v0.AuxInt = off 1102 v.AddArg(v0) 1103 v1 := b.NewValue0(v.Line, OpArg, config.fe.TypeFloat32()) 1104 v1.Aux = n 1105 v1.AuxInt = off + 4 1106 v.AddArg(v1) 1107 return true 1108 } 1109 // match: (Arg <t>) 1110 // cond: t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t) 1111 // result: (StructMake0) 1112 for { 1113 t := v.Type 1114 if !(t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t)) { 1115 break 1116 } 1117 v.reset(OpStructMake0) 1118 return true 1119 } 1120 // match: (Arg <t> {n} [off]) 1121 // cond: t.IsStruct() && t.NumFields() == 1 && config.fe.CanSSA(t) 1122 // result: (StructMake1 (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)])) 1123 for { 1124 t := v.Type 1125 n := v.Aux 1126 off := v.AuxInt 1127 if !(t.IsStruct() && t.NumFields() == 1 && config.fe.CanSSA(t)) { 1128 break 1129 } 1130 v.reset(OpStructMake1) 1131 v0 := b.NewValue0(v.Line, OpArg, t.FieldType(0)) 1132 v0.Aux = n 1133 v0.AuxInt = off + t.FieldOff(0) 1134 v.AddArg(v0) 1135 return true 1136 } 1137 // match: (Arg <t> {n} [off]) 1138 // cond: t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t) 1139 // result: (StructMake2 (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)]) (Arg <t.FieldType(1)> {n} [off+t.FieldOff(1)])) 1140 for { 1141 t := v.Type 1142 n := v.Aux 1143 off := v.AuxInt 1144 if !(t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t)) { 1145 break 1146 } 1147 v.reset(OpStructMake2) 1148 v0 := b.NewValue0(v.Line, OpArg, t.FieldType(0)) 1149 v0.Aux = n 1150 v0.AuxInt = off + t.FieldOff(0) 1151 v.AddArg(v0) 1152 v1 := b.NewValue0(v.Line, OpArg, t.FieldType(1)) 1153 v1.Aux = n 1154 v1.AuxInt = off + 
t.FieldOff(1) 1155 v.AddArg(v1) 1156 return true 1157 } 1158 // match: (Arg <t> {n} [off]) 1159 // cond: t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t) 1160 // result: (StructMake3 (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)]) (Arg <t.FieldType(1)> {n} [off+t.FieldOff(1)]) (Arg <t.FieldType(2)> {n} [off+t.FieldOff(2)])) 1161 for { 1162 t := v.Type 1163 n := v.Aux 1164 off := v.AuxInt 1165 if !(t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t)) { 1166 break 1167 } 1168 v.reset(OpStructMake3) 1169 v0 := b.NewValue0(v.Line, OpArg, t.FieldType(0)) 1170 v0.Aux = n 1171 v0.AuxInt = off + t.FieldOff(0) 1172 v.AddArg(v0) 1173 v1 := b.NewValue0(v.Line, OpArg, t.FieldType(1)) 1174 v1.Aux = n 1175 v1.AuxInt = off + t.FieldOff(1) 1176 v.AddArg(v1) 1177 v2 := b.NewValue0(v.Line, OpArg, t.FieldType(2)) 1178 v2.Aux = n 1179 v2.AuxInt = off + t.FieldOff(2) 1180 v.AddArg(v2) 1181 return true 1182 } 1183 // match: (Arg <t> {n} [off]) 1184 // cond: t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t) 1185 // result: (StructMake4 (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)]) (Arg <t.FieldType(1)> {n} [off+t.FieldOff(1)]) (Arg <t.FieldType(2)> {n} [off+t.FieldOff(2)]) (Arg <t.FieldType(3)> {n} [off+t.FieldOff(3)])) 1186 for { 1187 t := v.Type 1188 n := v.Aux 1189 off := v.AuxInt 1190 if !(t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t)) { 1191 break 1192 } 1193 v.reset(OpStructMake4) 1194 v0 := b.NewValue0(v.Line, OpArg, t.FieldType(0)) 1195 v0.Aux = n 1196 v0.AuxInt = off + t.FieldOff(0) 1197 v.AddArg(v0) 1198 v1 := b.NewValue0(v.Line, OpArg, t.FieldType(1)) 1199 v1.Aux = n 1200 v1.AuxInt = off + t.FieldOff(1) 1201 v.AddArg(v1) 1202 v2 := b.NewValue0(v.Line, OpArg, t.FieldType(2)) 1203 v2.Aux = n 1204 v2.AuxInt = off + t.FieldOff(2) 1205 v.AddArg(v2) 1206 v3 := b.NewValue0(v.Line, OpArg, t.FieldType(3)) 1207 v3.Aux = n 1208 v3.AuxInt = off + t.FieldOff(3) 1209 v.AddArg(v3) 1210 return true 1211 } 1212 return false 1213 } 1214 func 
rewriteValuegeneric_OpArrayIndex(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ArrayIndex <t> [0] x:(Load ptr mem))
	// cond:
	// result: @x.Block (Load <t> ptr mem)
	for {
		t := v.Type
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		if x.Op != OpLoad {
			break
		}
		ptr := x.Args[0]
		mem := x.Args[1]
		// Rewrite is placed in the Load's block, not v's (the "@x.Block" form).
		b = x.Block
		v0 := b.NewValue0(v.Line, OpLoad, t)
		v.reset(OpCopy)
		v.AddArg(v0)
		v0.AddArg(ptr)
		v0.AddArg(mem)
		return true
	}
	return false
}

// rewriteValuegeneric_OpCom16 simplifies Com16: a double bitwise-complement
// of a 16-bit value is the value itself.
func rewriteValuegeneric_OpCom16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Com16 (Com16 x))
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpCom16 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpCom32 simplifies Com32: a double bitwise-complement
// of a 32-bit value is the value itself.
func rewriteValuegeneric_OpCom32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Com32 (Com32 x))
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpCom32 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpCom64 simplifies Com64: a double bitwise-complement
// of a 64-bit value is the value itself.
func rewriteValuegeneric_OpCom64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Com64 (Com64 x))
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpCom64 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpCom8 simplifies Com8: a double bitwise-complement
// of an 8-bit value is the value itself.
func rewriteValuegeneric_OpCom8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Com8 (Com8 x))
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpCom8 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpConstInterface lowers a constant (nil) interface
// into its two-word representation: an IMake of two nil pointers.
func rewriteValuegeneric_OpConstInterface(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ConstInterface)
	// cond:
	// result: (IMake (ConstNil <config.fe.TypeBytePtr()>) (ConstNil <config.fe.TypeBytePtr()>))
	for {
		v.reset(OpIMake)
		v0 := b.NewValue0(v.Line, OpConstNil, config.fe.TypeBytePtr())
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpConstNil, config.fe.TypeBytePtr())
		v.AddArg(v1)
		return true
	}
	return false
}

// rewriteValuegeneric_OpConstSlice lowers a constant (nil) slice into its
// three-word SliceMake representation; the len/cap constants are sized by
// the target's pointer width (Const32 on 32-bit, Const64 on 64-bit).
func rewriteValuegeneric_OpConstSlice(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ConstSlice)
	// cond: config.PtrSize == 4
	// result: (SliceMake (ConstNil <v.Type.ElemType().PtrTo()>) (Const32 <config.fe.TypeInt()> [0]) (Const32 <config.fe.TypeInt()> [0]))
	for {
		if !(config.PtrSize == 4) {
			break
		}
		v.reset(OpSliceMake)
		v0 := b.NewValue0(v.Line, OpConstNil, v.Type.ElemType().PtrTo())
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpConst32, config.fe.TypeInt())
		v1.AuxInt = 0
		v.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpConst32, config.fe.TypeInt())
		v2.AuxInt = 0
		v.AddArg(v2)
		return true
	}
	// match: (ConstSlice)
	// cond: config.PtrSize == 8
	// result: (SliceMake (ConstNil <v.Type.ElemType().PtrTo()>) (Const64 <config.fe.TypeInt()> [0]) (Const64 <config.fe.TypeInt()> [0]))
	for {
		if !(config.PtrSize == 8) {
			break
		}
		v.reset(OpSliceMake)
		v0 := b.NewValue0(v.Line, OpConstNil, v.Type.ElemType().PtrTo())
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpConst64, config.fe.TypeInt())
		v1.AuxInt = 0
		v.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeInt())
		v2.AuxInt = 0
		v.AddArg(v2)
		return true
	}
	return false
}

// rewriteValuegeneric_OpConstString lowers a constant string into its
// two-word StringMake representation. Empty strings get a nil data pointer;
// non-empty strings point at static string data (Addr off SB). The length
// constant is sized by the target's pointer width.
func rewriteValuegeneric_OpConstString(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ConstString {s})
	// cond: config.PtrSize == 4 && s.(string) == ""
	// result: (StringMake (ConstNil) (Const32 <config.fe.TypeInt()> [0]))
	for {
		s := v.Aux
		if !(config.PtrSize == 4 && s.(string) == "") {
			break
		}
		v.reset(OpStringMake)
		v0 := b.NewValue0(v.Line, OpConstNil, config.fe.TypeBytePtr())
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpConst32, config.fe.TypeInt())
		v1.AuxInt = 0
		v.AddArg(v1)
		return true
	}
	// match: (ConstString {s})
	// cond: config.PtrSize == 8 && s.(string) == ""
	// result: (StringMake (ConstNil) (Const64 <config.fe.TypeInt()> [0]))
	for {
		s := v.Aux
		if !(config.PtrSize == 8 && s.(string) == "") {
			break
		}
		v.reset(OpStringMake)
		v0 := b.NewValue0(v.Line, OpConstNil, config.fe.TypeBytePtr())
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpConst64, config.fe.TypeInt())
		v1.AuxInt = 0
		v.AddArg(v1)
		return true
	}
	// match: (ConstString {s})
	// cond: config.PtrSize == 4 && s.(string) != ""
	// result: (StringMake (Addr <config.fe.TypeBytePtr()> {config.fe.StringData(s.(string))} (SB)) (Const32 <config.fe.TypeInt()> [int64(len(s.(string)))]))
	for {
		s := v.Aux
		if !(config.PtrSize == 4 && s.(string) != "") {
			break
		}
		v.reset(OpStringMake)
		v0 := b.NewValue0(v.Line, OpAddr, config.fe.TypeBytePtr())
		v0.Aux = config.fe.StringData(s.(string))
		v1 := b.NewValue0(v.Line, OpSB, config.fe.TypeUintptr())
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Line, OpConst32, config.fe.TypeInt())
		v2.AuxInt = int64(len(s.(string)))
		v.AddArg(v2)
		return true
	}
	// match: (ConstString {s})
	// cond: config.PtrSize == 8 && s.(string) != ""
	// result: (StringMake (Addr <config.fe.TypeBytePtr()> {config.fe.StringData(s.(string))} (SB)) (Const64 <config.fe.TypeInt()> [int64(len(s.(string)))]))
	for {
		s := v.Aux
		if !(config.PtrSize == 8 && s.(string) != "") {
			break
		}
		v.reset(OpStringMake)
		v0 := b.NewValue0(v.Line, OpAddr, config.fe.TypeBytePtr())
		v0.Aux = config.fe.StringData(s.(string))
		v1 := b.NewValue0(v.Line, OpSB, config.fe.TypeUintptr())
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeInt())
		v2.AuxInt = int64(len(s.(string)))
		v.AddArg(v2)
		return true
	}
	return false
}

// rewriteValuegeneric_OpConvert simplifies Convert (unsafe.Pointer round-trip)
// chains: pointer arithmetic threaded through Convert collapses to plain
// Add64, and Convert of Convert with the same memory collapses to the
// original pointer.
func rewriteValuegeneric_OpConvert(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Convert (Add64 (Convert ptr mem) off) mem)
	// cond:
	// result: (Add64 ptr off)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAdd64 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConvert {
			break
		}
		ptr := v_0_0.Args[0]
		mem := v_0_0.Args[1]
		off := v_0.Args[1]
		if mem != v.Args[1] {
			break
		}
		v.reset(OpAdd64)
		v.AddArg(ptr)
		v.AddArg(off)
		return true
	}
	// match: (Convert (Add64 off (Convert ptr mem)) mem)
	// cond:
	// result: (Add64 ptr off)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAdd64 {
			break
		}
		off := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConvert {
			break
		}
		ptr := v_0_1.Args[0]
		mem := v_0_1.Args[1]
		if mem != v.Args[1] {
			break
		}
		v.reset(OpAdd64)
		v.AddArg(ptr)
		v.AddArg(off)
		return true
	}
	// match: (Convert (Convert ptr mem) mem)
	// cond:
	// result: ptr
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConvert {
			break
		}
		ptr := v_0.Args[0]
		mem := v_0.Args[1]
		if mem != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = ptr.Type
		v.AddArg(ptr)
		return true
	}
	return false
}

// rewriteValuegeneric_OpCvt32Fto64F constant-folds a float32->float64
// conversion. The AuxInt carries the value as a float64 bit pattern, so
// widening needs no re-encoding.
func rewriteValuegeneric_OpCvt32Fto64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt32Fto64F (Const32F [c]))
	// cond:
	// result: (Const64F [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32F {
			break
		}
		c := v_0.AuxInt
		v.reset(OpConst64F)
		v.AuxInt = c
		return true
	}
	return false
}

// rewriteValuegeneric_OpCvt64Fto32F constant-folds a float64->float32
// conversion, rounding through float32 (i2f32) before re-encoding (f2i).
func rewriteValuegeneric_OpCvt64Fto32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt64Fto32F (Const64F [c]))
	// cond:
	// result: (Const32F [f2i(float64(i2f32(c)))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64F {
			break
		}
		c := v_0.AuxInt
		v.reset(OpConst32F)
		v.AuxInt = f2i(float64(i2f32(c)))
		return true
	}
	return false
}

// rewriteValuegeneric_OpDiv64 strength-reduces signed 64-bit division by a
// constant into multiply-high/shift sequences using the signed magic numbers
// (smagic64m/smagic64s). The four rules cover the sign combinations of the
// divisor c and the magic multiplier m.
func rewriteValuegeneric_OpDiv64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div64 <t> x (Const64 [c]))
	// cond: c > 0 && smagic64ok(c) && smagic64m(c) > 0
	// result: (Sub64 <t> (Rsh64x64 <t> (Hmul64 <t> (Const64 <t> [smagic64m(c)]) x) (Const64 <t> [smagic64s(c)])) (Rsh64x64 <t> x (Const64 <t> [63])))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(c > 0 && smagic64ok(c) && smagic64m(c) > 0) {
			break
		}
		v.reset(OpSub64)
		v.Type = t
		v0 := b.NewValue0(v.Line, OpRsh64x64, t)
		v1 := b.NewValue0(v.Line, OpHmul64, t)
		v2 := b.NewValue0(v.Line, OpConst64, t)
		v2.AuxInt = smagic64m(c)
		v1.AddArg(v2)
		v1.AddArg(x)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Line, OpConst64, t)
		v3.AuxInt = smagic64s(c)
		v0.AddArg(v3)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Line, OpRsh64x64, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Line, OpConst64, t)
		v5.AuxInt = 63
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
	// match: (Div64 <t> x (Const64 [c]))
	// cond: c > 0 && smagic64ok(c) && smagic64m(c) < 0
	// result: (Sub64 <t> (Rsh64x64 <t> (Add64 <t> (Hmul64 <t> (Const64 <t> [smagic64m(c)]) x) x) (Const64 <t> [smagic64s(c)])) (Rsh64x64 <t> x (Const64 <t> [63])))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(c > 0 && smagic64ok(c) && smagic64m(c) < 0) {
			break
		}
		v.reset(OpSub64)
		v.Type = t
		v0 := b.NewValue0(v.Line, OpRsh64x64, t)
		v1 := b.NewValue0(v.Line, OpAdd64, t)
		v2 := b.NewValue0(v.Line, OpHmul64, t)
		v3 := b.NewValue0(v.Line, OpConst64, t)
		v3.AuxInt = smagic64m(c)
		v2.AddArg(v3)
		v2.AddArg(x)
		v1.AddArg(v2)
		v1.AddArg(x)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Line, OpConst64, t)
		v4.AuxInt = smagic64s(c)
		v0.AddArg(v4)
		v.AddArg(v0)
		v5 := b.NewValue0(v.Line, OpRsh64x64, t)
		v5.AddArg(x)
		v6 := b.NewValue0(v.Line, OpConst64, t)
		v6.AuxInt = 63
		v5.AddArg(v6)
		v.AddArg(v5)
		return true
	}
	// match: (Div64 <t> x (Const64 [c]))
	// cond: c < 0 && smagic64ok(c) && smagic64m(c) > 0
	// result: (Neg64 <t> (Sub64 <t> (Rsh64x64 <t> (Hmul64 <t> (Const64 <t> [smagic64m(c)]) x) (Const64 <t> [smagic64s(c)])) (Rsh64x64 <t> x (Const64 <t> [63]))))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(c < 0 && smagic64ok(c) && smagic64m(c) > 0) {
			break
		}
		v.reset(OpNeg64)
		v.Type = t
		v0 := b.NewValue0(v.Line, OpSub64, t)
		v1 := b.NewValue0(v.Line, OpRsh64x64, t)
		v2 := b.NewValue0(v.Line, OpHmul64, t)
		v3 := b.NewValue0(v.Line, OpConst64, t)
		v3.AuxInt = smagic64m(c)
		v2.AddArg(v3)
		v2.AddArg(x)
		v1.AddArg(v2)
		v4 := b.NewValue0(v.Line, OpConst64, t)
		v4.AuxInt = smagic64s(c)
		v1.AddArg(v4)
		v0.AddArg(v1)
		v5 := b.NewValue0(v.Line, OpRsh64x64, t)
		v5.AddArg(x)
		v6 := b.NewValue0(v.Line, OpConst64, t)
		v6.AuxInt = 63
		v5.AddArg(v6)
		v0.AddArg(v5)
		v.AddArg(v0)
		return true
	}
	// match: (Div64 <t> x (Const64 [c]))
	// cond: c < 0 && smagic64ok(c) && smagic64m(c) < 0
	// result: (Neg64 <t> (Sub64 <t> (Rsh64x64 <t> (Add64 <t> (Hmul64 <t> (Const64 <t> [smagic64m(c)]) x) x) (Const64 <t> [smagic64s(c)])) (Rsh64x64 <t> x (Const64 <t> [63]))))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(c < 0 && smagic64ok(c) && smagic64m(c) < 0) {
			break
		}
		v.reset(OpNeg64)
		v.Type = t
		v0 := b.NewValue0(v.Line, OpSub64, t)
		v1 := b.NewValue0(v.Line, OpRsh64x64, t)
		v2 := b.NewValue0(v.Line, OpAdd64, t)
		v3 := b.NewValue0(v.Line, OpHmul64, t)
		v4 := b.NewValue0(v.Line, OpConst64, t)
		v4.AuxInt = smagic64m(c)
		v3.AddArg(v4)
		v3.AddArg(x)
		v2.AddArg(v3)
		v2.AddArg(x)
		v1.AddArg(v2)
		v5 := b.NewValue0(v.Line, OpConst64, t)
		v5.AuxInt = smagic64s(c)
		v1.AddArg(v5)
		v0.AddArg(v1)
		v6 := b.NewValue0(v.Line, OpRsh64x64, t)
		v6.AddArg(x)
		v7 := b.NewValue0(v.Line, OpConst64, t)
		v7.AuxInt = 63
		v6.AddArg(v7)
		v0.AddArg(v6)
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValuegeneric_OpDiv64u strength-reduces unsigned 64-bit division by
// a constant: a shift for powers of two, otherwise a multiply-high/shift
// sequence using the unsigned magic numbers (umagic64m/umagic64s), with an
// Avg64u-based variant when the "add" form of the magic algorithm is needed.
func rewriteValuegeneric_OpDiv64u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div64u <t> n (Const64 [c]))
	// cond: isPowerOfTwo(c)
	// result: (Rsh64Ux64 n (Const64 <t> [log2(c)]))
	for {
		t := v.Type
		n := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpRsh64Ux64)
		v.AddArg(n)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = log2(c)
		v.AddArg(v0)
		return true
	}
	// match: (Div64u <t> x (Const64 [c]))
	// cond: umagic64ok(c) && !umagic64a(c)
	// result: (Rsh64Ux64 (Hmul64u <t> (Const64 <t> [umagic64m(c)]) x) (Const64 <t> [umagic64s(c)]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(umagic64ok(c) && !umagic64a(c)) {
			break
		}
		v.reset(OpRsh64Ux64)
		v0 := b.NewValue0(v.Line, OpHmul64u, t)
		v1 := b.NewValue0(v.Line, OpConst64, t)
		v1.AuxInt = umagic64m(c)
		v0.AddArg(v1)
		v0.AddArg(x)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Line, OpConst64, t)
		v2.AuxInt = umagic64s(c)
		v.AddArg(v2)
		return true
	}
	// match: (Div64u <t> x (Const64 [c]))
	// cond: umagic64ok(c) && umagic64a(c)
	// result: (Rsh64Ux64 (Avg64u <t> (Hmul64u <t> x (Const64 <t> [umagic64m(c)])) x) (Const64 <t> [umagic64s(c)-1]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(umagic64ok(c) && umagic64a(c)) {
			break
		}
		v.reset(OpRsh64Ux64)
		v0 := b.NewValue0(v.Line, OpAvg64u, t)
		v1 := b.NewValue0(v.Line, OpHmul64u, t)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Line, OpConst64, t)
		v2.AuxInt = umagic64m(c)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v0.AddArg(x)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Line, OpConst64, t)
		v3.AuxInt = umagic64s(c) - 1
		v.AddArg(v3)
		return true
	}
	return false
}

// rewriteValuegeneric_OpEq16 simplifies 16-bit equality: x==x folds to true,
// a constant on the right is canonicalized to the left, constants are folded
// through Add16, and two constants fold to a ConstBool.
func rewriteValuegeneric_OpEq16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Eq16 x x)
	// cond:
	// result: (ConstBool [1])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (Eq16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x))
	// cond:
	// result: (Eq16 (Const16 <t> [int64(int16(c-d))]) x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpAdd16 {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst16 {
			break
		}
		if v_1_0.Type != t {
			break
		}
		d := v_1_0.AuxInt
		x := v_1.Args[1]
		v.reset(OpEq16)
		v0 := b.NewValue0(v.Line, OpConst16, t)
		v0.AuxInt = int64(int16(c - d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Eq16 x (Const16 <t> [c]))
	// cond: x.Op != OpConst16
	// result: (Eq16 (Const16 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst16) {
			break
		}
		v.reset(OpEq16)
		v0 := b.NewValue0(v.Line, OpConst16, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Eq16 (Const16 [c]) (Const16 [d]))
	// cond:
	// result: (ConstBool [b2i(c == d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c == d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpEq32 simplifies 32-bit equality; same rule set as
// the 16-bit version, truncating through int32.
func rewriteValuegeneric_OpEq32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Eq32 x x)
	// cond:
	// result: (ConstBool [1])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (Eq32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x))
	// cond:
	// result: (Eq32 (Const32 <t> [int64(int32(c-d))]) x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpAdd32 {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst32 {
			break
		}
		if v_1_0.Type != t {
			break
		}
		d := v_1_0.AuxInt
		x := v_1.Args[1]
		v.reset(OpEq32)
		v0 := b.NewValue0(v.Line, OpConst32, t)
		v0.AuxInt = int64(int32(c - d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Eq32 x (Const32 <t> [c]))
	// cond: x.Op != OpConst32
	// result: (Eq32 (Const32 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst32) {
			break
		}
		v.reset(OpEq32)
		v0 := b.NewValue0(v.Line, OpConst32, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Eq32 (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(c == d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c == d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpEq64 simplifies 64-bit equality; same rule set as
// the narrower versions, with no truncation needed (AuxInt is 64 bits).
func rewriteValuegeneric_OpEq64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Eq64 x x)
	// cond:
	// result: (ConstBool [1])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (Eq64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x))
	// cond:
	// result: (Eq64 (Const64 <t> [c-d]) x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpAdd64 {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst64 {
			break
		}
		if v_1_0.Type != t {
			break
		}
		d := v_1_0.AuxInt
		x := v_1.Args[1]
		v.reset(OpEq64)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = c - d
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Eq64 x (Const64 <t> [c]))
	// cond: x.Op != OpConst64
	// result: (Eq64 (Const64 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst64) {
			break
		}
		v.reset(OpEq64)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Eq64 (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (ConstBool [b2i(c == d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c == d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpEq8 simplifies 8-bit equality. Eq8 is also used for
// booleans, so in addition to the integer rules there are ConstBool rules:
// comparing with false becomes Not, comparing with true is the identity.
func rewriteValuegeneric_OpEq8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Eq8 x x)
	// cond:
	// result: (ConstBool [1])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (Eq8 (ConstBool [c]) (ConstBool [d]))
	// cond:
	// result: (ConstBool [b2i(c == d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConstBool {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConstBool {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c == d)
		return true
	}
	// match: (Eq8 (ConstBool [0]) x)
	// cond:
	// result: (Not x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConstBool {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		x := v.Args[1]
		v.reset(OpNot)
		v.AddArg(x)
		return true
	}
	// match: (Eq8 (ConstBool [1]) x)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConstBool {
			break
		}
		if v_0.AuxInt != 1 {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Eq8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x))
	// cond:
	// result: (Eq8 (Const8 <t> [int64(int8(c-d))]) x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpAdd8 {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst8 {
			break
		}
		if v_1_0.Type != t {
			break
		}
		d := v_1_0.AuxInt
		x := v_1.Args[1]
		v.reset(OpEq8)
		v0 := b.NewValue0(v.Line, OpConst8, t)
		v0.AuxInt = int64(int8(c - d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Eq8 x (Const8 <t> [c]))
	// cond: x.Op != OpConst8
	// result: (Eq8 (Const8 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst8) {
			break
		}
		v.reset(OpEq8)
		v0 := b.NewValue0(v.Line, OpConst8, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Eq8 x (ConstBool <t> [c]))
	// cond: x.Op != OpConstBool
	// result: (Eq8 (ConstBool <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConstBool {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConstBool) {
			break
		}
		v.reset(OpEq8)
		v0 := b.NewValue0(v.Line, OpConstBool, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Eq8 (Const8 [c]) (Const8 [d]))
	// cond:
	// result: (ConstBool [b2i(c == d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c == d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpEqInter lowers interface equality to pointer
// equality on the itab words.
func rewriteValuegeneric_OpEqInter(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (EqInter x y)
	// cond:
	// result: (EqPtr (ITab x) (ITab y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpEqPtr)
		v0 := b.NewValue0(v.Line, OpITab, config.fe.TypeBytePtr())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpITab, config.fe.TypeBytePtr())
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
	return false
}

// rewriteValuegeneric_OpEqPtr simplifies pointer comparison against nil
// (either operand order) into Not(IsNonNil p).
func rewriteValuegeneric_OpEqPtr(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (EqPtr p (ConstNil))
	// cond:
	// result: (Not (IsNonNil p))
	for {
		p := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConstNil {
			break
		}
		v.reset(OpNot)
		v0 := b.NewValue0(v.Line, OpIsNonNil, config.fe.TypeBool())
		v0.AddArg(p)
		v.AddArg(v0)
		return true
	}
	// match: (EqPtr (ConstNil) p)
	// cond:
	// result: (Not (IsNonNil p))
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConstNil {
			break
		}
		p := v.Args[1]
		v.reset(OpNot)
		v0 := b.NewValue0(v.Line, OpIsNonNil, config.fe.TypeBool())
		v0.AddArg(p)
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValuegeneric_OpEqSlice lowers slice equality (only valid against
// nil in Go) to pointer equality on the data pointers.
func rewriteValuegeneric_OpEqSlice(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (EqSlice x y)
	// cond:
	// result: (EqPtr (SlicePtr x) (SlicePtr y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpEqPtr)
		v0 := b.NewValue0(v.Line, OpSlicePtr, config.fe.TypeBytePtr())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpSlicePtr, config.fe.TypeBytePtr())
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
	return false
}

// rewriteValuegeneric_OpGeq16 constant-folds signed 16-bit >=.
func rewriteValuegeneric_OpGeq16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq16 (Const16 [c]) (Const16 [d]))
	// cond:
	// result: (ConstBool [b2i(c >= d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c >= d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpGeq16U constant-folds unsigned 16-bit >=.
func rewriteValuegeneric_OpGeq16U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq16U (Const16 [c]) (Const16 [d]))
	// cond:
	// result: (ConstBool [b2i(uint16(c) >= uint16(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint16(c) >= uint16(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpGeq32 constant-folds signed 32-bit >=.
func rewriteValuegeneric_OpGeq32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq32 (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(c >= d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c >= d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpGeq32U constant-folds unsigned 32-bit >=.
func rewriteValuegeneric_OpGeq32U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq32U (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(uint32(c) >= uint32(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint32(c) >= uint32(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpGeq64 constant-folds signed 64-bit >=.
func rewriteValuegeneric_OpGeq64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq64 (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (ConstBool [b2i(c >= d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c >= d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpGeq64U constant-folds unsigned 64-bit >=.
func rewriteValuegeneric_OpGeq64U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq64U (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (ConstBool [b2i(uint64(c) >= uint64(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint64(c) >= uint64(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpGeq8 constant-folds signed 8-bit >=.
func rewriteValuegeneric_OpGeq8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq8 (Const8 [c]) (Const8 [d]))
	// cond:
	// result: (ConstBool [b2i(c >= d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c >= d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpGeq8U constant-folds unsigned 8-bit >=.
func rewriteValuegeneric_OpGeq8U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq8U (Const8 [c]) (Const8 [d]))
	// cond:
	// result: (ConstBool [b2i(uint8(c) >= uint8(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint8(c) >= uint8(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpGreater16 constant-folds signed 16-bit >.
func rewriteValuegeneric_OpGreater16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater16 (Const16 [c]) (Const16 [d]))
	// cond:
	// result: (ConstBool [b2i(c > d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c > d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpGreater16U constant-folds unsigned 16-bit >.
func rewriteValuegeneric_OpGreater16U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater16U (Const16 [c]) (Const16 [d]))
	// cond:
	// result: (ConstBool [b2i(uint16(c) > uint16(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint16(c) > uint16(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpGreater32 constant-folds signed 32-bit >.
func rewriteValuegeneric_OpGreater32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater32 (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(c > d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c > d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpGreater32U constant-folds unsigned 32-bit >.
func rewriteValuegeneric_OpGreater32U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater32U (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(uint32(c) > uint32(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint32(c) > uint32(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpGreater64 constant-folds signed 64-bit >.
func rewriteValuegeneric_OpGreater64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater64 (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (ConstBool [b2i(c > d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c > d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpGreater64U constant-folds unsigned 64-bit >.
func rewriteValuegeneric_OpGreater64U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater64U (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (ConstBool [b2i(uint64(c) > uint64(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint64(c) > uint64(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpGreater8 constant-folds signed 8-bit >.
func rewriteValuegeneric_OpGreater8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater8 (Const8 [c]) (Const8 [d]))
	// cond:
	// result: (ConstBool [b2i(c > d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c > d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpGreater8U constant-folds unsigned 8-bit >.
func rewriteValuegeneric_OpGreater8U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater8U (Const8 [c]) (Const8 [d]))
	// cond:
	// result: (ConstBool [b2i(uint8(c) > uint8(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint8(c) > uint8(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpIsInBounds statically resolves bounds checks where
// possible: zero-extended indices against large enough constant bounds,
// x in [0,x) (always false), masked (And) indices, two constants, and
// modulus results against the same divisor.
func rewriteValuegeneric_OpIsInBounds(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (IsInBounds (ZeroExt8to32 _) (Const32 [c]))
	// cond: (1 << 8) <= c
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpZeroExt8to32 {
			break
		}
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		c := v_1.AuxInt
		if !((1 << 8) <= c) {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsInBounds (ZeroExt8to64 _) (Const64 [c]))
	// cond: (1 << 8) <= c
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpZeroExt8to64 {
			break
		}
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !((1 << 8) <= c) {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsInBounds (ZeroExt16to32 _) (Const32 [c]))
	// cond: (1 << 16) <= c
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpZeroExt16to32 {
			break
		}
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		c := v_1.AuxInt
		if !((1 << 16) <= c) {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsInBounds (ZeroExt16to64 _) (Const64 [c]))
	// cond: (1 << 16) <= c
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpZeroExt16to64 {
			break
		}
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !((1 << 16) <= c) {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsInBounds x x)
	// cond:
	// result: (ConstBool [0])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 0
		return true
	}
	// match: (IsInBounds (And32 (Const32 [c]) _) (Const32 [d]))
	// cond: 0 <= c && c < d
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAnd32 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst32 {
			break
		}
		c := v_0_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		if !(0 <= c && c < d) {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsInBounds (And64 (Const64 [c]) _) (Const64 [d]))
	// cond: 0 <= c && c < d
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAnd64 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst64 {
			break
		}
		c := v_0_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		if !(0 <= c && c < d) {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsInBounds (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(0 <= c && c < d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(0 <= c && c < d)
		return true
	}
	// match: (IsInBounds (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (ConstBool [b2i(0 <= c && c < d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(0 <= c && c < d)
		return true
	}
	// match: (IsInBounds (Mod32u _ y) y)
	// cond:
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMod32u {
			break
		}
		y := v_0.Args[1]
		if y != v.Args[1] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsInBounds (Mod64u _ y) y)
	// cond:
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMod64u {
			break
		}
		y := v_0.Args[1]
		if y != v.Args[1] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	return false
}

// rewriteValuegeneric_OpIsSliceInBounds statically resolves slice-bounds
// checks (index <= bound, note <= rather than <). NOTE(review): this
// function continues past the end of the visible chunk.
func rewriteValuegeneric_OpIsSliceInBounds(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (IsSliceInBounds x x)
	// cond:
	// result: (ConstBool [1])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsSliceInBounds (And32 (Const32 [c]) _) (Const32 [d]))
	// cond: 0 <= c && c <= d
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAnd32 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst32 {
			break
		}
		c := v_0_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		if !(0 <= c && c <= d) {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsSliceInBounds (And64 (Const64 [c]) _) (Const64 [d]))
	// cond: 0 <= c && c <= d
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAnd64 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst64 {
			break
		}
		c := v_0_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		if !(0 <= c && c <= d) {
			break
		}
v.reset(OpConstBool) 2926 v.AuxInt = 1 2927 return true 2928 } 2929 // match: (IsSliceInBounds (Const32 [0]) _) 2930 // cond: 2931 // result: (ConstBool [1]) 2932 for { 2933 v_0 := v.Args[0] 2934 if v_0.Op != OpConst32 { 2935 break 2936 } 2937 if v_0.AuxInt != 0 { 2938 break 2939 } 2940 v.reset(OpConstBool) 2941 v.AuxInt = 1 2942 return true 2943 } 2944 // match: (IsSliceInBounds (Const64 [0]) _) 2945 // cond: 2946 // result: (ConstBool [1]) 2947 for { 2948 v_0 := v.Args[0] 2949 if v_0.Op != OpConst64 { 2950 break 2951 } 2952 if v_0.AuxInt != 0 { 2953 break 2954 } 2955 v.reset(OpConstBool) 2956 v.AuxInt = 1 2957 return true 2958 } 2959 // match: (IsSliceInBounds (Const32 [c]) (Const32 [d])) 2960 // cond: 2961 // result: (ConstBool [b2i(0 <= c && c <= d)]) 2962 for { 2963 v_0 := v.Args[0] 2964 if v_0.Op != OpConst32 { 2965 break 2966 } 2967 c := v_0.AuxInt 2968 v_1 := v.Args[1] 2969 if v_1.Op != OpConst32 { 2970 break 2971 } 2972 d := v_1.AuxInt 2973 v.reset(OpConstBool) 2974 v.AuxInt = b2i(0 <= c && c <= d) 2975 return true 2976 } 2977 // match: (IsSliceInBounds (Const64 [c]) (Const64 [d])) 2978 // cond: 2979 // result: (ConstBool [b2i(0 <= c && c <= d)]) 2980 for { 2981 v_0 := v.Args[0] 2982 if v_0.Op != OpConst64 { 2983 break 2984 } 2985 c := v_0.AuxInt 2986 v_1 := v.Args[1] 2987 if v_1.Op != OpConst64 { 2988 break 2989 } 2990 d := v_1.AuxInt 2991 v.reset(OpConstBool) 2992 v.AuxInt = b2i(0 <= c && c <= d) 2993 return true 2994 } 2995 // match: (IsSliceInBounds (SliceLen x) (SliceCap x)) 2996 // cond: 2997 // result: (ConstBool [1]) 2998 for { 2999 v_0 := v.Args[0] 3000 if v_0.Op != OpSliceLen { 3001 break 3002 } 3003 x := v_0.Args[0] 3004 v_1 := v.Args[1] 3005 if v_1.Op != OpSliceCap { 3006 break 3007 } 3008 if x != v_1.Args[0] { 3009 break 3010 } 3011 v.reset(OpConstBool) 3012 v.AuxInt = 1 3013 return true 3014 } 3015 return false 3016 } 3017 func rewriteValuegeneric_OpLeq16(v *Value, config *Config) bool { 3018 b := v.Block 3019 _ = b 3020 // match: (Leq16 
(Const16 [c]) (Const16 [d])) 3021 // cond: 3022 // result: (ConstBool [b2i(c <= d)]) 3023 for { 3024 v_0 := v.Args[0] 3025 if v_0.Op != OpConst16 { 3026 break 3027 } 3028 c := v_0.AuxInt 3029 v_1 := v.Args[1] 3030 if v_1.Op != OpConst16 { 3031 break 3032 } 3033 d := v_1.AuxInt 3034 v.reset(OpConstBool) 3035 v.AuxInt = b2i(c <= d) 3036 return true 3037 } 3038 return false 3039 } 3040 func rewriteValuegeneric_OpLeq16U(v *Value, config *Config) bool { 3041 b := v.Block 3042 _ = b 3043 // match: (Leq16U (Const16 [c]) (Const16 [d])) 3044 // cond: 3045 // result: (ConstBool [b2i(uint16(c) <= uint16(d))]) 3046 for { 3047 v_0 := v.Args[0] 3048 if v_0.Op != OpConst16 { 3049 break 3050 } 3051 c := v_0.AuxInt 3052 v_1 := v.Args[1] 3053 if v_1.Op != OpConst16 { 3054 break 3055 } 3056 d := v_1.AuxInt 3057 v.reset(OpConstBool) 3058 v.AuxInt = b2i(uint16(c) <= uint16(d)) 3059 return true 3060 } 3061 return false 3062 } 3063 func rewriteValuegeneric_OpLeq32(v *Value, config *Config) bool { 3064 b := v.Block 3065 _ = b 3066 // match: (Leq32 (Const32 [c]) (Const32 [d])) 3067 // cond: 3068 // result: (ConstBool [b2i(c <= d)]) 3069 for { 3070 v_0 := v.Args[0] 3071 if v_0.Op != OpConst32 { 3072 break 3073 } 3074 c := v_0.AuxInt 3075 v_1 := v.Args[1] 3076 if v_1.Op != OpConst32 { 3077 break 3078 } 3079 d := v_1.AuxInt 3080 v.reset(OpConstBool) 3081 v.AuxInt = b2i(c <= d) 3082 return true 3083 } 3084 return false 3085 } 3086 func rewriteValuegeneric_OpLeq32U(v *Value, config *Config) bool { 3087 b := v.Block 3088 _ = b 3089 // match: (Leq32U (Const32 [c]) (Const32 [d])) 3090 // cond: 3091 // result: (ConstBool [b2i(uint32(c) <= uint32(d))]) 3092 for { 3093 v_0 := v.Args[0] 3094 if v_0.Op != OpConst32 { 3095 break 3096 } 3097 c := v_0.AuxInt 3098 v_1 := v.Args[1] 3099 if v_1.Op != OpConst32 { 3100 break 3101 } 3102 d := v_1.AuxInt 3103 v.reset(OpConstBool) 3104 v.AuxInt = b2i(uint32(c) <= uint32(d)) 3105 return true 3106 } 3107 return false 3108 } 3109 func rewriteValuegeneric_OpLeq64(v 
*Value, config *Config) bool { 3110 b := v.Block 3111 _ = b 3112 // match: (Leq64 (Const64 [c]) (Const64 [d])) 3113 // cond: 3114 // result: (ConstBool [b2i(c <= d)]) 3115 for { 3116 v_0 := v.Args[0] 3117 if v_0.Op != OpConst64 { 3118 break 3119 } 3120 c := v_0.AuxInt 3121 v_1 := v.Args[1] 3122 if v_1.Op != OpConst64 { 3123 break 3124 } 3125 d := v_1.AuxInt 3126 v.reset(OpConstBool) 3127 v.AuxInt = b2i(c <= d) 3128 return true 3129 } 3130 return false 3131 } 3132 func rewriteValuegeneric_OpLeq64U(v *Value, config *Config) bool { 3133 b := v.Block 3134 _ = b 3135 // match: (Leq64U (Const64 [c]) (Const64 [d])) 3136 // cond: 3137 // result: (ConstBool [b2i(uint64(c) <= uint64(d))]) 3138 for { 3139 v_0 := v.Args[0] 3140 if v_0.Op != OpConst64 { 3141 break 3142 } 3143 c := v_0.AuxInt 3144 v_1 := v.Args[1] 3145 if v_1.Op != OpConst64 { 3146 break 3147 } 3148 d := v_1.AuxInt 3149 v.reset(OpConstBool) 3150 v.AuxInt = b2i(uint64(c) <= uint64(d)) 3151 return true 3152 } 3153 return false 3154 } 3155 func rewriteValuegeneric_OpLeq8(v *Value, config *Config) bool { 3156 b := v.Block 3157 _ = b 3158 // match: (Leq8 (Const8 [c]) (Const8 [d])) 3159 // cond: 3160 // result: (ConstBool [b2i(c <= d)]) 3161 for { 3162 v_0 := v.Args[0] 3163 if v_0.Op != OpConst8 { 3164 break 3165 } 3166 c := v_0.AuxInt 3167 v_1 := v.Args[1] 3168 if v_1.Op != OpConst8 { 3169 break 3170 } 3171 d := v_1.AuxInt 3172 v.reset(OpConstBool) 3173 v.AuxInt = b2i(c <= d) 3174 return true 3175 } 3176 return false 3177 } 3178 func rewriteValuegeneric_OpLeq8U(v *Value, config *Config) bool { 3179 b := v.Block 3180 _ = b 3181 // match: (Leq8U (Const8 [c]) (Const8 [d])) 3182 // cond: 3183 // result: (ConstBool [b2i(uint8(c) <= uint8(d))]) 3184 for { 3185 v_0 := v.Args[0] 3186 if v_0.Op != OpConst8 { 3187 break 3188 } 3189 c := v_0.AuxInt 3190 v_1 := v.Args[1] 3191 if v_1.Op != OpConst8 { 3192 break 3193 } 3194 d := v_1.AuxInt 3195 v.reset(OpConstBool) 3196 v.AuxInt = b2i(uint8(c) <= uint8(d)) 3197 return true 3198 } 
3199 return false 3200 } 3201 func rewriteValuegeneric_OpLess16(v *Value, config *Config) bool { 3202 b := v.Block 3203 _ = b 3204 // match: (Less16 (Const16 [c]) (Const16 [d])) 3205 // cond: 3206 // result: (ConstBool [b2i(c < d)]) 3207 for { 3208 v_0 := v.Args[0] 3209 if v_0.Op != OpConst16 { 3210 break 3211 } 3212 c := v_0.AuxInt 3213 v_1 := v.Args[1] 3214 if v_1.Op != OpConst16 { 3215 break 3216 } 3217 d := v_1.AuxInt 3218 v.reset(OpConstBool) 3219 v.AuxInt = b2i(c < d) 3220 return true 3221 } 3222 return false 3223 } 3224 func rewriteValuegeneric_OpLess16U(v *Value, config *Config) bool { 3225 b := v.Block 3226 _ = b 3227 // match: (Less16U (Const16 [c]) (Const16 [d])) 3228 // cond: 3229 // result: (ConstBool [b2i(uint16(c) < uint16(d))]) 3230 for { 3231 v_0 := v.Args[0] 3232 if v_0.Op != OpConst16 { 3233 break 3234 } 3235 c := v_0.AuxInt 3236 v_1 := v.Args[1] 3237 if v_1.Op != OpConst16 { 3238 break 3239 } 3240 d := v_1.AuxInt 3241 v.reset(OpConstBool) 3242 v.AuxInt = b2i(uint16(c) < uint16(d)) 3243 return true 3244 } 3245 return false 3246 } 3247 func rewriteValuegeneric_OpLess32(v *Value, config *Config) bool { 3248 b := v.Block 3249 _ = b 3250 // match: (Less32 (Const32 [c]) (Const32 [d])) 3251 // cond: 3252 // result: (ConstBool [b2i(c < d)]) 3253 for { 3254 v_0 := v.Args[0] 3255 if v_0.Op != OpConst32 { 3256 break 3257 } 3258 c := v_0.AuxInt 3259 v_1 := v.Args[1] 3260 if v_1.Op != OpConst32 { 3261 break 3262 } 3263 d := v_1.AuxInt 3264 v.reset(OpConstBool) 3265 v.AuxInt = b2i(c < d) 3266 return true 3267 } 3268 return false 3269 } 3270 func rewriteValuegeneric_OpLess32U(v *Value, config *Config) bool { 3271 b := v.Block 3272 _ = b 3273 // match: (Less32U (Const32 [c]) (Const32 [d])) 3274 // cond: 3275 // result: (ConstBool [b2i(uint32(c) < uint32(d))]) 3276 for { 3277 v_0 := v.Args[0] 3278 if v_0.Op != OpConst32 { 3279 break 3280 } 3281 c := v_0.AuxInt 3282 v_1 := v.Args[1] 3283 if v_1.Op != OpConst32 { 3284 break 3285 } 3286 d := v_1.AuxInt 3287 
v.reset(OpConstBool) 3288 v.AuxInt = b2i(uint32(c) < uint32(d)) 3289 return true 3290 } 3291 return false 3292 } 3293 func rewriteValuegeneric_OpLess64(v *Value, config *Config) bool { 3294 b := v.Block 3295 _ = b 3296 // match: (Less64 (Const64 [c]) (Const64 [d])) 3297 // cond: 3298 // result: (ConstBool [b2i(c < d)]) 3299 for { 3300 v_0 := v.Args[0] 3301 if v_0.Op != OpConst64 { 3302 break 3303 } 3304 c := v_0.AuxInt 3305 v_1 := v.Args[1] 3306 if v_1.Op != OpConst64 { 3307 break 3308 } 3309 d := v_1.AuxInt 3310 v.reset(OpConstBool) 3311 v.AuxInt = b2i(c < d) 3312 return true 3313 } 3314 return false 3315 } 3316 func rewriteValuegeneric_OpLess64U(v *Value, config *Config) bool { 3317 b := v.Block 3318 _ = b 3319 // match: (Less64U (Const64 [c]) (Const64 [d])) 3320 // cond: 3321 // result: (ConstBool [b2i(uint64(c) < uint64(d))]) 3322 for { 3323 v_0 := v.Args[0] 3324 if v_0.Op != OpConst64 { 3325 break 3326 } 3327 c := v_0.AuxInt 3328 v_1 := v.Args[1] 3329 if v_1.Op != OpConst64 { 3330 break 3331 } 3332 d := v_1.AuxInt 3333 v.reset(OpConstBool) 3334 v.AuxInt = b2i(uint64(c) < uint64(d)) 3335 return true 3336 } 3337 return false 3338 } 3339 func rewriteValuegeneric_OpLess8(v *Value, config *Config) bool { 3340 b := v.Block 3341 _ = b 3342 // match: (Less8 (Const8 [c]) (Const8 [d])) 3343 // cond: 3344 // result: (ConstBool [b2i(c < d)]) 3345 for { 3346 v_0 := v.Args[0] 3347 if v_0.Op != OpConst8 { 3348 break 3349 } 3350 c := v_0.AuxInt 3351 v_1 := v.Args[1] 3352 if v_1.Op != OpConst8 { 3353 break 3354 } 3355 d := v_1.AuxInt 3356 v.reset(OpConstBool) 3357 v.AuxInt = b2i(c < d) 3358 return true 3359 } 3360 return false 3361 } 3362 func rewriteValuegeneric_OpLess8U(v *Value, config *Config) bool { 3363 b := v.Block 3364 _ = b 3365 // match: (Less8U (Const8 [c]) (Const8 [d])) 3366 // cond: 3367 // result: (ConstBool [b2i(uint8(c) < uint8(d))]) 3368 for { 3369 v_0 := v.Args[0] 3370 if v_0.Op != OpConst8 { 3371 break 3372 } 3373 c := v_0.AuxInt 3374 v_1 := v.Args[1] 3375 
if v_1.Op != OpConst8 { 3376 break 3377 } 3378 d := v_1.AuxInt 3379 v.reset(OpConstBool) 3380 v.AuxInt = b2i(uint8(c) < uint8(d)) 3381 return true 3382 } 3383 return false 3384 } 3385 func rewriteValuegeneric_OpLoad(v *Value, config *Config) bool { 3386 b := v.Block 3387 _ = b 3388 // match: (Load <t1> p1 (Store [w] p2 x _)) 3389 // cond: isSamePtr(p1,p2) && t1.Compare(x.Type)==CMPeq && w == t1.Size() 3390 // result: x 3391 for { 3392 t1 := v.Type 3393 p1 := v.Args[0] 3394 v_1 := v.Args[1] 3395 if v_1.Op != OpStore { 3396 break 3397 } 3398 w := v_1.AuxInt 3399 p2 := v_1.Args[0] 3400 x := v_1.Args[1] 3401 if !(isSamePtr(p1, p2) && t1.Compare(x.Type) == CMPeq && w == t1.Size()) { 3402 break 3403 } 3404 v.reset(OpCopy) 3405 v.Type = x.Type 3406 v.AddArg(x) 3407 return true 3408 } 3409 // match: (Load <t> _ _) 3410 // cond: t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t) 3411 // result: (StructMake0) 3412 for { 3413 t := v.Type 3414 if !(t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t)) { 3415 break 3416 } 3417 v.reset(OpStructMake0) 3418 return true 3419 } 3420 // match: (Load <t> ptr mem) 3421 // cond: t.IsStruct() && t.NumFields() == 1 && config.fe.CanSSA(t) 3422 // result: (StructMake1 (Load <t.FieldType(0)> ptr mem)) 3423 for { 3424 t := v.Type 3425 ptr := v.Args[0] 3426 mem := v.Args[1] 3427 if !(t.IsStruct() && t.NumFields() == 1 && config.fe.CanSSA(t)) { 3428 break 3429 } 3430 v.reset(OpStructMake1) 3431 v0 := b.NewValue0(v.Line, OpLoad, t.FieldType(0)) 3432 v0.AddArg(ptr) 3433 v0.AddArg(mem) 3434 v.AddArg(v0) 3435 return true 3436 } 3437 // match: (Load <t> ptr mem) 3438 // cond: t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t) 3439 // result: (StructMake2 (Load <t.FieldType(0)> ptr mem) (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem)) 3440 for { 3441 t := v.Type 3442 ptr := v.Args[0] 3443 mem := v.Args[1] 3444 if !(t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t)) { 3445 break 3446 } 3447 
v.reset(OpStructMake2) 3448 v0 := b.NewValue0(v.Line, OpLoad, t.FieldType(0)) 3449 v0.AddArg(ptr) 3450 v0.AddArg(mem) 3451 v.AddArg(v0) 3452 v1 := b.NewValue0(v.Line, OpLoad, t.FieldType(1)) 3453 v2 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(1).PtrTo()) 3454 v2.AuxInt = t.FieldOff(1) 3455 v2.AddArg(ptr) 3456 v1.AddArg(v2) 3457 v1.AddArg(mem) 3458 v.AddArg(v1) 3459 return true 3460 } 3461 // match: (Load <t> ptr mem) 3462 // cond: t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t) 3463 // result: (StructMake3 (Load <t.FieldType(0)> ptr mem) (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem) (Load <t.FieldType(2)> (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] ptr) mem)) 3464 for { 3465 t := v.Type 3466 ptr := v.Args[0] 3467 mem := v.Args[1] 3468 if !(t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t)) { 3469 break 3470 } 3471 v.reset(OpStructMake3) 3472 v0 := b.NewValue0(v.Line, OpLoad, t.FieldType(0)) 3473 v0.AddArg(ptr) 3474 v0.AddArg(mem) 3475 v.AddArg(v0) 3476 v1 := b.NewValue0(v.Line, OpLoad, t.FieldType(1)) 3477 v2 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(1).PtrTo()) 3478 v2.AuxInt = t.FieldOff(1) 3479 v2.AddArg(ptr) 3480 v1.AddArg(v2) 3481 v1.AddArg(mem) 3482 v.AddArg(v1) 3483 v3 := b.NewValue0(v.Line, OpLoad, t.FieldType(2)) 3484 v4 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(2).PtrTo()) 3485 v4.AuxInt = t.FieldOff(2) 3486 v4.AddArg(ptr) 3487 v3.AddArg(v4) 3488 v3.AddArg(mem) 3489 v.AddArg(v3) 3490 return true 3491 } 3492 // match: (Load <t> ptr mem) 3493 // cond: t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t) 3494 // result: (StructMake4 (Load <t.FieldType(0)> ptr mem) (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem) (Load <t.FieldType(2)> (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] ptr) mem) (Load <t.FieldType(3)> (OffPtr <t.FieldType(3).PtrTo()> [t.FieldOff(3)] ptr) mem)) 3495 for { 3496 t := v.Type 3497 ptr := v.Args[0] 3498 mem := v.Args[1] 3499 if 
!(t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t)) { 3500 break 3501 } 3502 v.reset(OpStructMake4) 3503 v0 := b.NewValue0(v.Line, OpLoad, t.FieldType(0)) 3504 v0.AddArg(ptr) 3505 v0.AddArg(mem) 3506 v.AddArg(v0) 3507 v1 := b.NewValue0(v.Line, OpLoad, t.FieldType(1)) 3508 v2 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(1).PtrTo()) 3509 v2.AuxInt = t.FieldOff(1) 3510 v2.AddArg(ptr) 3511 v1.AddArg(v2) 3512 v1.AddArg(mem) 3513 v.AddArg(v1) 3514 v3 := b.NewValue0(v.Line, OpLoad, t.FieldType(2)) 3515 v4 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(2).PtrTo()) 3516 v4.AuxInt = t.FieldOff(2) 3517 v4.AddArg(ptr) 3518 v3.AddArg(v4) 3519 v3.AddArg(mem) 3520 v.AddArg(v3) 3521 v5 := b.NewValue0(v.Line, OpLoad, t.FieldType(3)) 3522 v6 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(3).PtrTo()) 3523 v6.AuxInt = t.FieldOff(3) 3524 v6.AddArg(ptr) 3525 v5.AddArg(v6) 3526 v5.AddArg(mem) 3527 v.AddArg(v5) 3528 return true 3529 } 3530 return false 3531 } 3532 func rewriteValuegeneric_OpLsh16x16(v *Value, config *Config) bool { 3533 b := v.Block 3534 _ = b 3535 // match: (Lsh16x16 (Rsh16Ux16 (Lsh16x16 x (Const16 [c1])) (Const16 [c2])) (Const16 [c3])) 3536 // cond: uint16(c1) >= uint16(c2) && uint16(c3) >= uint16(c2) 3537 // result: (Lsh16x16 x (Const16 <config.fe.TypeUInt16()> [int64(int16(c1-c2+c3))])) 3538 for { 3539 v_0 := v.Args[0] 3540 if v_0.Op != OpRsh16Ux16 { 3541 break 3542 } 3543 v_0_0 := v_0.Args[0] 3544 if v_0_0.Op != OpLsh16x16 { 3545 break 3546 } 3547 x := v_0_0.Args[0] 3548 v_0_0_1 := v_0_0.Args[1] 3549 if v_0_0_1.Op != OpConst16 { 3550 break 3551 } 3552 c1 := v_0_0_1.AuxInt 3553 v_0_1 := v_0.Args[1] 3554 if v_0_1.Op != OpConst16 { 3555 break 3556 } 3557 c2 := v_0_1.AuxInt 3558 v_1 := v.Args[1] 3559 if v_1.Op != OpConst16 { 3560 break 3561 } 3562 c3 := v_1.AuxInt 3563 if !(uint16(c1) >= uint16(c2) && uint16(c3) >= uint16(c2)) { 3564 break 3565 } 3566 v.reset(OpLsh16x16) 3567 v.AddArg(x) 3568 v0 := b.NewValue0(v.Line, OpConst16, config.fe.TypeUInt16()) 3569 
v0.AuxInt = int64(int16(c1 - c2 + c3)) 3570 v.AddArg(v0) 3571 return true 3572 } 3573 // match: (Lsh16x16 <t> x (Const16 [c])) 3574 // cond: 3575 // result: (Lsh16x64 x (Const64 <t> [int64(uint16(c))])) 3576 for { 3577 t := v.Type 3578 x := v.Args[0] 3579 v_1 := v.Args[1] 3580 if v_1.Op != OpConst16 { 3581 break 3582 } 3583 c := v_1.AuxInt 3584 v.reset(OpLsh16x64) 3585 v.AddArg(x) 3586 v0 := b.NewValue0(v.Line, OpConst64, t) 3587 v0.AuxInt = int64(uint16(c)) 3588 v.AddArg(v0) 3589 return true 3590 } 3591 return false 3592 } 3593 func rewriteValuegeneric_OpLsh16x32(v *Value, config *Config) bool { 3594 b := v.Block 3595 _ = b 3596 // match: (Lsh16x32 <t> x (Const32 [c])) 3597 // cond: 3598 // result: (Lsh16x64 x (Const64 <t> [int64(uint32(c))])) 3599 for { 3600 t := v.Type 3601 x := v.Args[0] 3602 v_1 := v.Args[1] 3603 if v_1.Op != OpConst32 { 3604 break 3605 } 3606 c := v_1.AuxInt 3607 v.reset(OpLsh16x64) 3608 v.AddArg(x) 3609 v0 := b.NewValue0(v.Line, OpConst64, t) 3610 v0.AuxInt = int64(uint32(c)) 3611 v.AddArg(v0) 3612 return true 3613 } 3614 return false 3615 } 3616 func rewriteValuegeneric_OpLsh16x64(v *Value, config *Config) bool { 3617 b := v.Block 3618 _ = b 3619 // match: (Lsh16x64 (Const16 [c]) (Const64 [d])) 3620 // cond: 3621 // result: (Const16 [int64(int16(c) << uint64(d))]) 3622 for { 3623 v_0 := v.Args[0] 3624 if v_0.Op != OpConst16 { 3625 break 3626 } 3627 c := v_0.AuxInt 3628 v_1 := v.Args[1] 3629 if v_1.Op != OpConst64 { 3630 break 3631 } 3632 d := v_1.AuxInt 3633 v.reset(OpConst16) 3634 v.AuxInt = int64(int16(c) << uint64(d)) 3635 return true 3636 } 3637 // match: (Lsh16x64 (Const16 [0]) _) 3638 // cond: 3639 // result: (Const16 [0]) 3640 for { 3641 v_0 := v.Args[0] 3642 if v_0.Op != OpConst16 { 3643 break 3644 } 3645 if v_0.AuxInt != 0 { 3646 break 3647 } 3648 v.reset(OpConst16) 3649 v.AuxInt = 0 3650 return true 3651 } 3652 // match: (Lsh16x64 x (Const64 [0])) 3653 // cond: 3654 // result: x 3655 for { 3656 x := v.Args[0] 3657 v_1 := v.Args[1] 
3658 if v_1.Op != OpConst64 { 3659 break 3660 } 3661 if v_1.AuxInt != 0 { 3662 break 3663 } 3664 v.reset(OpCopy) 3665 v.Type = x.Type 3666 v.AddArg(x) 3667 return true 3668 } 3669 // match: (Lsh16x64 _ (Const64 [c])) 3670 // cond: uint64(c) >= 16 3671 // result: (Const16 [0]) 3672 for { 3673 v_1 := v.Args[1] 3674 if v_1.Op != OpConst64 { 3675 break 3676 } 3677 c := v_1.AuxInt 3678 if !(uint64(c) >= 16) { 3679 break 3680 } 3681 v.reset(OpConst16) 3682 v.AuxInt = 0 3683 return true 3684 } 3685 // match: (Lsh16x64 <t> (Lsh16x64 x (Const64 [c])) (Const64 [d])) 3686 // cond: !uaddOvf(c,d) 3687 // result: (Lsh16x64 x (Const64 <t> [c+d])) 3688 for { 3689 t := v.Type 3690 v_0 := v.Args[0] 3691 if v_0.Op != OpLsh16x64 { 3692 break 3693 } 3694 x := v_0.Args[0] 3695 v_0_1 := v_0.Args[1] 3696 if v_0_1.Op != OpConst64 { 3697 break 3698 } 3699 c := v_0_1.AuxInt 3700 v_1 := v.Args[1] 3701 if v_1.Op != OpConst64 { 3702 break 3703 } 3704 d := v_1.AuxInt 3705 if !(!uaddOvf(c, d)) { 3706 break 3707 } 3708 v.reset(OpLsh16x64) 3709 v.AddArg(x) 3710 v0 := b.NewValue0(v.Line, OpConst64, t) 3711 v0.AuxInt = c + d 3712 v.AddArg(v0) 3713 return true 3714 } 3715 return false 3716 } 3717 func rewriteValuegeneric_OpLsh16x8(v *Value, config *Config) bool { 3718 b := v.Block 3719 _ = b 3720 // match: (Lsh16x8 <t> x (Const8 [c])) 3721 // cond: 3722 // result: (Lsh16x64 x (Const64 <t> [int64(uint8(c))])) 3723 for { 3724 t := v.Type 3725 x := v.Args[0] 3726 v_1 := v.Args[1] 3727 if v_1.Op != OpConst8 { 3728 break 3729 } 3730 c := v_1.AuxInt 3731 v.reset(OpLsh16x64) 3732 v.AddArg(x) 3733 v0 := b.NewValue0(v.Line, OpConst64, t) 3734 v0.AuxInt = int64(uint8(c)) 3735 v.AddArg(v0) 3736 return true 3737 } 3738 return false 3739 } 3740 func rewriteValuegeneric_OpLsh32x16(v *Value, config *Config) bool { 3741 b := v.Block 3742 _ = b 3743 // match: (Lsh32x16 <t> x (Const16 [c])) 3744 // cond: 3745 // result: (Lsh32x64 x (Const64 <t> [int64(uint16(c))])) 3746 for { 3747 t := v.Type 3748 x := v.Args[0] 3749 
v_1 := v.Args[1] 3750 if v_1.Op != OpConst16 { 3751 break 3752 } 3753 c := v_1.AuxInt 3754 v.reset(OpLsh32x64) 3755 v.AddArg(x) 3756 v0 := b.NewValue0(v.Line, OpConst64, t) 3757 v0.AuxInt = int64(uint16(c)) 3758 v.AddArg(v0) 3759 return true 3760 } 3761 return false 3762 } 3763 func rewriteValuegeneric_OpLsh32x32(v *Value, config *Config) bool { 3764 b := v.Block 3765 _ = b 3766 // match: (Lsh32x32 (Rsh32Ux32 (Lsh32x32 x (Const32 [c1])) (Const32 [c2])) (Const32 [c3])) 3767 // cond: uint32(c1) >= uint32(c2) && uint32(c3) >= uint32(c2) 3768 // result: (Lsh32x32 x (Const32 <config.fe.TypeUInt32()> [int64(int32(c1-c2+c3))])) 3769 for { 3770 v_0 := v.Args[0] 3771 if v_0.Op != OpRsh32Ux32 { 3772 break 3773 } 3774 v_0_0 := v_0.Args[0] 3775 if v_0_0.Op != OpLsh32x32 { 3776 break 3777 } 3778 x := v_0_0.Args[0] 3779 v_0_0_1 := v_0_0.Args[1] 3780 if v_0_0_1.Op != OpConst32 { 3781 break 3782 } 3783 c1 := v_0_0_1.AuxInt 3784 v_0_1 := v_0.Args[1] 3785 if v_0_1.Op != OpConst32 { 3786 break 3787 } 3788 c2 := v_0_1.AuxInt 3789 v_1 := v.Args[1] 3790 if v_1.Op != OpConst32 { 3791 break 3792 } 3793 c3 := v_1.AuxInt 3794 if !(uint32(c1) >= uint32(c2) && uint32(c3) >= uint32(c2)) { 3795 break 3796 } 3797 v.reset(OpLsh32x32) 3798 v.AddArg(x) 3799 v0 := b.NewValue0(v.Line, OpConst32, config.fe.TypeUInt32()) 3800 v0.AuxInt = int64(int32(c1 - c2 + c3)) 3801 v.AddArg(v0) 3802 return true 3803 } 3804 // match: (Lsh32x32 <t> x (Const32 [c])) 3805 // cond: 3806 // result: (Lsh32x64 x (Const64 <t> [int64(uint32(c))])) 3807 for { 3808 t := v.Type 3809 x := v.Args[0] 3810 v_1 := v.Args[1] 3811 if v_1.Op != OpConst32 { 3812 break 3813 } 3814 c := v_1.AuxInt 3815 v.reset(OpLsh32x64) 3816 v.AddArg(x) 3817 v0 := b.NewValue0(v.Line, OpConst64, t) 3818 v0.AuxInt = int64(uint32(c)) 3819 v.AddArg(v0) 3820 return true 3821 } 3822 return false 3823 } 3824 func rewriteValuegeneric_OpLsh32x64(v *Value, config *Config) bool { 3825 b := v.Block 3826 _ = b 3827 // match: (Lsh32x64 (Const32 [c]) (Const64 [d])) 
3828 // cond: 3829 // result: (Const32 [int64(int32(c) << uint64(d))]) 3830 for { 3831 v_0 := v.Args[0] 3832 if v_0.Op != OpConst32 { 3833 break 3834 } 3835 c := v_0.AuxInt 3836 v_1 := v.Args[1] 3837 if v_1.Op != OpConst64 { 3838 break 3839 } 3840 d := v_1.AuxInt 3841 v.reset(OpConst32) 3842 v.AuxInt = int64(int32(c) << uint64(d)) 3843 return true 3844 } 3845 // match: (Lsh32x64 (Const32 [0]) _) 3846 // cond: 3847 // result: (Const32 [0]) 3848 for { 3849 v_0 := v.Args[0] 3850 if v_0.Op != OpConst32 { 3851 break 3852 } 3853 if v_0.AuxInt != 0 { 3854 break 3855 } 3856 v.reset(OpConst32) 3857 v.AuxInt = 0 3858 return true 3859 } 3860 // match: (Lsh32x64 x (Const64 [0])) 3861 // cond: 3862 // result: x 3863 for { 3864 x := v.Args[0] 3865 v_1 := v.Args[1] 3866 if v_1.Op != OpConst64 { 3867 break 3868 } 3869 if v_1.AuxInt != 0 { 3870 break 3871 } 3872 v.reset(OpCopy) 3873 v.Type = x.Type 3874 v.AddArg(x) 3875 return true 3876 } 3877 // match: (Lsh32x64 _ (Const64 [c])) 3878 // cond: uint64(c) >= 32 3879 // result: (Const32 [0]) 3880 for { 3881 v_1 := v.Args[1] 3882 if v_1.Op != OpConst64 { 3883 break 3884 } 3885 c := v_1.AuxInt 3886 if !(uint64(c) >= 32) { 3887 break 3888 } 3889 v.reset(OpConst32) 3890 v.AuxInt = 0 3891 return true 3892 } 3893 // match: (Lsh32x64 <t> (Lsh32x64 x (Const64 [c])) (Const64 [d])) 3894 // cond: !uaddOvf(c,d) 3895 // result: (Lsh32x64 x (Const64 <t> [c+d])) 3896 for { 3897 t := v.Type 3898 v_0 := v.Args[0] 3899 if v_0.Op != OpLsh32x64 { 3900 break 3901 } 3902 x := v_0.Args[0] 3903 v_0_1 := v_0.Args[1] 3904 if v_0_1.Op != OpConst64 { 3905 break 3906 } 3907 c := v_0_1.AuxInt 3908 v_1 := v.Args[1] 3909 if v_1.Op != OpConst64 { 3910 break 3911 } 3912 d := v_1.AuxInt 3913 if !(!uaddOvf(c, d)) { 3914 break 3915 } 3916 v.reset(OpLsh32x64) 3917 v.AddArg(x) 3918 v0 := b.NewValue0(v.Line, OpConst64, t) 3919 v0.AuxInt = c + d 3920 v.AddArg(v0) 3921 return true 3922 } 3923 return false 3924 } 3925 func rewriteValuegeneric_OpLsh32x8(v *Value, config 
*Config) bool { 3926 b := v.Block 3927 _ = b 3928 // match: (Lsh32x8 <t> x (Const8 [c])) 3929 // cond: 3930 // result: (Lsh32x64 x (Const64 <t> [int64(uint8(c))])) 3931 for { 3932 t := v.Type 3933 x := v.Args[0] 3934 v_1 := v.Args[1] 3935 if v_1.Op != OpConst8 { 3936 break 3937 } 3938 c := v_1.AuxInt 3939 v.reset(OpLsh32x64) 3940 v.AddArg(x) 3941 v0 := b.NewValue0(v.Line, OpConst64, t) 3942 v0.AuxInt = int64(uint8(c)) 3943 v.AddArg(v0) 3944 return true 3945 } 3946 return false 3947 } 3948 func rewriteValuegeneric_OpLsh64x16(v *Value, config *Config) bool { 3949 b := v.Block 3950 _ = b 3951 // match: (Lsh64x16 <t> x (Const16 [c])) 3952 // cond: 3953 // result: (Lsh64x64 x (Const64 <t> [int64(uint16(c))])) 3954 for { 3955 t := v.Type 3956 x := v.Args[0] 3957 v_1 := v.Args[1] 3958 if v_1.Op != OpConst16 { 3959 break 3960 } 3961 c := v_1.AuxInt 3962 v.reset(OpLsh64x64) 3963 v.AddArg(x) 3964 v0 := b.NewValue0(v.Line, OpConst64, t) 3965 v0.AuxInt = int64(uint16(c)) 3966 v.AddArg(v0) 3967 return true 3968 } 3969 // match: (Lsh64x16 (Const64 [0]) _) 3970 // cond: 3971 // result: (Const64 [0]) 3972 for { 3973 v_0 := v.Args[0] 3974 if v_0.Op != OpConst64 { 3975 break 3976 } 3977 if v_0.AuxInt != 0 { 3978 break 3979 } 3980 v.reset(OpConst64) 3981 v.AuxInt = 0 3982 return true 3983 } 3984 return false 3985 } 3986 func rewriteValuegeneric_OpLsh64x32(v *Value, config *Config) bool { 3987 b := v.Block 3988 _ = b 3989 // match: (Lsh64x32 <t> x (Const32 [c])) 3990 // cond: 3991 // result: (Lsh64x64 x (Const64 <t> [int64(uint32(c))])) 3992 for { 3993 t := v.Type 3994 x := v.Args[0] 3995 v_1 := v.Args[1] 3996 if v_1.Op != OpConst32 { 3997 break 3998 } 3999 c := v_1.AuxInt 4000 v.reset(OpLsh64x64) 4001 v.AddArg(x) 4002 v0 := b.NewValue0(v.Line, OpConst64, t) 4003 v0.AuxInt = int64(uint32(c)) 4004 v.AddArg(v0) 4005 return true 4006 } 4007 // match: (Lsh64x32 (Const64 [0]) _) 4008 // cond: 4009 // result: (Const64 [0]) 4010 for { 4011 v_0 := v.Args[0] 4012 if v_0.Op != OpConst64 { 4013 
break 4014 } 4015 if v_0.AuxInt != 0 { 4016 break 4017 } 4018 v.reset(OpConst64) 4019 v.AuxInt = 0 4020 return true 4021 } 4022 return false 4023 } 4024 func rewriteValuegeneric_OpLsh64x64(v *Value, config *Config) bool { 4025 b := v.Block 4026 _ = b 4027 // match: (Lsh64x64 (Const64 [c]) (Const64 [d])) 4028 // cond: 4029 // result: (Const64 [c << uint64(d)]) 4030 for { 4031 v_0 := v.Args[0] 4032 if v_0.Op != OpConst64 { 4033 break 4034 } 4035 c := v_0.AuxInt 4036 v_1 := v.Args[1] 4037 if v_1.Op != OpConst64 { 4038 break 4039 } 4040 d := v_1.AuxInt 4041 v.reset(OpConst64) 4042 v.AuxInt = c << uint64(d) 4043 return true 4044 } 4045 // match: (Lsh64x64 (Const64 [0]) _) 4046 // cond: 4047 // result: (Const64 [0]) 4048 for { 4049 v_0 := v.Args[0] 4050 if v_0.Op != OpConst64 { 4051 break 4052 } 4053 if v_0.AuxInt != 0 { 4054 break 4055 } 4056 v.reset(OpConst64) 4057 v.AuxInt = 0 4058 return true 4059 } 4060 // match: (Lsh64x64 (Rsh64Ux64 (Lsh64x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) 4061 // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) 4062 // result: (Lsh64x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3])) 4063 for { 4064 v_0 := v.Args[0] 4065 if v_0.Op != OpRsh64Ux64 { 4066 break 4067 } 4068 v_0_0 := v_0.Args[0] 4069 if v_0_0.Op != OpLsh64x64 { 4070 break 4071 } 4072 x := v_0_0.Args[0] 4073 v_0_0_1 := v_0_0.Args[1] 4074 if v_0_0_1.Op != OpConst64 { 4075 break 4076 } 4077 c1 := v_0_0_1.AuxInt 4078 v_0_1 := v_0.Args[1] 4079 if v_0_1.Op != OpConst64 { 4080 break 4081 } 4082 c2 := v_0_1.AuxInt 4083 v_1 := v.Args[1] 4084 if v_1.Op != OpConst64 { 4085 break 4086 } 4087 c3 := v_1.AuxInt 4088 if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2)) { 4089 break 4090 } 4091 v.reset(OpLsh64x64) 4092 v.AddArg(x) 4093 v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64()) 4094 v0.AuxInt = c1 - c2 + c3 4095 v.AddArg(v0) 4096 return true 4097 } 4098 // match: (Lsh64x64 x (Const64 [0])) 4099 // cond: 4100 // result: x 4101 for { 4102 x := 
v.Args[0] 4103 v_1 := v.Args[1] 4104 if v_1.Op != OpConst64 { 4105 break 4106 } 4107 if v_1.AuxInt != 0 { 4108 break 4109 } 4110 v.reset(OpCopy) 4111 v.Type = x.Type 4112 v.AddArg(x) 4113 return true 4114 } 4115 // match: (Lsh64x64 (Const64 [0]) _) 4116 // cond: 4117 // result: (Const64 [0]) 4118 for { 4119 v_0 := v.Args[0] 4120 if v_0.Op != OpConst64 { 4121 break 4122 } 4123 if v_0.AuxInt != 0 { 4124 break 4125 } 4126 v.reset(OpConst64) 4127 v.AuxInt = 0 4128 return true 4129 } 4130 // match: (Lsh64x64 _ (Const64 [c])) 4131 // cond: uint64(c) >= 64 4132 // result: (Const64 [0]) 4133 for { 4134 v_1 := v.Args[1] 4135 if v_1.Op != OpConst64 { 4136 break 4137 } 4138 c := v_1.AuxInt 4139 if !(uint64(c) >= 64) { 4140 break 4141 } 4142 v.reset(OpConst64) 4143 v.AuxInt = 0 4144 return true 4145 } 4146 // match: (Lsh64x64 <t> (Lsh64x64 x (Const64 [c])) (Const64 [d])) 4147 // cond: !uaddOvf(c,d) 4148 // result: (Lsh64x64 x (Const64 <t> [c+d])) 4149 for { 4150 t := v.Type 4151 v_0 := v.Args[0] 4152 if v_0.Op != OpLsh64x64 { 4153 break 4154 } 4155 x := v_0.Args[0] 4156 v_0_1 := v_0.Args[1] 4157 if v_0_1.Op != OpConst64 { 4158 break 4159 } 4160 c := v_0_1.AuxInt 4161 v_1 := v.Args[1] 4162 if v_1.Op != OpConst64 { 4163 break 4164 } 4165 d := v_1.AuxInt 4166 if !(!uaddOvf(c, d)) { 4167 break 4168 } 4169 v.reset(OpLsh64x64) 4170 v.AddArg(x) 4171 v0 := b.NewValue0(v.Line, OpConst64, t) 4172 v0.AuxInt = c + d 4173 v.AddArg(v0) 4174 return true 4175 } 4176 return false 4177 } 4178 func rewriteValuegeneric_OpLsh64x8(v *Value, config *Config) bool { 4179 b := v.Block 4180 _ = b 4181 // match: (Lsh64x8 <t> x (Const8 [c])) 4182 // cond: 4183 // result: (Lsh64x64 x (Const64 <t> [int64(uint8(c))])) 4184 for { 4185 t := v.Type 4186 x := v.Args[0] 4187 v_1 := v.Args[1] 4188 if v_1.Op != OpConst8 { 4189 break 4190 } 4191 c := v_1.AuxInt 4192 v.reset(OpLsh64x64) 4193 v.AddArg(x) 4194 v0 := b.NewValue0(v.Line, OpConst64, t) 4195 v0.AuxInt = int64(uint8(c)) 4196 v.AddArg(v0) 4197 return true 
4198 } 4199 // match: (Lsh64x8 (Const64 [0]) _) 4200 // cond: 4201 // result: (Const64 [0]) 4202 for { 4203 v_0 := v.Args[0] 4204 if v_0.Op != OpConst64 { 4205 break 4206 } 4207 if v_0.AuxInt != 0 { 4208 break 4209 } 4210 v.reset(OpConst64) 4211 v.AuxInt = 0 4212 return true 4213 } 4214 return false 4215 } 4216 func rewriteValuegeneric_OpLsh8x16(v *Value, config *Config) bool { 4217 b := v.Block 4218 _ = b 4219 // match: (Lsh8x16 <t> x (Const16 [c])) 4220 // cond: 4221 // result: (Lsh8x64 x (Const64 <t> [int64(uint16(c))])) 4222 for { 4223 t := v.Type 4224 x := v.Args[0] 4225 v_1 := v.Args[1] 4226 if v_1.Op != OpConst16 { 4227 break 4228 } 4229 c := v_1.AuxInt 4230 v.reset(OpLsh8x64) 4231 v.AddArg(x) 4232 v0 := b.NewValue0(v.Line, OpConst64, t) 4233 v0.AuxInt = int64(uint16(c)) 4234 v.AddArg(v0) 4235 return true 4236 } 4237 return false 4238 } 4239 func rewriteValuegeneric_OpLsh8x32(v *Value, config *Config) bool { 4240 b := v.Block 4241 _ = b 4242 // match: (Lsh8x32 <t> x (Const32 [c])) 4243 // cond: 4244 // result: (Lsh8x64 x (Const64 <t> [int64(uint32(c))])) 4245 for { 4246 t := v.Type 4247 x := v.Args[0] 4248 v_1 := v.Args[1] 4249 if v_1.Op != OpConst32 { 4250 break 4251 } 4252 c := v_1.AuxInt 4253 v.reset(OpLsh8x64) 4254 v.AddArg(x) 4255 v0 := b.NewValue0(v.Line, OpConst64, t) 4256 v0.AuxInt = int64(uint32(c)) 4257 v.AddArg(v0) 4258 return true 4259 } 4260 return false 4261 } 4262 func rewriteValuegeneric_OpLsh8x64(v *Value, config *Config) bool { 4263 b := v.Block 4264 _ = b 4265 // match: (Lsh8x64 (Const8 [c]) (Const64 [d])) 4266 // cond: 4267 // result: (Const8 [int64(int8(c) << uint64(d))]) 4268 for { 4269 v_0 := v.Args[0] 4270 if v_0.Op != OpConst8 { 4271 break 4272 } 4273 c := v_0.AuxInt 4274 v_1 := v.Args[1] 4275 if v_1.Op != OpConst64 { 4276 break 4277 } 4278 d := v_1.AuxInt 4279 v.reset(OpConst8) 4280 v.AuxInt = int64(int8(c) << uint64(d)) 4281 return true 4282 } 4283 // match: (Lsh8x64 (Const8 [0]) _) 4284 // cond: 4285 // result: (Const8 [0]) 4286 
for { 4287 v_0 := v.Args[0] 4288 if v_0.Op != OpConst8 { 4289 break 4290 } 4291 if v_0.AuxInt != 0 { 4292 break 4293 } 4294 v.reset(OpConst8) 4295 v.AuxInt = 0 4296 return true 4297 } 4298 // match: (Lsh8x64 x (Const64 [0])) 4299 // cond: 4300 // result: x 4301 for { 4302 x := v.Args[0] 4303 v_1 := v.Args[1] 4304 if v_1.Op != OpConst64 { 4305 break 4306 } 4307 if v_1.AuxInt != 0 { 4308 break 4309 } 4310 v.reset(OpCopy) 4311 v.Type = x.Type 4312 v.AddArg(x) 4313 return true 4314 } 4315 // match: (Lsh8x64 _ (Const64 [c])) 4316 // cond: uint64(c) >= 8 4317 // result: (Const8 [0]) 4318 for { 4319 v_1 := v.Args[1] 4320 if v_1.Op != OpConst64 { 4321 break 4322 } 4323 c := v_1.AuxInt 4324 if !(uint64(c) >= 8) { 4325 break 4326 } 4327 v.reset(OpConst8) 4328 v.AuxInt = 0 4329 return true 4330 } 4331 // match: (Lsh8x64 <t> (Lsh8x64 x (Const64 [c])) (Const64 [d])) 4332 // cond: !uaddOvf(c,d) 4333 // result: (Lsh8x64 x (Const64 <t> [c+d])) 4334 for { 4335 t := v.Type 4336 v_0 := v.Args[0] 4337 if v_0.Op != OpLsh8x64 { 4338 break 4339 } 4340 x := v_0.Args[0] 4341 v_0_1 := v_0.Args[1] 4342 if v_0_1.Op != OpConst64 { 4343 break 4344 } 4345 c := v_0_1.AuxInt 4346 v_1 := v.Args[1] 4347 if v_1.Op != OpConst64 { 4348 break 4349 } 4350 d := v_1.AuxInt 4351 if !(!uaddOvf(c, d)) { 4352 break 4353 } 4354 v.reset(OpLsh8x64) 4355 v.AddArg(x) 4356 v0 := b.NewValue0(v.Line, OpConst64, t) 4357 v0.AuxInt = c + d 4358 v.AddArg(v0) 4359 return true 4360 } 4361 return false 4362 } 4363 func rewriteValuegeneric_OpLsh8x8(v *Value, config *Config) bool { 4364 b := v.Block 4365 _ = b 4366 // match: (Lsh8x8 (Rsh8Ux8 (Lsh8x8 x (Const8 [c1])) (Const8 [c2])) (Const8 [c3])) 4367 // cond: uint8(c1) >= uint8(c2) && uint8(c3) >= uint8(c2) 4368 // result: (Lsh8x8 x (Const8 <config.fe.TypeUInt8()> [int64(int8(c1-c2+c3))])) 4369 for { 4370 v_0 := v.Args[0] 4371 if v_0.Op != OpRsh8Ux8 { 4372 break 4373 } 4374 v_0_0 := v_0.Args[0] 4375 if v_0_0.Op != OpLsh8x8 { 4376 break 4377 } 4378 x := v_0_0.Args[0] 4379 
v_0_0_1 := v_0_0.Args[1] 4380 if v_0_0_1.Op != OpConst8 { 4381 break 4382 } 4383 c1 := v_0_0_1.AuxInt 4384 v_0_1 := v_0.Args[1] 4385 if v_0_1.Op != OpConst8 { 4386 break 4387 } 4388 c2 := v_0_1.AuxInt 4389 v_1 := v.Args[1] 4390 if v_1.Op != OpConst8 { 4391 break 4392 } 4393 c3 := v_1.AuxInt 4394 if !(uint8(c1) >= uint8(c2) && uint8(c3) >= uint8(c2)) { 4395 break 4396 } 4397 v.reset(OpLsh8x8) 4398 v.AddArg(x) 4399 v0 := b.NewValue0(v.Line, OpConst8, config.fe.TypeUInt8()) 4400 v0.AuxInt = int64(int8(c1 - c2 + c3)) 4401 v.AddArg(v0) 4402 return true 4403 } 4404 // match: (Lsh8x8 <t> x (Const8 [c])) 4405 // cond: 4406 // result: (Lsh8x64 x (Const64 <t> [int64(uint8(c))])) 4407 for { 4408 t := v.Type 4409 x := v.Args[0] 4410 v_1 := v.Args[1] 4411 if v_1.Op != OpConst8 { 4412 break 4413 } 4414 c := v_1.AuxInt 4415 v.reset(OpLsh8x64) 4416 v.AddArg(x) 4417 v0 := b.NewValue0(v.Line, OpConst64, t) 4418 v0.AuxInt = int64(uint8(c)) 4419 v.AddArg(v0) 4420 return true 4421 } 4422 return false 4423 } 4424 func rewriteValuegeneric_OpMod16(v *Value, config *Config) bool { 4425 b := v.Block 4426 _ = b 4427 // match: (Mod16 (Const16 [c]) (Const16 [d])) 4428 // cond: d != 0 4429 // result: (Const16 [int64(int16(c % d))]) 4430 for { 4431 v_0 := v.Args[0] 4432 if v_0.Op != OpConst16 { 4433 break 4434 } 4435 c := v_0.AuxInt 4436 v_1 := v.Args[1] 4437 if v_1.Op != OpConst16 { 4438 break 4439 } 4440 d := v_1.AuxInt 4441 if !(d != 0) { 4442 break 4443 } 4444 v.reset(OpConst16) 4445 v.AuxInt = int64(int16(c % d)) 4446 return true 4447 } 4448 return false 4449 } 4450 func rewriteValuegeneric_OpMod16u(v *Value, config *Config) bool { 4451 b := v.Block 4452 _ = b 4453 // match: (Mod16u (Const16 [c]) (Const16 [d])) 4454 // cond: d != 0 4455 // result: (Const16 [int64(uint16(c) % uint16(d))]) 4456 for { 4457 v_0 := v.Args[0] 4458 if v_0.Op != OpConst16 { 4459 break 4460 } 4461 c := v_0.AuxInt 4462 v_1 := v.Args[1] 4463 if v_1.Op != OpConst16 { 4464 break 4465 } 4466 d := v_1.AuxInt 4467 if !(d 
!= 0) { 4468 break 4469 } 4470 v.reset(OpConst16) 4471 v.AuxInt = int64(uint16(c) % uint16(d)) 4472 return true 4473 } 4474 return false 4475 } 4476 func rewriteValuegeneric_OpMod32(v *Value, config *Config) bool { 4477 b := v.Block 4478 _ = b 4479 // match: (Mod32 (Const32 [c]) (Const32 [d])) 4480 // cond: d != 0 4481 // result: (Const32 [int64(int32(c % d))]) 4482 for { 4483 v_0 := v.Args[0] 4484 if v_0.Op != OpConst32 { 4485 break 4486 } 4487 c := v_0.AuxInt 4488 v_1 := v.Args[1] 4489 if v_1.Op != OpConst32 { 4490 break 4491 } 4492 d := v_1.AuxInt 4493 if !(d != 0) { 4494 break 4495 } 4496 v.reset(OpConst32) 4497 v.AuxInt = int64(int32(c % d)) 4498 return true 4499 } 4500 return false 4501 } 4502 func rewriteValuegeneric_OpMod32u(v *Value, config *Config) bool { 4503 b := v.Block 4504 _ = b 4505 // match: (Mod32u (Const32 [c]) (Const32 [d])) 4506 // cond: d != 0 4507 // result: (Const32 [int64(uint32(c) % uint32(d))]) 4508 for { 4509 v_0 := v.Args[0] 4510 if v_0.Op != OpConst32 { 4511 break 4512 } 4513 c := v_0.AuxInt 4514 v_1 := v.Args[1] 4515 if v_1.Op != OpConst32 { 4516 break 4517 } 4518 d := v_1.AuxInt 4519 if !(d != 0) { 4520 break 4521 } 4522 v.reset(OpConst32) 4523 v.AuxInt = int64(uint32(c) % uint32(d)) 4524 return true 4525 } 4526 return false 4527 } 4528 func rewriteValuegeneric_OpMod64(v *Value, config *Config) bool { 4529 b := v.Block 4530 _ = b 4531 // match: (Mod64 (Const64 [c]) (Const64 [d])) 4532 // cond: d != 0 4533 // result: (Const64 [c % d]) 4534 for { 4535 v_0 := v.Args[0] 4536 if v_0.Op != OpConst64 { 4537 break 4538 } 4539 c := v_0.AuxInt 4540 v_1 := v.Args[1] 4541 if v_1.Op != OpConst64 { 4542 break 4543 } 4544 d := v_1.AuxInt 4545 if !(d != 0) { 4546 break 4547 } 4548 v.reset(OpConst64) 4549 v.AuxInt = c % d 4550 return true 4551 } 4552 // match: (Mod64 <t> x (Const64 [c])) 4553 // cond: x.Op != OpConst64 && smagic64ok(c) 4554 // result: (Sub64 x (Mul64 <t> (Div64 <t> x (Const64 <t> [c])) (Const64 <t> [c]))) 4555 for { 4556 t := v.Type 
4557 x := v.Args[0] 4558 v_1 := v.Args[1] 4559 if v_1.Op != OpConst64 { 4560 break 4561 } 4562 c := v_1.AuxInt 4563 if !(x.Op != OpConst64 && smagic64ok(c)) { 4564 break 4565 } 4566 v.reset(OpSub64) 4567 v.AddArg(x) 4568 v0 := b.NewValue0(v.Line, OpMul64, t) 4569 v1 := b.NewValue0(v.Line, OpDiv64, t) 4570 v1.AddArg(x) 4571 v2 := b.NewValue0(v.Line, OpConst64, t) 4572 v2.AuxInt = c 4573 v1.AddArg(v2) 4574 v0.AddArg(v1) 4575 v3 := b.NewValue0(v.Line, OpConst64, t) 4576 v3.AuxInt = c 4577 v0.AddArg(v3) 4578 v.AddArg(v0) 4579 return true 4580 } 4581 return false 4582 } 4583 func rewriteValuegeneric_OpMod64u(v *Value, config *Config) bool { 4584 b := v.Block 4585 _ = b 4586 // match: (Mod64u (Const64 [c]) (Const64 [d])) 4587 // cond: d != 0 4588 // result: (Const64 [int64(uint64(c) % uint64(d))]) 4589 for { 4590 v_0 := v.Args[0] 4591 if v_0.Op != OpConst64 { 4592 break 4593 } 4594 c := v_0.AuxInt 4595 v_1 := v.Args[1] 4596 if v_1.Op != OpConst64 { 4597 break 4598 } 4599 d := v_1.AuxInt 4600 if !(d != 0) { 4601 break 4602 } 4603 v.reset(OpConst64) 4604 v.AuxInt = int64(uint64(c) % uint64(d)) 4605 return true 4606 } 4607 // match: (Mod64u <t> n (Const64 [c])) 4608 // cond: isPowerOfTwo(c) 4609 // result: (And64 n (Const64 <t> [c-1])) 4610 for { 4611 t := v.Type 4612 n := v.Args[0] 4613 v_1 := v.Args[1] 4614 if v_1.Op != OpConst64 { 4615 break 4616 } 4617 c := v_1.AuxInt 4618 if !(isPowerOfTwo(c)) { 4619 break 4620 } 4621 v.reset(OpAnd64) 4622 v.AddArg(n) 4623 v0 := b.NewValue0(v.Line, OpConst64, t) 4624 v0.AuxInt = c - 1 4625 v.AddArg(v0) 4626 return true 4627 } 4628 // match: (Mod64u <t> x (Const64 [c])) 4629 // cond: x.Op != OpConst64 && umagic64ok(c) 4630 // result: (Sub64 x (Mul64 <t> (Div64u <t> x (Const64 <t> [c])) (Const64 <t> [c]))) 4631 for { 4632 t := v.Type 4633 x := v.Args[0] 4634 v_1 := v.Args[1] 4635 if v_1.Op != OpConst64 { 4636 break 4637 } 4638 c := v_1.AuxInt 4639 if !(x.Op != OpConst64 && umagic64ok(c)) { 4640 break 4641 } 4642 v.reset(OpSub64) 4643 
v.AddArg(x) 4644 v0 := b.NewValue0(v.Line, OpMul64, t) 4645 v1 := b.NewValue0(v.Line, OpDiv64u, t) 4646 v1.AddArg(x) 4647 v2 := b.NewValue0(v.Line, OpConst64, t) 4648 v2.AuxInt = c 4649 v1.AddArg(v2) 4650 v0.AddArg(v1) 4651 v3 := b.NewValue0(v.Line, OpConst64, t) 4652 v3.AuxInt = c 4653 v0.AddArg(v3) 4654 v.AddArg(v0) 4655 return true 4656 } 4657 return false 4658 } 4659 func rewriteValuegeneric_OpMod8(v *Value, config *Config) bool { 4660 b := v.Block 4661 _ = b 4662 // match: (Mod8 (Const8 [c]) (Const8 [d])) 4663 // cond: d != 0 4664 // result: (Const8 [int64(int8(c % d))]) 4665 for { 4666 v_0 := v.Args[0] 4667 if v_0.Op != OpConst8 { 4668 break 4669 } 4670 c := v_0.AuxInt 4671 v_1 := v.Args[1] 4672 if v_1.Op != OpConst8 { 4673 break 4674 } 4675 d := v_1.AuxInt 4676 if !(d != 0) { 4677 break 4678 } 4679 v.reset(OpConst8) 4680 v.AuxInt = int64(int8(c % d)) 4681 return true 4682 } 4683 return false 4684 } 4685 func rewriteValuegeneric_OpMod8u(v *Value, config *Config) bool { 4686 b := v.Block 4687 _ = b 4688 // match: (Mod8u (Const8 [c]) (Const8 [d])) 4689 // cond: d != 0 4690 // result: (Const8 [int64(uint8(c) % uint8(d))]) 4691 for { 4692 v_0 := v.Args[0] 4693 if v_0.Op != OpConst8 { 4694 break 4695 } 4696 c := v_0.AuxInt 4697 v_1 := v.Args[1] 4698 if v_1.Op != OpConst8 { 4699 break 4700 } 4701 d := v_1.AuxInt 4702 if !(d != 0) { 4703 break 4704 } 4705 v.reset(OpConst8) 4706 v.AuxInt = int64(uint8(c) % uint8(d)) 4707 return true 4708 } 4709 return false 4710 } 4711 func rewriteValuegeneric_OpMul16(v *Value, config *Config) bool { 4712 b := v.Block 4713 _ = b 4714 // match: (Mul16 (Const16 [c]) (Const16 [d])) 4715 // cond: 4716 // result: (Const16 [int64(int16(c*d))]) 4717 for { 4718 v_0 := v.Args[0] 4719 if v_0.Op != OpConst16 { 4720 break 4721 } 4722 c := v_0.AuxInt 4723 v_1 := v.Args[1] 4724 if v_1.Op != OpConst16 { 4725 break 4726 } 4727 d := v_1.AuxInt 4728 v.reset(OpConst16) 4729 v.AuxInt = int64(int16(c * d)) 4730 return true 4731 } 4732 // match: (Mul16 x 
(Const16 <t> [c])) 4733 // cond: x.Op != OpConst16 4734 // result: (Mul16 (Const16 <t> [c]) x) 4735 for { 4736 x := v.Args[0] 4737 v_1 := v.Args[1] 4738 if v_1.Op != OpConst16 { 4739 break 4740 } 4741 t := v_1.Type 4742 c := v_1.AuxInt 4743 if !(x.Op != OpConst16) { 4744 break 4745 } 4746 v.reset(OpMul16) 4747 v0 := b.NewValue0(v.Line, OpConst16, t) 4748 v0.AuxInt = c 4749 v.AddArg(v0) 4750 v.AddArg(x) 4751 return true 4752 } 4753 // match: (Mul16 (Const16 [0]) _) 4754 // cond: 4755 // result: (Const16 [0]) 4756 for { 4757 v_0 := v.Args[0] 4758 if v_0.Op != OpConst16 { 4759 break 4760 } 4761 if v_0.AuxInt != 0 { 4762 break 4763 } 4764 v.reset(OpConst16) 4765 v.AuxInt = 0 4766 return true 4767 } 4768 return false 4769 } 4770 func rewriteValuegeneric_OpMul32(v *Value, config *Config) bool { 4771 b := v.Block 4772 _ = b 4773 // match: (Mul32 (Const32 [c]) (Const32 [d])) 4774 // cond: 4775 // result: (Const32 [int64(int32(c*d))]) 4776 for { 4777 v_0 := v.Args[0] 4778 if v_0.Op != OpConst32 { 4779 break 4780 } 4781 c := v_0.AuxInt 4782 v_1 := v.Args[1] 4783 if v_1.Op != OpConst32 { 4784 break 4785 } 4786 d := v_1.AuxInt 4787 v.reset(OpConst32) 4788 v.AuxInt = int64(int32(c * d)) 4789 return true 4790 } 4791 // match: (Mul32 x (Const32 <t> [c])) 4792 // cond: x.Op != OpConst32 4793 // result: (Mul32 (Const32 <t> [c]) x) 4794 for { 4795 x := v.Args[0] 4796 v_1 := v.Args[1] 4797 if v_1.Op != OpConst32 { 4798 break 4799 } 4800 t := v_1.Type 4801 c := v_1.AuxInt 4802 if !(x.Op != OpConst32) { 4803 break 4804 } 4805 v.reset(OpMul32) 4806 v0 := b.NewValue0(v.Line, OpConst32, t) 4807 v0.AuxInt = c 4808 v.AddArg(v0) 4809 v.AddArg(x) 4810 return true 4811 } 4812 // match: (Mul32 (Const32 <t> [c]) (Add32 <t> (Const32 <t> [d]) x)) 4813 // cond: 4814 // result: (Add32 (Const32 <t> [int64(int32(c*d))]) (Mul32 <t> (Const32 <t> [c]) x)) 4815 for { 4816 v_0 := v.Args[0] 4817 if v_0.Op != OpConst32 { 4818 break 4819 } 4820 t := v_0.Type 4821 c := v_0.AuxInt 4822 v_1 := v.Args[1] 4823 if 
v_1.Op != OpAdd32 { 4824 break 4825 } 4826 if v_1.Type != t { 4827 break 4828 } 4829 v_1_0 := v_1.Args[0] 4830 if v_1_0.Op != OpConst32 { 4831 break 4832 } 4833 if v_1_0.Type != t { 4834 break 4835 } 4836 d := v_1_0.AuxInt 4837 x := v_1.Args[1] 4838 v.reset(OpAdd32) 4839 v0 := b.NewValue0(v.Line, OpConst32, t) 4840 v0.AuxInt = int64(int32(c * d)) 4841 v.AddArg(v0) 4842 v1 := b.NewValue0(v.Line, OpMul32, t) 4843 v2 := b.NewValue0(v.Line, OpConst32, t) 4844 v2.AuxInt = c 4845 v1.AddArg(v2) 4846 v1.AddArg(x) 4847 v.AddArg(v1) 4848 return true 4849 } 4850 // match: (Mul32 (Const32 [0]) _) 4851 // cond: 4852 // result: (Const32 [0]) 4853 for { 4854 v_0 := v.Args[0] 4855 if v_0.Op != OpConst32 { 4856 break 4857 } 4858 if v_0.AuxInt != 0 { 4859 break 4860 } 4861 v.reset(OpConst32) 4862 v.AuxInt = 0 4863 return true 4864 } 4865 return false 4866 } 4867 func rewriteValuegeneric_OpMul32F(v *Value, config *Config) bool { 4868 b := v.Block 4869 _ = b 4870 // match: (Mul32F (Const32F [c]) (Const32F [d])) 4871 // cond: 4872 // result: (Const32F [f2i(float64(i2f32(c) * i2f32(d)))]) 4873 for { 4874 v_0 := v.Args[0] 4875 if v_0.Op != OpConst32F { 4876 break 4877 } 4878 c := v_0.AuxInt 4879 v_1 := v.Args[1] 4880 if v_1.Op != OpConst32F { 4881 break 4882 } 4883 d := v_1.AuxInt 4884 v.reset(OpConst32F) 4885 v.AuxInt = f2i(float64(i2f32(c) * i2f32(d))) 4886 return true 4887 } 4888 return false 4889 } 4890 func rewriteValuegeneric_OpMul64(v *Value, config *Config) bool { 4891 b := v.Block 4892 _ = b 4893 // match: (Mul64 (Const64 [c]) (Const64 [d])) 4894 // cond: 4895 // result: (Const64 [c*d]) 4896 for { 4897 v_0 := v.Args[0] 4898 if v_0.Op != OpConst64 { 4899 break 4900 } 4901 c := v_0.AuxInt 4902 v_1 := v.Args[1] 4903 if v_1.Op != OpConst64 { 4904 break 4905 } 4906 d := v_1.AuxInt 4907 v.reset(OpConst64) 4908 v.AuxInt = c * d 4909 return true 4910 } 4911 // match: (Mul64 x (Const64 <t> [c])) 4912 // cond: x.Op != OpConst64 4913 // result: (Mul64 (Const64 <t> [c]) x) 4914 for { 4915 x 
:= v.Args[0] 4916 v_1 := v.Args[1] 4917 if v_1.Op != OpConst64 { 4918 break 4919 } 4920 t := v_1.Type 4921 c := v_1.AuxInt 4922 if !(x.Op != OpConst64) { 4923 break 4924 } 4925 v.reset(OpMul64) 4926 v0 := b.NewValue0(v.Line, OpConst64, t) 4927 v0.AuxInt = c 4928 v.AddArg(v0) 4929 v.AddArg(x) 4930 return true 4931 } 4932 // match: (Mul64 (Const64 <t> [c]) (Add64 <t> (Const64 <t> [d]) x)) 4933 // cond: 4934 // result: (Add64 (Const64 <t> [c*d]) (Mul64 <t> (Const64 <t> [c]) x)) 4935 for { 4936 v_0 := v.Args[0] 4937 if v_0.Op != OpConst64 { 4938 break 4939 } 4940 t := v_0.Type 4941 c := v_0.AuxInt 4942 v_1 := v.Args[1] 4943 if v_1.Op != OpAdd64 { 4944 break 4945 } 4946 if v_1.Type != t { 4947 break 4948 } 4949 v_1_0 := v_1.Args[0] 4950 if v_1_0.Op != OpConst64 { 4951 break 4952 } 4953 if v_1_0.Type != t { 4954 break 4955 } 4956 d := v_1_0.AuxInt 4957 x := v_1.Args[1] 4958 v.reset(OpAdd64) 4959 v0 := b.NewValue0(v.Line, OpConst64, t) 4960 v0.AuxInt = c * d 4961 v.AddArg(v0) 4962 v1 := b.NewValue0(v.Line, OpMul64, t) 4963 v2 := b.NewValue0(v.Line, OpConst64, t) 4964 v2.AuxInt = c 4965 v1.AddArg(v2) 4966 v1.AddArg(x) 4967 v.AddArg(v1) 4968 return true 4969 } 4970 // match: (Mul64 (Const64 [0]) _) 4971 // cond: 4972 // result: (Const64 [0]) 4973 for { 4974 v_0 := v.Args[0] 4975 if v_0.Op != OpConst64 { 4976 break 4977 } 4978 if v_0.AuxInt != 0 { 4979 break 4980 } 4981 v.reset(OpConst64) 4982 v.AuxInt = 0 4983 return true 4984 } 4985 return false 4986 } 4987 func rewriteValuegeneric_OpMul64F(v *Value, config *Config) bool { 4988 b := v.Block 4989 _ = b 4990 // match: (Mul64F (Const64F [c]) (Const64F [d])) 4991 // cond: 4992 // result: (Const64F [f2i(i2f(c) * i2f(d))]) 4993 for { 4994 v_0 := v.Args[0] 4995 if v_0.Op != OpConst64F { 4996 break 4997 } 4998 c := v_0.AuxInt 4999 v_1 := v.Args[1] 5000 if v_1.Op != OpConst64F { 5001 break 5002 } 5003 d := v_1.AuxInt 5004 v.reset(OpConst64F) 5005 v.AuxInt = f2i(i2f(c) * i2f(d)) 5006 return true 5007 } 5008 return false 5009 } 5010 
func rewriteValuegeneric_OpMul8(v *Value, config *Config) bool { 5011 b := v.Block 5012 _ = b 5013 // match: (Mul8 (Const8 [c]) (Const8 [d])) 5014 // cond: 5015 // result: (Const8 [int64(int8(c*d))]) 5016 for { 5017 v_0 := v.Args[0] 5018 if v_0.Op != OpConst8 { 5019 break 5020 } 5021 c := v_0.AuxInt 5022 v_1 := v.Args[1] 5023 if v_1.Op != OpConst8 { 5024 break 5025 } 5026 d := v_1.AuxInt 5027 v.reset(OpConst8) 5028 v.AuxInt = int64(int8(c * d)) 5029 return true 5030 } 5031 // match: (Mul8 x (Const8 <t> [c])) 5032 // cond: x.Op != OpConst8 5033 // result: (Mul8 (Const8 <t> [c]) x) 5034 for { 5035 x := v.Args[0] 5036 v_1 := v.Args[1] 5037 if v_1.Op != OpConst8 { 5038 break 5039 } 5040 t := v_1.Type 5041 c := v_1.AuxInt 5042 if !(x.Op != OpConst8) { 5043 break 5044 } 5045 v.reset(OpMul8) 5046 v0 := b.NewValue0(v.Line, OpConst8, t) 5047 v0.AuxInt = c 5048 v.AddArg(v0) 5049 v.AddArg(x) 5050 return true 5051 } 5052 // match: (Mul8 (Const8 [0]) _) 5053 // cond: 5054 // result: (Const8 [0]) 5055 for { 5056 v_0 := v.Args[0] 5057 if v_0.Op != OpConst8 { 5058 break 5059 } 5060 if v_0.AuxInt != 0 { 5061 break 5062 } 5063 v.reset(OpConst8) 5064 v.AuxInt = 0 5065 return true 5066 } 5067 return false 5068 } 5069 func rewriteValuegeneric_OpNeg16(v *Value, config *Config) bool { 5070 b := v.Block 5071 _ = b 5072 // match: (Neg16 (Sub16 x y)) 5073 // cond: 5074 // result: (Sub16 y x) 5075 for { 5076 v_0 := v.Args[0] 5077 if v_0.Op != OpSub16 { 5078 break 5079 } 5080 x := v_0.Args[0] 5081 y := v_0.Args[1] 5082 v.reset(OpSub16) 5083 v.AddArg(y) 5084 v.AddArg(x) 5085 return true 5086 } 5087 return false 5088 } 5089 func rewriteValuegeneric_OpNeg32(v *Value, config *Config) bool { 5090 b := v.Block 5091 _ = b 5092 // match: (Neg32 (Sub32 x y)) 5093 // cond: 5094 // result: (Sub32 y x) 5095 for { 5096 v_0 := v.Args[0] 5097 if v_0.Op != OpSub32 { 5098 break 5099 } 5100 x := v_0.Args[0] 5101 y := v_0.Args[1] 5102 v.reset(OpSub32) 5103 v.AddArg(y) 5104 v.AddArg(x) 5105 return true 5106 } 
5107 return false 5108 } 5109 func rewriteValuegeneric_OpNeg64(v *Value, config *Config) bool { 5110 b := v.Block 5111 _ = b 5112 // match: (Neg64 (Sub64 x y)) 5113 // cond: 5114 // result: (Sub64 y x) 5115 for { 5116 v_0 := v.Args[0] 5117 if v_0.Op != OpSub64 { 5118 break 5119 } 5120 x := v_0.Args[0] 5121 y := v_0.Args[1] 5122 v.reset(OpSub64) 5123 v.AddArg(y) 5124 v.AddArg(x) 5125 return true 5126 } 5127 return false 5128 } 5129 func rewriteValuegeneric_OpNeg8(v *Value, config *Config) bool { 5130 b := v.Block 5131 _ = b 5132 // match: (Neg8 (Sub8 x y)) 5133 // cond: 5134 // result: (Sub8 y x) 5135 for { 5136 v_0 := v.Args[0] 5137 if v_0.Op != OpSub8 { 5138 break 5139 } 5140 x := v_0.Args[0] 5141 y := v_0.Args[1] 5142 v.reset(OpSub8) 5143 v.AddArg(y) 5144 v.AddArg(x) 5145 return true 5146 } 5147 return false 5148 } 5149 func rewriteValuegeneric_OpNeq16(v *Value, config *Config) bool { 5150 b := v.Block 5151 _ = b 5152 // match: (Neq16 x x) 5153 // cond: 5154 // result: (ConstBool [0]) 5155 for { 5156 x := v.Args[0] 5157 if x != v.Args[1] { 5158 break 5159 } 5160 v.reset(OpConstBool) 5161 v.AuxInt = 0 5162 return true 5163 } 5164 // match: (Neq16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x)) 5165 // cond: 5166 // result: (Neq16 (Const16 <t> [int64(int16(c-d))]) x) 5167 for { 5168 v_0 := v.Args[0] 5169 if v_0.Op != OpConst16 { 5170 break 5171 } 5172 t := v_0.Type 5173 c := v_0.AuxInt 5174 v_1 := v.Args[1] 5175 if v_1.Op != OpAdd16 { 5176 break 5177 } 5178 v_1_0 := v_1.Args[0] 5179 if v_1_0.Op != OpConst16 { 5180 break 5181 } 5182 if v_1_0.Type != t { 5183 break 5184 } 5185 d := v_1_0.AuxInt 5186 x := v_1.Args[1] 5187 v.reset(OpNeq16) 5188 v0 := b.NewValue0(v.Line, OpConst16, t) 5189 v0.AuxInt = int64(int16(c - d)) 5190 v.AddArg(v0) 5191 v.AddArg(x) 5192 return true 5193 } 5194 // match: (Neq16 x (Const16 <t> [c])) 5195 // cond: x.Op != OpConst16 5196 // result: (Neq16 (Const16 <t> [c]) x) 5197 for { 5198 x := v.Args[0] 5199 v_1 := v.Args[1] 5200 if v_1.Op != 
OpConst16 { 5201 break 5202 } 5203 t := v_1.Type 5204 c := v_1.AuxInt 5205 if !(x.Op != OpConst16) { 5206 break 5207 } 5208 v.reset(OpNeq16) 5209 v0 := b.NewValue0(v.Line, OpConst16, t) 5210 v0.AuxInt = c 5211 v.AddArg(v0) 5212 v.AddArg(x) 5213 return true 5214 } 5215 // match: (Neq16 (Const16 [c]) (Const16 [d])) 5216 // cond: 5217 // result: (ConstBool [b2i(c != d)]) 5218 for { 5219 v_0 := v.Args[0] 5220 if v_0.Op != OpConst16 { 5221 break 5222 } 5223 c := v_0.AuxInt 5224 v_1 := v.Args[1] 5225 if v_1.Op != OpConst16 { 5226 break 5227 } 5228 d := v_1.AuxInt 5229 v.reset(OpConstBool) 5230 v.AuxInt = b2i(c != d) 5231 return true 5232 } 5233 return false 5234 } 5235 func rewriteValuegeneric_OpNeq32(v *Value, config *Config) bool { 5236 b := v.Block 5237 _ = b 5238 // match: (Neq32 x x) 5239 // cond: 5240 // result: (ConstBool [0]) 5241 for { 5242 x := v.Args[0] 5243 if x != v.Args[1] { 5244 break 5245 } 5246 v.reset(OpConstBool) 5247 v.AuxInt = 0 5248 return true 5249 } 5250 // match: (Neq32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x)) 5251 // cond: 5252 // result: (Neq32 (Const32 <t> [int64(int32(c-d))]) x) 5253 for { 5254 v_0 := v.Args[0] 5255 if v_0.Op != OpConst32 { 5256 break 5257 } 5258 t := v_0.Type 5259 c := v_0.AuxInt 5260 v_1 := v.Args[1] 5261 if v_1.Op != OpAdd32 { 5262 break 5263 } 5264 v_1_0 := v_1.Args[0] 5265 if v_1_0.Op != OpConst32 { 5266 break 5267 } 5268 if v_1_0.Type != t { 5269 break 5270 } 5271 d := v_1_0.AuxInt 5272 x := v_1.Args[1] 5273 v.reset(OpNeq32) 5274 v0 := b.NewValue0(v.Line, OpConst32, t) 5275 v0.AuxInt = int64(int32(c - d)) 5276 v.AddArg(v0) 5277 v.AddArg(x) 5278 return true 5279 } 5280 // match: (Neq32 x (Const32 <t> [c])) 5281 // cond: x.Op != OpConst32 5282 // result: (Neq32 (Const32 <t> [c]) x) 5283 for { 5284 x := v.Args[0] 5285 v_1 := v.Args[1] 5286 if v_1.Op != OpConst32 { 5287 break 5288 } 5289 t := v_1.Type 5290 c := v_1.AuxInt 5291 if !(x.Op != OpConst32) { 5292 break 5293 } 5294 v.reset(OpNeq32) 5295 v0 := 
b.NewValue0(v.Line, OpConst32, t) 5296 v0.AuxInt = c 5297 v.AddArg(v0) 5298 v.AddArg(x) 5299 return true 5300 } 5301 // match: (Neq32 (Const32 [c]) (Const32 [d])) 5302 // cond: 5303 // result: (ConstBool [b2i(c != d)]) 5304 for { 5305 v_0 := v.Args[0] 5306 if v_0.Op != OpConst32 { 5307 break 5308 } 5309 c := v_0.AuxInt 5310 v_1 := v.Args[1] 5311 if v_1.Op != OpConst32 { 5312 break 5313 } 5314 d := v_1.AuxInt 5315 v.reset(OpConstBool) 5316 v.AuxInt = b2i(c != d) 5317 return true 5318 } 5319 return false 5320 } 5321 func rewriteValuegeneric_OpNeq64(v *Value, config *Config) bool { 5322 b := v.Block 5323 _ = b 5324 // match: (Neq64 x x) 5325 // cond: 5326 // result: (ConstBool [0]) 5327 for { 5328 x := v.Args[0] 5329 if x != v.Args[1] { 5330 break 5331 } 5332 v.reset(OpConstBool) 5333 v.AuxInt = 0 5334 return true 5335 } 5336 // match: (Neq64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x)) 5337 // cond: 5338 // result: (Neq64 (Const64 <t> [c-d]) x) 5339 for { 5340 v_0 := v.Args[0] 5341 if v_0.Op != OpConst64 { 5342 break 5343 } 5344 t := v_0.Type 5345 c := v_0.AuxInt 5346 v_1 := v.Args[1] 5347 if v_1.Op != OpAdd64 { 5348 break 5349 } 5350 v_1_0 := v_1.Args[0] 5351 if v_1_0.Op != OpConst64 { 5352 break 5353 } 5354 if v_1_0.Type != t { 5355 break 5356 } 5357 d := v_1_0.AuxInt 5358 x := v_1.Args[1] 5359 v.reset(OpNeq64) 5360 v0 := b.NewValue0(v.Line, OpConst64, t) 5361 v0.AuxInt = c - d 5362 v.AddArg(v0) 5363 v.AddArg(x) 5364 return true 5365 } 5366 // match: (Neq64 x (Const64 <t> [c])) 5367 // cond: x.Op != OpConst64 5368 // result: (Neq64 (Const64 <t> [c]) x) 5369 for { 5370 x := v.Args[0] 5371 v_1 := v.Args[1] 5372 if v_1.Op != OpConst64 { 5373 break 5374 } 5375 t := v_1.Type 5376 c := v_1.AuxInt 5377 if !(x.Op != OpConst64) { 5378 break 5379 } 5380 v.reset(OpNeq64) 5381 v0 := b.NewValue0(v.Line, OpConst64, t) 5382 v0.AuxInt = c 5383 v.AddArg(v0) 5384 v.AddArg(x) 5385 return true 5386 } 5387 // match: (Neq64 (Const64 [c]) (Const64 [d])) 5388 // cond: 5389 // result: 
(ConstBool [b2i(c != d)]) 5390 for { 5391 v_0 := v.Args[0] 5392 if v_0.Op != OpConst64 { 5393 break 5394 } 5395 c := v_0.AuxInt 5396 v_1 := v.Args[1] 5397 if v_1.Op != OpConst64 { 5398 break 5399 } 5400 d := v_1.AuxInt 5401 v.reset(OpConstBool) 5402 v.AuxInt = b2i(c != d) 5403 return true 5404 } 5405 return false 5406 } 5407 func rewriteValuegeneric_OpNeq8(v *Value, config *Config) bool { 5408 b := v.Block 5409 _ = b 5410 // match: (Neq8 x x) 5411 // cond: 5412 // result: (ConstBool [0]) 5413 for { 5414 x := v.Args[0] 5415 if x != v.Args[1] { 5416 break 5417 } 5418 v.reset(OpConstBool) 5419 v.AuxInt = 0 5420 return true 5421 } 5422 // match: (Neq8 (ConstBool [c]) (ConstBool [d])) 5423 // cond: 5424 // result: (ConstBool [b2i(c != d)]) 5425 for { 5426 v_0 := v.Args[0] 5427 if v_0.Op != OpConstBool { 5428 break 5429 } 5430 c := v_0.AuxInt 5431 v_1 := v.Args[1] 5432 if v_1.Op != OpConstBool { 5433 break 5434 } 5435 d := v_1.AuxInt 5436 v.reset(OpConstBool) 5437 v.AuxInt = b2i(c != d) 5438 return true 5439 } 5440 // match: (Neq8 (ConstBool [0]) x) 5441 // cond: 5442 // result: x 5443 for { 5444 v_0 := v.Args[0] 5445 if v_0.Op != OpConstBool { 5446 break 5447 } 5448 if v_0.AuxInt != 0 { 5449 break 5450 } 5451 x := v.Args[1] 5452 v.reset(OpCopy) 5453 v.Type = x.Type 5454 v.AddArg(x) 5455 return true 5456 } 5457 // match: (Neq8 (ConstBool [1]) x) 5458 // cond: 5459 // result: (Not x) 5460 for { 5461 v_0 := v.Args[0] 5462 if v_0.Op != OpConstBool { 5463 break 5464 } 5465 if v_0.AuxInt != 1 { 5466 break 5467 } 5468 x := v.Args[1] 5469 v.reset(OpNot) 5470 v.AddArg(x) 5471 return true 5472 } 5473 // match: (Neq8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x)) 5474 // cond: 5475 // result: (Neq8 (Const8 <t> [int64(int8(c-d))]) x) 5476 for { 5477 v_0 := v.Args[0] 5478 if v_0.Op != OpConst8 { 5479 break 5480 } 5481 t := v_0.Type 5482 c := v_0.AuxInt 5483 v_1 := v.Args[1] 5484 if v_1.Op != OpAdd8 { 5485 break 5486 } 5487 v_1_0 := v_1.Args[0] 5488 if v_1_0.Op != OpConst8 { 5489 break 
5490 } 5491 if v_1_0.Type != t { 5492 break 5493 } 5494 d := v_1_0.AuxInt 5495 x := v_1.Args[1] 5496 v.reset(OpNeq8) 5497 v0 := b.NewValue0(v.Line, OpConst8, t) 5498 v0.AuxInt = int64(int8(c - d)) 5499 v.AddArg(v0) 5500 v.AddArg(x) 5501 return true 5502 } 5503 // match: (Neq8 x (Const8 <t> [c])) 5504 // cond: x.Op != OpConst8 5505 // result: (Neq8 (Const8 <t> [c]) x) 5506 for { 5507 x := v.Args[0] 5508 v_1 := v.Args[1] 5509 if v_1.Op != OpConst8 { 5510 break 5511 } 5512 t := v_1.Type 5513 c := v_1.AuxInt 5514 if !(x.Op != OpConst8) { 5515 break 5516 } 5517 v.reset(OpNeq8) 5518 v0 := b.NewValue0(v.Line, OpConst8, t) 5519 v0.AuxInt = c 5520 v.AddArg(v0) 5521 v.AddArg(x) 5522 return true 5523 } 5524 // match: (Neq8 x (ConstBool <t> [c])) 5525 // cond: x.Op != OpConstBool 5526 // result: (Neq8 (ConstBool <t> [c]) x) 5527 for { 5528 x := v.Args[0] 5529 v_1 := v.Args[1] 5530 if v_1.Op != OpConstBool { 5531 break 5532 } 5533 t := v_1.Type 5534 c := v_1.AuxInt 5535 if !(x.Op != OpConstBool) { 5536 break 5537 } 5538 v.reset(OpNeq8) 5539 v0 := b.NewValue0(v.Line, OpConstBool, t) 5540 v0.AuxInt = c 5541 v.AddArg(v0) 5542 v.AddArg(x) 5543 return true 5544 } 5545 // match: (Neq8 (Const8 [c]) (Const8 [d])) 5546 // cond: 5547 // result: (ConstBool [b2i(c != d)]) 5548 for { 5549 v_0 := v.Args[0] 5550 if v_0.Op != OpConst8 { 5551 break 5552 } 5553 c := v_0.AuxInt 5554 v_1 := v.Args[1] 5555 if v_1.Op != OpConst8 { 5556 break 5557 } 5558 d := v_1.AuxInt 5559 v.reset(OpConstBool) 5560 v.AuxInt = b2i(c != d) 5561 return true 5562 } 5563 return false 5564 } 5565 func rewriteValuegeneric_OpNeqInter(v *Value, config *Config) bool { 5566 b := v.Block 5567 _ = b 5568 // match: (NeqInter x y) 5569 // cond: 5570 // result: (NeqPtr (ITab x) (ITab y)) 5571 for { 5572 x := v.Args[0] 5573 y := v.Args[1] 5574 v.reset(OpNeqPtr) 5575 v0 := b.NewValue0(v.Line, OpITab, config.fe.TypeBytePtr()) 5576 v0.AddArg(x) 5577 v.AddArg(v0) 5578 v1 := b.NewValue0(v.Line, OpITab, config.fe.TypeBytePtr()) 5579 
v1.AddArg(y) 5580 v.AddArg(v1) 5581 return true 5582 } 5583 return false 5584 } 5585 func rewriteValuegeneric_OpNeqPtr(v *Value, config *Config) bool { 5586 b := v.Block 5587 _ = b 5588 // match: (NeqPtr p (ConstNil)) 5589 // cond: 5590 // result: (IsNonNil p) 5591 for { 5592 p := v.Args[0] 5593 v_1 := v.Args[1] 5594 if v_1.Op != OpConstNil { 5595 break 5596 } 5597 v.reset(OpIsNonNil) 5598 v.AddArg(p) 5599 return true 5600 } 5601 // match: (NeqPtr (ConstNil) p) 5602 // cond: 5603 // result: (IsNonNil p) 5604 for { 5605 v_0 := v.Args[0] 5606 if v_0.Op != OpConstNil { 5607 break 5608 } 5609 p := v.Args[1] 5610 v.reset(OpIsNonNil) 5611 v.AddArg(p) 5612 return true 5613 } 5614 return false 5615 } 5616 func rewriteValuegeneric_OpNeqSlice(v *Value, config *Config) bool { 5617 b := v.Block 5618 _ = b 5619 // match: (NeqSlice x y) 5620 // cond: 5621 // result: (NeqPtr (SlicePtr x) (SlicePtr y)) 5622 for { 5623 x := v.Args[0] 5624 y := v.Args[1] 5625 v.reset(OpNeqPtr) 5626 v0 := b.NewValue0(v.Line, OpSlicePtr, config.fe.TypeBytePtr()) 5627 v0.AddArg(x) 5628 v.AddArg(v0) 5629 v1 := b.NewValue0(v.Line, OpSlicePtr, config.fe.TypeBytePtr()) 5630 v1.AddArg(y) 5631 v.AddArg(v1) 5632 return true 5633 } 5634 return false 5635 } 5636 func rewriteValuegeneric_OpOffPtr(v *Value, config *Config) bool { 5637 b := v.Block 5638 _ = b 5639 // match: (OffPtr (OffPtr p [b]) [a]) 5640 // cond: 5641 // result: (OffPtr p [a+b]) 5642 for { 5643 v_0 := v.Args[0] 5644 if v_0.Op != OpOffPtr { 5645 break 5646 } 5647 p := v_0.Args[0] 5648 b := v_0.AuxInt 5649 a := v.AuxInt 5650 v.reset(OpOffPtr) 5651 v.AddArg(p) 5652 v.AuxInt = a + b 5653 return true 5654 } 5655 // match: (OffPtr p [0]) 5656 // cond: v.Type.Compare(p.Type) == CMPeq 5657 // result: p 5658 for { 5659 p := v.Args[0] 5660 if v.AuxInt != 0 { 5661 break 5662 } 5663 if !(v.Type.Compare(p.Type) == CMPeq) { 5664 break 5665 } 5666 v.reset(OpCopy) 5667 v.Type = p.Type 5668 v.AddArg(p) 5669 return true 5670 } 5671 return false 5672 } 5673 func 
rewriteValuegeneric_OpOr16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Or16 x (Const16 <t> [c]))
	// cond: x.Op != OpConst16
	// result: (Or16 (Const16 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst16) {
			break
		}
		v.reset(OpOr16)
		v0 := b.NewValue0(v.Line, OpConst16, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Or16 x x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Or16 (Const16 [0]) x)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Or16 (Const16 [-1]) _)
	// cond:
	// result: (Const16 [-1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		if v_0.AuxInt != -1 {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = -1
		return true
	}
	return false
}

// rewriteValuegeneric_OpOr32 applies the generic rewrite rules for Or32
// (canonicalize constant to the left, x|x, x|0, x|-1); it reports whether
// v was rewritten.
func rewriteValuegeneric_OpOr32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Or32 x (Const32 <t> [c]))
	// cond: x.Op != OpConst32
	// result: (Or32 (Const32 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst32) {
			break
		}
		v.reset(OpOr32)
		v0 := b.NewValue0(v.Line, OpConst32, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Or32 x x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Or32 (Const32 [0]) x)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Or32 (Const32 [-1]) _)
	// cond:
	// result: (Const32 [-1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		if v_0.AuxInt != -1 {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = -1
		return true
	}
	return false
}

// rewriteValuegeneric_OpOr64 applies the generic rewrite rules for Or64
// (canonicalize constant to the left, x|x, x|0, x|-1); it reports whether
// v was rewritten.
func rewriteValuegeneric_OpOr64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Or64 x (Const64 <t> [c]))
	// cond: x.Op != OpConst64
	// result: (Or64 (Const64 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst64) {
			break
		}
		v.reset(OpOr64)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Or64 x x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Or64 (Const64 [0]) x)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Or64 (Const64 [-1]) _)
	// cond:
	// result: (Const64 [-1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != -1 {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = -1
		return true
	}
	return false
}
// rewriteValuegeneric_OpOr8 applies the generic rewrite rules for Or8
// (canonicalize constant to the left, x|x, x|0, x|-1); it reports whether
// v was rewritten.
func rewriteValuegeneric_OpOr8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Or8 x (Const8 <t> [c]))
	// cond: x.Op != OpConst8
	// result: (Or8 (Const8 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst8) {
			break
		}
		v.reset(OpOr8)
		v0 := b.NewValue0(v.Line, OpConst8, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Or8 x x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Or8 (Const8 [0]) x)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Or8 (Const8 [-1]) _)
	// cond:
	// result: (Const8 [-1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		if v_0.AuxInt != -1 {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = -1
		return true
	}
	return false
}

// rewriteValuegeneric_OpPhi collapses a two-argument Phi whose inputs are
// the same integer constant into that constant; it reports whether v was
// rewritten. The len(v.Args) != 2 check runs after the argument matches
// because the matcher only inspects Args[0] and Args[1].
func rewriteValuegeneric_OpPhi(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Phi (Const8 [c]) (Const8 [c]))
	// cond:
	// result: (Const8 [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		if v_1.AuxInt != c {
			break
		}
		if len(v.Args) != 2 {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = c
		return true
	}
	// match: (Phi (Const16 [c]) (Const16 [c]))
	// cond:
	// result: (Const16 [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		if v_1.AuxInt != c {
			break
		}
		if len(v.Args) != 2 {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = c
		return true
	}
	// match: (Phi (Const32 [c]) (Const32 [c]))
	// cond:
	// result: (Const32 [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		if v_1.AuxInt != c {
			break
		}
		if len(v.Args) != 2 {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = c
		return true
	}
	// match: (Phi (Const64 [c]) (Const64 [c]))
	// cond:
	// result: (Const64 [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		if v_1.AuxInt != c {
			break
		}
		if len(v.Args) != 2 {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = c
		return true
	}
	return false
}

// rewriteValuegeneric_OpPtrIndex lowers pointer indexing to pointer
// arithmetic: ptr + idx*elemsize, using 32- or 64-bit multiply depending
// on config.PtrSize; it reports whether v was rewritten.
func rewriteValuegeneric_OpPtrIndex(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (PtrIndex <t> ptr idx)
	// cond: config.PtrSize == 4
	// result: (AddPtr ptr (Mul32 <config.fe.TypeInt()> idx (Const32 <config.fe.TypeInt()> [t.ElemType().Size()])))
	for {
		t := v.Type
		ptr := v.Args[0]
		idx := v.Args[1]
		if !(config.PtrSize == 4) {
			break
		}
		v.reset(OpAddPtr)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Line, OpMul32, config.fe.TypeInt())
		v0.AddArg(idx)
		v1 := b.NewValue0(v.Line, OpConst32, config.fe.TypeInt())
		v1.AuxInt = t.ElemType().Size()
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (PtrIndex <t> ptr idx)
	// cond: config.PtrSize == 8
	// result: (AddPtr ptr (Mul64 <config.fe.TypeInt()> idx (Const64 <config.fe.TypeInt()> [t.ElemType().Size()])))
	for {
		t := v.Type
		ptr := v.Args[0]
		idx := v.Args[1]
		if !(config.PtrSize == 8) {
			break
		}
		v.reset(OpAddPtr)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Line, OpMul64, config.fe.TypeInt())
		v0.AddArg(idx)
		v1 := b.NewValue0(v.Line, OpConst64, config.fe.TypeInt())
		v1.AuxInt = t.ElemType().Size()
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpRsh16Ux16 applies the generic rewrite rules for
// Rsh16Ux16 (mask-extraction shift combining, then normalization to a
// 64-bit shift amount); it reports whether v was rewritten.
func rewriteValuegeneric_OpRsh16Ux16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh16Ux16 (Lsh16x16 (Rsh16Ux16 x (Const16 [c1])) (Const16 [c2])) (Const16 [c3]))
	// cond: uint16(c1) >= uint16(c2) && uint16(c3) >= uint16(c2)
	// result: (Rsh16Ux16 x (Const16 <config.fe.TypeUInt16()> [int64(int16(c1-c2+c3))]))
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpLsh16x16 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpRsh16Ux16 {
			break
		}
		x := v_0_0.Args[0]
		v_0_0_1 := v_0_0.Args[1]
		if v_0_0_1.Op != OpConst16 {
			break
		}
		c1 := v_0_0_1.AuxInt
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst16 {
			break
		}
		c2 := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		c3 := v_1.AuxInt
		if !(uint16(c1) >= uint16(c2) && uint16(c3) >= uint16(c2)) {
			break
		}
		v.reset(OpRsh16Ux16)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst16, config.fe.TypeUInt16())
		v0.AuxInt = int64(int16(c1 - c2 + c3))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh16Ux16 <t> x (Const16 [c]))
	// cond:
	// result: (Rsh16Ux64 x (Const64 <t> [int64(uint16(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh16Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint16(c))
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh16Ux32 normalizes a 32-bit shift amount to a
// 64-bit one; it reports whether v was rewritten.
func rewriteValuegeneric_OpRsh16Ux32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh16Ux32 <t> x (Const32 [c]))
	// cond:
	// result: (Rsh16Ux64 x (Const64 <t> [int64(uint32(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh16Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint32(c))
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh16Ux64 applies the generic rewrite rules for
// Rsh16Ux64: constant folding, zero operand, zero shift, over-shift to
// zero, and combining nested shifts; it reports whether v was rewritten.
func rewriteValuegeneric_OpRsh16Ux64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh16Ux64 (Const16 [c]) (Const64 [d]))
	// cond:
	// result: (Const16 [int64(uint16(c) >> uint64(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst16)
		v.AuxInt = int64(uint16(c) >> uint64(d))
		return true
	}
	// match: (Rsh16Ux64 (Const16 [0]) _)
	// cond:
	// result: (Const16 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = 0
		return true
	}
	// match: (Rsh16Ux64 x (Const64 [0]))
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Rsh16Ux64 _ (Const64 [c]))
	// cond: uint64(c) >= 16
	// result: (Const16 [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 16) {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = 0
		return true
	}
	// match: (Rsh16Ux64 <t> (Rsh16Ux64 x (Const64 [c])) (Const64 [d]))
	// cond: !uaddOvf(c,d)
	// result: (Rsh16Ux64 x (Const64 <t> [c+d]))
	for {
		t := v.Type
		v_0 := v.Args[0]
		if v_0.Op != OpRsh16Ux64 {
			break
		}
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		if !(!uaddOvf(c, d)) {
			break
		}
		v.reset(OpRsh16Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = c + d
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpRsh16Ux8 normalizes an 8-bit shift amount to a
// 64-bit one; it reports whether v was rewritten.
func rewriteValuegeneric_OpRsh16Ux8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh16Ux8 <t> x (Const8 [c]))
	// cond:
	// result: (Rsh16Ux64 x (Const64 <t> [int64(uint8(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh16Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint8(c))
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh16x16 normalizes a 16-bit shift amount to a
// 64-bit one; it reports whether v was rewritten.
func rewriteValuegeneric_OpRsh16x16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh16x16 <t> x (Const16 [c]))
	// cond:
	// result: (Rsh16x64 x (Const64 <t> [int64(uint16(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh16x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint16(c))
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh16x32 normalizes a 32-bit shift amount to a
// 64-bit one; it reports whether v was rewritten.
func rewriteValuegeneric_OpRsh16x32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh16x32 <t> x (Const32 [c]))
	// cond:
	// result: (Rsh16x64 x (Const64 <t> [int64(uint32(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh16x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint32(c))
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh16x64 applies the generic rewrite rules for
// the arithmetic (signed) 16-bit right shift: constant folding, zero
// operand, zero shift, and combining nested shifts; it reports whether
// v was rewritten.
func rewriteValuegeneric_OpRsh16x64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh16x64 (Const16 [c]) (Const64 [d]))
	// cond:
	// result: (Const16 [int64(int16(c) >> uint64(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst16)
		v.AuxInt = int64(int16(c) >> uint64(d))
		return true
	}
	// match: (Rsh16x64 (Const16 [0]) _)
	// cond:
	// result: (Const16 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = 0
		return true
	}
	// match: (Rsh16x64 x (Const64 [0]))
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Rsh16x64 <t> (Rsh16x64 x (Const64 [c])) (Const64 [d]))
	// cond: !uaddOvf(c,d)
	// result: (Rsh16x64 x (Const64 <t> [c+d]))
	for {
		t := v.Type
		v_0 := v.Args[0]
		if v_0.Op != OpRsh16x64 {
			break
		}
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		if !(!uaddOvf(c, d)) {
			break
		}
		v.reset(OpRsh16x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = c + d
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpRsh16x8 normalizes an 8-bit shift amount to a
// 64-bit one; it reports whether v was rewritten.
func rewriteValuegeneric_OpRsh16x8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh16x8 <t> x (Const8 [c]))
	// cond:
	// result: (Rsh16x64 x (Const64 <t> [int64(uint8(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh16x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint8(c))
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh32Ux16 normalizes a 16-bit shift amount to a
// 64-bit one; it reports whether v was rewritten.
func rewriteValuegeneric_OpRsh32Ux16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh32Ux16 <t> x (Const16 [c]))
	// cond:
	// result: (Rsh32Ux64 x (Const64 <t> [int64(uint16(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh32Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint16(c))
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh32Ux32 applies the generic rewrite rules for
// Rsh32Ux32 (mask-extraction shift combining, then normalization to a
// 64-bit shift amount); it reports whether v was rewritten.
func rewriteValuegeneric_OpRsh32Ux32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh32Ux32 (Lsh32x32 (Rsh32Ux32 x (Const32 [c1])) (Const32 [c2])) (Const32 [c3]))
	// cond: uint32(c1) >= uint32(c2) && uint32(c3) >= uint32(c2)
	// result: (Rsh32Ux32 x (Const32 <config.fe.TypeUInt32()> [int64(int32(c1-c2+c3))]))
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpLsh32x32 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpRsh32Ux32 {
			break
		}
		x := v_0_0.Args[0]
		v_0_0_1 := v_0_0.Args[1]
		if v_0_0_1.Op != OpConst32 {
			break
		}
		c1 := v_0_0_1.AuxInt
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst32 {
			break
		}
		c2 := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		c3 := v_1.AuxInt
		if !(uint32(c1) >= uint32(c2) && uint32(c3) >= uint32(c2)) {
			break
		}
		v.reset(OpRsh32Ux32)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst32, config.fe.TypeUInt32())
		v0.AuxInt = int64(int32(c1 - c2 + c3))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh32Ux32 <t> x (Const32 [c]))
	// cond:
	// result: (Rsh32Ux64 x (Const64 <t> [int64(uint32(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh32Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint32(c))
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh32Ux64 applies the generic rewrite rules for
// Rsh32Ux64: constant folding, zero operand, zero shift, over-shift to
// zero, and combining nested shifts; it reports whether v was rewritten.
func rewriteValuegeneric_OpRsh32Ux64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh32Ux64 (Const32 [c]) (Const64 [d]))
	// cond:
	// result: (Const32 [int64(uint32(c) >> uint64(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst32)
		v.AuxInt = int64(uint32(c) >> uint64(d))
		return true
	}
	// match: (Rsh32Ux64 (Const32 [0]) _)
	// cond:
	// result: (Const32 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = 0
		return true
	}
	// match: (Rsh32Ux64 x (Const64 [0]))
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Rsh32Ux64 _ (Const64 [c]))
	// cond: uint64(c) >= 32
	// result: (Const32 [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 32) {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = 0
		return true
	}
	// match: (Rsh32Ux64 <t> (Rsh32Ux64 x (Const64 [c])) (Const64 [d]))
	// cond: !uaddOvf(c,d)
	// result: (Rsh32Ux64 x (Const64 <t> [c+d]))
	for {
		t := v.Type
		v_0 := v.Args[0]
		if v_0.Op != OpRsh32Ux64 {
			break
		}
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		if !(!uaddOvf(c, d)) {
			break
		}
		v.reset(OpRsh32Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = c + d
		v.AddArg(v0)
		return true
	}
	return false
}
true 6597 } 6598 // match: (Rsh32Ux64 _ (Const64 [c])) 6599 // cond: uint64(c) >= 32 6600 // result: (Const32 [0]) 6601 for { 6602 v_1 := v.Args[1] 6603 if v_1.Op != OpConst64 { 6604 break 6605 } 6606 c := v_1.AuxInt 6607 if !(uint64(c) >= 32) { 6608 break 6609 } 6610 v.reset(OpConst32) 6611 v.AuxInt = 0 6612 return true 6613 } 6614 // match: (Rsh32Ux64 <t> (Rsh32Ux64 x (Const64 [c])) (Const64 [d])) 6615 // cond: !uaddOvf(c,d) 6616 // result: (Rsh32Ux64 x (Const64 <t> [c+d])) 6617 for { 6618 t := v.Type 6619 v_0 := v.Args[0] 6620 if v_0.Op != OpRsh32Ux64 { 6621 break 6622 } 6623 x := v_0.Args[0] 6624 v_0_1 := v_0.Args[1] 6625 if v_0_1.Op != OpConst64 { 6626 break 6627 } 6628 c := v_0_1.AuxInt 6629 v_1 := v.Args[1] 6630 if v_1.Op != OpConst64 { 6631 break 6632 } 6633 d := v_1.AuxInt 6634 if !(!uaddOvf(c, d)) { 6635 break 6636 } 6637 v.reset(OpRsh32Ux64) 6638 v.AddArg(x) 6639 v0 := b.NewValue0(v.Line, OpConst64, t) 6640 v0.AuxInt = c + d 6641 v.AddArg(v0) 6642 return true 6643 } 6644 return false 6645 } 6646 func rewriteValuegeneric_OpRsh32Ux8(v *Value, config *Config) bool { 6647 b := v.Block 6648 _ = b 6649 // match: (Rsh32Ux8 <t> x (Const8 [c])) 6650 // cond: 6651 // result: (Rsh32Ux64 x (Const64 <t> [int64(uint8(c))])) 6652 for { 6653 t := v.Type 6654 x := v.Args[0] 6655 v_1 := v.Args[1] 6656 if v_1.Op != OpConst8 { 6657 break 6658 } 6659 c := v_1.AuxInt 6660 v.reset(OpRsh32Ux64) 6661 v.AddArg(x) 6662 v0 := b.NewValue0(v.Line, OpConst64, t) 6663 v0.AuxInt = int64(uint8(c)) 6664 v.AddArg(v0) 6665 return true 6666 } 6667 return false 6668 } 6669 func rewriteValuegeneric_OpRsh32x16(v *Value, config *Config) bool { 6670 b := v.Block 6671 _ = b 6672 // match: (Rsh32x16 <t> x (Const16 [c])) 6673 // cond: 6674 // result: (Rsh32x64 x (Const64 <t> [int64(uint16(c))])) 6675 for { 6676 t := v.Type 6677 x := v.Args[0] 6678 v_1 := v.Args[1] 6679 if v_1.Op != OpConst16 { 6680 break 6681 } 6682 c := v_1.AuxInt 6683 v.reset(OpRsh32x64) 6684 v.AddArg(x) 6685 v0 := 
b.NewValue0(v.Line, OpConst64, t) 6686 v0.AuxInt = int64(uint16(c)) 6687 v.AddArg(v0) 6688 return true 6689 } 6690 return false 6691 } 6692 func rewriteValuegeneric_OpRsh32x32(v *Value, config *Config) bool { 6693 b := v.Block 6694 _ = b 6695 // match: (Rsh32x32 <t> x (Const32 [c])) 6696 // cond: 6697 // result: (Rsh32x64 x (Const64 <t> [int64(uint32(c))])) 6698 for { 6699 t := v.Type 6700 x := v.Args[0] 6701 v_1 := v.Args[1] 6702 if v_1.Op != OpConst32 { 6703 break 6704 } 6705 c := v_1.AuxInt 6706 v.reset(OpRsh32x64) 6707 v.AddArg(x) 6708 v0 := b.NewValue0(v.Line, OpConst64, t) 6709 v0.AuxInt = int64(uint32(c)) 6710 v.AddArg(v0) 6711 return true 6712 } 6713 return false 6714 } 6715 func rewriteValuegeneric_OpRsh32x64(v *Value, config *Config) bool { 6716 b := v.Block 6717 _ = b 6718 // match: (Rsh32x64 (Const32 [c]) (Const64 [d])) 6719 // cond: 6720 // result: (Const32 [int64(int32(c) >> uint64(d))]) 6721 for { 6722 v_0 := v.Args[0] 6723 if v_0.Op != OpConst32 { 6724 break 6725 } 6726 c := v_0.AuxInt 6727 v_1 := v.Args[1] 6728 if v_1.Op != OpConst64 { 6729 break 6730 } 6731 d := v_1.AuxInt 6732 v.reset(OpConst32) 6733 v.AuxInt = int64(int32(c) >> uint64(d)) 6734 return true 6735 } 6736 // match: (Rsh32x64 (Const32 [0]) _) 6737 // cond: 6738 // result: (Const32 [0]) 6739 for { 6740 v_0 := v.Args[0] 6741 if v_0.Op != OpConst32 { 6742 break 6743 } 6744 if v_0.AuxInt != 0 { 6745 break 6746 } 6747 v.reset(OpConst32) 6748 v.AuxInt = 0 6749 return true 6750 } 6751 // match: (Rsh32x64 x (Const64 [0])) 6752 // cond: 6753 // result: x 6754 for { 6755 x := v.Args[0] 6756 v_1 := v.Args[1] 6757 if v_1.Op != OpConst64 { 6758 break 6759 } 6760 if v_1.AuxInt != 0 { 6761 break 6762 } 6763 v.reset(OpCopy) 6764 v.Type = x.Type 6765 v.AddArg(x) 6766 return true 6767 } 6768 // match: (Rsh32x64 <t> (Rsh32x64 x (Const64 [c])) (Const64 [d])) 6769 // cond: !uaddOvf(c,d) 6770 // result: (Rsh32x64 x (Const64 <t> [c+d])) 6771 for { 6772 t := v.Type 6773 v_0 := v.Args[0] 6774 if v_0.Op != 
// rewriteValuegeneric_OpRsh32x8 normalizes an 8-bit shift amount to a
// 64-bit one; it reports whether v was rewritten.
func rewriteValuegeneric_OpRsh32x8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh32x8 <t> x (Const8 [c]))
	// cond:
	// result: (Rsh32x64 x (Const64 <t> [int64(uint8(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh32x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint8(c))
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh64Ux16 normalizes a 16-bit shift amount to a
// 64-bit one and folds a zero left operand; it reports whether v was
// rewritten.
func rewriteValuegeneric_OpRsh64Ux16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh64Ux16 <t> x (Const16 [c]))
	// cond:
	// result: (Rsh64Ux64 x (Const64 <t> [int64(uint16(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh64Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint16(c))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh64Ux16 (Const64 [0]) _)
	// cond:
	// result: (Const64 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh64Ux32 normalizes a 32-bit shift amount to a
// 64-bit one and folds a zero left operand; it reports whether v was
// rewritten.
func rewriteValuegeneric_OpRsh64Ux32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh64Ux32 <t> x (Const32 [c]))
	// cond:
	// result: (Rsh64Ux64 x (Const64 <t> [int64(uint32(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh64Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint32(c))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh64Ux32 (Const64 [0]) _)
	// cond:
	// result: (Const64 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh64Ux64 applies the generic rewrite rules for
// Rsh64Ux64: constant folding, zero operand, mask-extraction shift
// combining, zero shift, over-shift to zero, and combining nested
// shifts; it reports whether v was rewritten.
func rewriteValuegeneric_OpRsh64Ux64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh64Ux64 (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (Const64 [int64(uint64(c) >> uint64(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst64)
		v.AuxInt = int64(uint64(c) >> uint64(d))
		return true
	}
	// match: (Rsh64Ux64 (Const64 [0]) _)
	// cond:
	// result: (Const64 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	// match: (Rsh64Ux64 (Lsh64x64 (Rsh64Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2)
	// result: (Rsh64Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpLsh64x64 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpRsh64Ux64 {
			break
		}
		x := v_0_0.Args[0]
		v_0_0_1 := v_0_0.Args[1]
		if v_0_0_1.Op != OpConst64 {
			break
		}
		c1 := v_0_0_1.AuxInt
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c2 := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c3 := v_1.AuxInt
		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2)) {
			break
		}
		v.reset(OpRsh64Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v0.AuxInt = c1 - c2 + c3
		v.AddArg(v0)
		return true
	}
	// match: (Rsh64Ux64 x (Const64 [0]))
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Rsh64Ux64 (Const64 [0]) _)
	// cond:
	// result: (Const64 [0])
	// NOTE(review): duplicate of the second rule in this function —
	// harmless but redundant; dedupe belongs in gen/generic.rules.
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	// match: (Rsh64Ux64 _ (Const64 [c]))
	// cond: uint64(c) >= 64
	// result: (Const64 [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	// match: (Rsh64Ux64 <t> (Rsh64Ux64 x (Const64 [c])) (Const64 [d]))
	// cond: !uaddOvf(c,d)
	// result: (Rsh64Ux64 x (Const64 <t> [c+d]))
	for {
		t := v.Type
		v_0 := v.Args[0]
		if v_0.Op != OpRsh64Ux64 {
			break
		}
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		if !(!uaddOvf(c, d)) {
			break
		}
		v.reset(OpRsh64Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = c + d
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpRsh64Ux8 normalizes an 8-bit shift amount to a
// 64-bit one and folds a zero left operand; it reports whether v was
// rewritten.
func rewriteValuegeneric_OpRsh64Ux8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh64Ux8 <t> x (Const8 [c]))
	// cond:
	// result: (Rsh64Ux64 x (Const64 <t> [int64(uint8(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh64Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint8(c))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh64Ux8 (Const64 [0]) _)
	// cond:
	// result: (Const64 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh64x16 normalizes a 16-bit shift amount to a
// 64-bit one and folds a zero left operand; it reports whether v was
// rewritten.
func rewriteValuegeneric_OpRsh64x16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh64x16 <t> x (Const16 [c]))
	// cond:
	// result: (Rsh64x64 x (Const64 <t> [int64(uint16(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh64x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint16(c))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh64x16 (Const64 [0]) _)
	// cond:
	// result: (Const64 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh64x32 normalizes a 32-bit shift amount to a
// 64-bit one and folds a zero left operand; it reports whether v was
// rewritten.
func rewriteValuegeneric_OpRsh64x32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh64x32 <t> x (Const32 [c]))
	// cond:
	// result: (Rsh64x64 x (Const64 <t> [int64(uint32(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh64x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint32(c))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh64x32 (Const64 [0]) _)
	// cond:
	// result: (Const64 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh64x64 applies the generic rewrite rules for
// the arithmetic (signed) 64-bit right shift: constant folding, zero
// operand, zero shift, and combining nested shifts; it reports whether
// v was rewritten.
func rewriteValuegeneric_OpRsh64x64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh64x64 (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (Const64 [c >> uint64(d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst64)
		v.AuxInt = c >> uint64(d)
		return true
	}
	// match: (Rsh64x64 (Const64 [0]) _)
	// cond:
	// result: (Const64 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	// match: (Rsh64x64 x (Const64 [0]))
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Rsh64x64 (Const64 [0]) _)
	// cond:
	// result: (Const64 [0])
	// NOTE(review): duplicate of the second rule in this function —
	// harmless but redundant; dedupe belongs in gen/generic.rules.
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	// match: (Rsh64x64 <t> (Rsh64x64 x (Const64 [c])) (Const64 [d]))
	// cond: !uaddOvf(c,d)
	// result: (Rsh64x64 x (Const64 <t> [c+d]))
	for {
		t := v.Type
		v_0 := v.Args[0]
		if v_0.Op != OpRsh64x64 {
			break
		}
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		if !(!uaddOvf(c, d)) {
			break
		}
		v.reset(OpRsh64x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = c + d
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpRsh64x8 normalizes an 8-bit shift amount to a
// 64-bit one and folds a zero left operand; it reports whether v was
// rewritten.
func rewriteValuegeneric_OpRsh64x8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh64x8 <t> x (Const8 [c]))
	// cond:
	// result: (Rsh64x64 x (Const64 <t> [int64(uint8(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh64x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint8(c))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh64x8 (Const64 [0]) _)
	// cond:
	// result: (Const64 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh8Ux16 normalizes a 16-bit shift amount to a
// 64-bit one; it reports whether v was rewritten.
func rewriteValuegeneric_OpRsh8Ux16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8Ux16 <t> x (Const16 [c]))
	// cond:
	// result: (Rsh8Ux64 x (Const64 <t> [int64(uint16(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh8Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint16(c))
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh8Ux32 normalizes a 32-bit shift amount to a
// 64-bit one; it reports whether v was rewritten.
func rewriteValuegeneric_OpRsh8Ux32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8Ux32 <t> x (Const32 [c]))
	// cond:
	// result: (Rsh8Ux64 x (Const64 <t> [int64(uint32(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh8Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint32(c))
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh8Ux64 applies the generic rewrite rules for
// Rsh8Ux64: constant folding, zero operand, zero shift, over-shift to
// zero, and combining nested shifts; it reports whether v was rewritten.
func rewriteValuegeneric_OpRsh8Ux64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8Ux64 (Const8 [c]) (Const64 [d]))
	// cond:
	// result: (Const8 [int64(uint8(c) >> uint64(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst8)
		v.AuxInt = int64(uint8(c) >> uint64(d))
		return true
	}
	// match: (Rsh8Ux64 (Const8 [0]) _)
	// cond:
	// result: (Const8 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = 0
		return true
	}
	// match: (Rsh8Ux64 x (Const64 [0]))
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Rsh8Ux64 _ (Const64 [c]))
	// cond: uint64(c) >= 8
	// result: (Const8 [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 8) {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = 0
		return true
	}
	// match: (Rsh8Ux64 <t> (Rsh8Ux64 x (Const64 [c])) (Const64 [d]))
	// cond: !uaddOvf(c,d)
	// result: (Rsh8Ux64 x (Const64 <t> [c+d]))
	for {
		t := v.Type
		v_0 := v.Args[0]
		if v_0.Op != OpRsh8Ux64 {
			break
		}
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		if !(!uaddOvf(c, d)) {
			break
		}
		v.reset(OpRsh8Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = c + d
		v.AddArg(v0)
		return true
	}
	return false
}
7422 // result: (Rsh8Ux64 x (Const64 <t> [c+d])) 7423 for { 7424 t := v.Type 7425 v_0 := v.Args[0] 7426 if v_0.Op != OpRsh8Ux64 { 7427 break 7428 } 7429 x := v_0.Args[0] 7430 v_0_1 := v_0.Args[1] 7431 if v_0_1.Op != OpConst64 { 7432 break 7433 } 7434 c := v_0_1.AuxInt 7435 v_1 := v.Args[1] 7436 if v_1.Op != OpConst64 { 7437 break 7438 } 7439 d := v_1.AuxInt 7440 if !(!uaddOvf(c, d)) { 7441 break 7442 } 7443 v.reset(OpRsh8Ux64) 7444 v.AddArg(x) 7445 v0 := b.NewValue0(v.Line, OpConst64, t) 7446 v0.AuxInt = c + d 7447 v.AddArg(v0) 7448 return true 7449 } 7450 return false 7451 } 7452 func rewriteValuegeneric_OpRsh8Ux8(v *Value, config *Config) bool { 7453 b := v.Block 7454 _ = b 7455 // match: (Rsh8Ux8 (Lsh8x8 (Rsh8Ux8 x (Const8 [c1])) (Const8 [c2])) (Const8 [c3])) 7456 // cond: uint8(c1) >= uint8(c2) && uint8(c3) >= uint8(c2) 7457 // result: (Rsh8Ux8 x (Const8 <config.fe.TypeUInt8()> [int64(int8(c1-c2+c3))])) 7458 for { 7459 v_0 := v.Args[0] 7460 if v_0.Op != OpLsh8x8 { 7461 break 7462 } 7463 v_0_0 := v_0.Args[0] 7464 if v_0_0.Op != OpRsh8Ux8 { 7465 break 7466 } 7467 x := v_0_0.Args[0] 7468 v_0_0_1 := v_0_0.Args[1] 7469 if v_0_0_1.Op != OpConst8 { 7470 break 7471 } 7472 c1 := v_0_0_1.AuxInt 7473 v_0_1 := v_0.Args[1] 7474 if v_0_1.Op != OpConst8 { 7475 break 7476 } 7477 c2 := v_0_1.AuxInt 7478 v_1 := v.Args[1] 7479 if v_1.Op != OpConst8 { 7480 break 7481 } 7482 c3 := v_1.AuxInt 7483 if !(uint8(c1) >= uint8(c2) && uint8(c3) >= uint8(c2)) { 7484 break 7485 } 7486 v.reset(OpRsh8Ux8) 7487 v.AddArg(x) 7488 v0 := b.NewValue0(v.Line, OpConst8, config.fe.TypeUInt8()) 7489 v0.AuxInt = int64(int8(c1 - c2 + c3)) 7490 v.AddArg(v0) 7491 return true 7492 } 7493 // match: (Rsh8Ux8 <t> x (Const8 [c])) 7494 // cond: 7495 // result: (Rsh8Ux64 x (Const64 <t> [int64(uint8(c))])) 7496 for { 7497 t := v.Type 7498 x := v.Args[0] 7499 v_1 := v.Args[1] 7500 if v_1.Op != OpConst8 { 7501 break 7502 } 7503 c := v_1.AuxInt 7504 v.reset(OpRsh8Ux64) 7505 v.AddArg(x) 7506 v0 := b.NewValue0(v.Line, 
OpConst64, t)
		v0.AuxInt = int64(uint8(c))
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh8x16 widens a constant 16-bit shift count
// of a signed 8-bit right shift to a 64-bit count.
func rewriteValuegeneric_OpRsh8x16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8x16 <t> x (Const16 [c]))
	// cond:
	// result: (Rsh8x64 x (Const64 <t> [int64(uint16(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh8x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint16(c))
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh8x32 widens a constant 32-bit shift count
// of a signed 8-bit right shift to a 64-bit count.
func rewriteValuegeneric_OpRsh8x32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8x32 <t> x (Const32 [c]))
	// cond:
	// result: (Rsh8x64 x (Const64 <t> [int64(uint32(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh8x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint32(c))
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh8x64 constant-folds signed 8-bit right
// shifts, elides shifts of/by zero, and merges two constant shifts
// when the counts don't overflow.
func rewriteValuegeneric_OpRsh8x64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8x64 (Const8 [c]) (Const64 [d]))
	// cond:
	// result: (Const8 [int64(int8(c) >> uint64(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst8)
		v.AuxInt = int64(int8(c) >> uint64(d))
		return true
	}
	// match: (Rsh8x64 (Const8 [0]) _)
	// cond:
	// result: (Const8 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = 0
		return true
	}
	//
v.reset(OpSlicePtr)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpStore decomposes stores of small struct values
// into per-field stores, and turns a store of a non-SSA-able loaded
// value into a Move, preserving an intervening VarDef if present.
func rewriteValuegeneric_OpStore(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Store _ (StructMake0) mem)
	// cond:
	// result: mem
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpStructMake0 {
			break
		}
		mem := v.Args[2]
		v.reset(OpCopy)
		v.Type = mem.Type
		v.AddArg(mem)
		return true
	}
	// match: (Store dst (StructMake1 <t> f0) mem)
	// cond:
	// result: (Store [t.FieldType(0).Size()] dst f0 mem)
	for {
		dst := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpStructMake1 {
			break
		}
		t := v_1.Type
		f0 := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpStore)
		v.AuxInt = t.FieldType(0).Size()
		v.AddArg(dst)
		v.AddArg(f0)
		v.AddArg(mem)
		return true
	}
	// match: (Store dst (StructMake2 <t> f0 f1) mem)
	// cond:
	// result: (Store [t.FieldType(1).Size()] (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst) f1 (Store [t.FieldType(0).Size()] dst f0 mem))
	for {
		dst := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpStructMake2 {
			break
		}
		t := v_1.Type
		f0 := v_1.Args[0]
		f1 := v_1.Args[1]
		mem := v.Args[2]
		v.reset(OpStore)
		v.AuxInt = t.FieldType(1).Size()
		v0 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(1).PtrTo())
		v0.AuxInt = t.FieldOff(1)
		v0.AddArg(dst)
		v.AddArg(v0)
		v.AddArg(f1)
		v1 := b.NewValue0(v.Line, OpStore, TypeMem)
		v1.AuxInt = t.FieldType(0).Size()
		v1.AddArg(dst)
		v1.AddArg(f0)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Store dst (StructMake3 <t> f0 f1 f2) mem)
	// cond:
	// result: (Store [t.FieldType(2).Size()] (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] dst) f2 (Store [t.FieldType(1).Size()] (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst) f1 (Store [t.FieldType(0).Size()] dst f0 mem)))
	for {
		dst := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpStructMake3 {
			break
		}
		t := v_1.Type
		f0 := v_1.Args[0]
		f1 := v_1.Args[1]
		f2 := v_1.Args[2]
		mem := v.Args[2]
		v.reset(OpStore)
		v.AuxInt = t.FieldType(2).Size()
		v0 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(2).PtrTo())
		v0.AuxInt = t.FieldOff(2)
		v0.AddArg(dst)
		v.AddArg(v0)
		v.AddArg(f2)
		v1 := b.NewValue0(v.Line, OpStore, TypeMem)
		v1.AuxInt = t.FieldType(1).Size()
		v2 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(1).PtrTo())
		v2.AuxInt = t.FieldOff(1)
		v2.AddArg(dst)
		v1.AddArg(v2)
		v1.AddArg(f1)
		v3 := b.NewValue0(v.Line, OpStore, TypeMem)
		v3.AuxInt = t.FieldType(0).Size()
		v3.AddArg(dst)
		v3.AddArg(f0)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Store dst (StructMake4 <t> f0 f1 f2 f3) mem)
	// cond:
	// result: (Store [t.FieldType(3).Size()] (OffPtr <t.FieldType(3).PtrTo()> [t.FieldOff(3)] dst) f3 (Store [t.FieldType(2).Size()] (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] dst) f2 (Store [t.FieldType(1).Size()] (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst) f1 (Store [t.FieldType(0).Size()] dst f0 mem))))
	for {
		dst := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpStructMake4 {
			break
		}
		t := v_1.Type
		f0 := v_1.Args[0]
		f1 := v_1.Args[1]
		f2 := v_1.Args[2]
		f3 := v_1.Args[3]
		mem := v.Args[2]
		v.reset(OpStore)
		v.AuxInt = t.FieldType(3).Size()
		v0 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(3).PtrTo())
		v0.AuxInt = t.FieldOff(3)
		v0.AddArg(dst)
		v.AddArg(v0)
		v.AddArg(f3)
		v1 := b.NewValue0(v.Line, OpStore, TypeMem)
		v1.AuxInt = t.FieldType(2).Size()
		v2 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(2).PtrTo())
		v2.AuxInt = t.FieldOff(2)
		v2.AddArg(dst)
		v1.AddArg(v2)
		v1.AddArg(f2)
		v3 := b.NewValue0(v.Line, OpStore, TypeMem)
		v3.AuxInt = t.FieldType(1).Size()
		v4 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(1).PtrTo())
		v4.AuxInt = t.FieldOff(1)
		v4.AddArg(dst)
		v3.AddArg(v4)
		v3.AddArg(f1)
		v5 := b.NewValue0(v.Line, OpStore, TypeMem)
		v5.AuxInt = t.FieldType(0).Size()
		v5.AddArg(dst)
		v5.AddArg(f0)
		v5.AddArg(mem)
		v3.AddArg(v5)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Store [size] dst (Load <t> src mem) mem)
	// cond: !config.fe.CanSSA(t)
	// result: (Move [size] dst src mem)
	for {
		size := v.AuxInt
		dst := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpLoad {
			break
		}
		t := v_1.Type
		src := v_1.Args[0]
		mem := v_1.Args[1]
		if mem != v.Args[2] {
			break
		}
		if !(!config.fe.CanSSA(t)) {
			break
		}
		v.reset(OpMove)
		v.AuxInt = size
		v.AddArg(dst)
		v.AddArg(src)
		v.AddArg(mem)
		return true
	}
	// match: (Store [size] dst (Load <t> src mem) (VarDef {x} mem))
	// cond: !config.fe.CanSSA(t)
	// result: (Move [size] dst src (VarDef {x} mem))
	for {
		size := v.AuxInt
		dst := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpLoad {
			break
		}
		t := v_1.Type
		src := v_1.Args[0]
		mem := v_1.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpVarDef {
			break
		}
		x := v_2.Aux
		if mem != v_2.Args[0] {
			break
		}
		if !(!config.fe.CanSSA(t)) {
			break
		}
		v.reset(OpMove)
		v.AuxInt = size
		v.AddArg(dst)
		v.AddArg(src)
		v0 := b.NewValue0(v.Line, OpVarDef, TypeMem)
		v0.Aux = x
		v0.AddArg(mem)
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValuegeneric_OpStringLen folds the length of a constructed
// string when it is a constant.
func rewriteValuegeneric_OpStringLen(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (StringLen (StringMake _ (Const64 <t> [c])))
	// cond:
	// result: (Const64 <t> [c])
	for {
v_0 := v.Args[0]
		if v_0.Op != OpStringMake {
			break
		}
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		t := v_0_1.Type
		c := v_0_1.AuxInt
		v.reset(OpConst64)
		v.Type = t
		v.AuxInt = c
		return true
	}
	return false
}

// rewriteValuegeneric_OpStringPtr folds the pointer of a constructed
// string when it is a constant.
func rewriteValuegeneric_OpStringPtr(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (StringPtr (StringMake (Const64 <t> [c]) _))
	// cond:
	// result: (Const64 <t> [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpStringMake {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst64 {
			break
		}
		t := v_0_0.Type
		c := v_0_0.AuxInt
		v.reset(OpConst64)
		v.Type = t
		v.AuxInt = c
		return true
	}
	return false
}

// rewriteValuegeneric_OpStructSelect extracts a field directly from a
// StructMakeN, and rewrites a field select of a non-SSA-able loaded
// struct into a load through an offset pointer in the load's block.
func rewriteValuegeneric_OpStructSelect(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (StructSelect (StructMake1 x))
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpStructMake1 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (StructSelect [0] (StructMake2 x _))
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		v_0 := v.Args[0]
		if v_0.Op != OpStructMake2 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (StructSelect [1] (StructMake2 _ x))
	// cond:
	// result: x
	for {
		if v.AuxInt != 1 {
			break
		}
		v_0 := v.Args[0]
		if v_0.Op != OpStructMake2 {
			break
		}
		x := v_0.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (StructSelect [0] (StructMake3 x _ _))
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		v_0 := v.Args[0]
		if v_0.Op != OpStructMake3 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (StructSelect [1] (StructMake3 _ x _))
	// cond:
	// result: x
	for {
		if v.AuxInt != 1 {
			break
		}
		v_0 := v.Args[0]
		if v_0.Op != OpStructMake3 {
			break
		}
		x := v_0.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (StructSelect [2] (StructMake3 _ _ x))
	// cond:
	// result: x
	for {
		if v.AuxInt != 2 {
			break
		}
		v_0 := v.Args[0]
		if v_0.Op != OpStructMake3 {
			break
		}
		x := v_0.Args[2]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (StructSelect [0] (StructMake4 x _ _ _))
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		v_0 := v.Args[0]
		if v_0.Op != OpStructMake4 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (StructSelect [1] (StructMake4 _ x _ _))
	// cond:
	// result: x
	for {
		if v.AuxInt != 1 {
			break
		}
		v_0 := v.Args[0]
		if v_0.Op != OpStructMake4 {
			break
		}
		x := v_0.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (StructSelect [2] (StructMake4 _ _ x _))
	// cond:
	// result: x
	for {
		if v.AuxInt != 2 {
			break
		}
		v_0 := v.Args[0]
		if v_0.Op != OpStructMake4 {
			break
		}
		x := v_0.Args[2]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (StructSelect [3] (StructMake4 _ _ _ x))
	// cond:
	// result: x
	for {
		if v.AuxInt != 3 {
			break
		}
		v_0 := v.Args[0]
		if v_0.Op != OpStructMake4 {
			break
		}
		x := v_0.Args[3]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (StructSelect [i] x:(Load <t> ptr mem))
	// cond: !config.fe.CanSSA(t)
	// result: @x.Block (Load <v.Type> (OffPtr <v.Type.PtrTo()> [t.FieldOff(int(i))] ptr) mem)
	for {
		i := v.AuxInt
		x := v.Args[0]
		if x.Op != OpLoad {
			break
		}
		t := x.Type
		ptr := x.Args[0]
		mem := x.Args[1]
		if !(!config.fe.CanSSA(t)) {
			break
		}
		b = x.Block
		v0 := b.NewValue0(v.Line, OpLoad, v.Type)
		v.reset(OpCopy)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpOffPtr, v.Type.PtrTo())
		v1.AuxInt = t.FieldOff(int(i))
		v1.AddArg(ptr)
		v0.AddArg(v1)
		v0.AddArg(mem)
		return true
	}
	return false
}

// rewriteValuegeneric_OpSub16 constant-folds 16-bit subtraction,
// canonicalizes x-c to an addition of -c, and cancels x-x and
// (x+y)-x / (x+y)-y.
func rewriteValuegeneric_OpSub16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sub16 (Const16 [c]) (Const16 [d]))
	// cond:
	// result: (Const16 [int64(int16(c-d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst16)
		v.AuxInt = int64(int16(c - d))
		return true
	}
	// match: (Sub16 x (Const16 <t> [c]))
	// cond: x.Op != OpConst16
	// result: (Add16 (Const16 <t> [int64(int16(-c))]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst16) {
			break
		}
		v.reset(OpAdd16)
		v0 := b.NewValue0(v.Line, OpConst16, t)
		v0.AuxInt = int64(int16(-c))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Sub16 x x)
	// cond:
	// result: (Const16 [0])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = 0
		return true
	}
	// match: (Sub16 (Add16 x y) x)
// cond:
	// result: y
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAdd16 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (Sub16 (Add16 x y) y)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAdd16 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if y != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpSub32 constant-folds 32-bit subtraction,
// canonicalizes x-c to an addition of -c, and cancels x-x and
// (x+y)-x / (x+y)-y.
func rewriteValuegeneric_OpSub32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sub32 (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (Const32 [int64(int32(c-d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst32)
		v.AuxInt = int64(int32(c - d))
		return true
	}
	// match: (Sub32 x (Const32 <t> [c]))
	// cond: x.Op != OpConst32
	// result: (Add32 (Const32 <t> [int64(int32(-c))]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst32) {
			break
		}
		v.reset(OpAdd32)
		v0 := b.NewValue0(v.Line, OpConst32, t)
		v0.AuxInt = int64(int32(-c))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Sub32 x x)
	// cond:
	// result: (Const32 [0])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = 0
		return true
	}
	// match: (Sub32 (Add32 x y) x)
	// cond:
	// result: y
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAdd32 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (Sub32 (Add32 x y) y)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAdd32 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if y != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpSub32F constant-folds subtraction of two
// 32-bit float constants.
func rewriteValuegeneric_OpSub32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sub32F (Const32F [c]) (Const32F [d]))
	// cond:
	// result: (Const32F [f2i(float64(i2f32(c) - i2f32(d)))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32F {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32F {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst32F)
		v.AuxInt = f2i(float64(i2f32(c) - i2f32(d)))
		return true
	}
	return false
}

// rewriteValuegeneric_OpSub64 constant-folds 64-bit subtraction,
// canonicalizes x-c to an addition of -c, and cancels x-x and
// (x+y)-x / (x+y)-y.
func rewriteValuegeneric_OpSub64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sub64 (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (Const64 [c-d])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst64)
		v.AuxInt = c - d
		return true
	}
	// match: (Sub64 x (Const64 <t> [c]))
	// cond: x.Op != OpConst64
	// result: (Add64 (Const64 <t> [-c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst64) {
			break
		}
		v.reset(OpAdd64)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = -c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Sub64 x x)
	// cond:
	// result: (Const64 [0])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	// match: (Sub64 (Add64 x y) x)
	// cond:
	// result: y
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAdd64 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (Sub64 (Add64 x y) y)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAdd64 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if y != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpSub64F constant-folds subtraction of two
// 64-bit float constants.
func rewriteValuegeneric_OpSub64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sub64F (Const64F [c]) (Const64F [d]))
	// cond:
	// result: (Const64F [f2i(i2f(c) - i2f(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64F {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64F {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst64F)
		v.AuxInt = f2i(i2f(c) - i2f(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpSub8 constant-folds 8-bit subtraction,
// canonicalizes x-c to an addition of -c, and cancels x-x and
// (x+y)-x / (x+y)-y.
func rewriteValuegeneric_OpSub8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sub8 (Const8 [c]) (Const8 [d]))
	// cond:
	// result: (Const8 [int64(int8(c-d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst8)
		v.AuxInt = int64(int8(c - d))
		return true
	}
	// match: (Sub8 x (Const8 <t> [c]))
	// cond: x.Op != OpConst8
	// result: (Add8 (Const8 <t> [int64(int8(-c))]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if
v_1.Op != OpConst8 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst8) {
			break
		}
		v.reset(OpAdd8)
		v0 := b.NewValue0(v.Line, OpConst8, t)
		v0.AuxInt = int64(int8(-c))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Sub8 x x)
	// cond:
	// result: (Const8 [0])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = 0
		return true
	}
	// match: (Sub8 (Add8 x y) x)
	// cond:
	// result: y
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAdd8 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (Sub8 (Add8 x y) y)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAdd8 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if y != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpTrunc16to8 constant-folds 16-to-8-bit
// truncation and drops an And16 mask that keeps the low 8 bits.
func rewriteValuegeneric_OpTrunc16to8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Trunc16to8 (Const16 [c]))
	// cond:
	// result: (Const8 [int64(int8(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v.reset(OpConst8)
		v.AuxInt = int64(int8(c))
		return true
	}
	// match: (Trunc16to8 (And16 (Const16 [y]) x))
	// cond: y&0xFF == 0xFF
	// result: (Trunc16to8 x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAnd16 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst16 {
			break
		}
		y := v_0_0.AuxInt
		x := v_0.Args[1]
		if !(y&0xFF == 0xFF) {
			break
		}
		v.reset(OpTrunc16to8)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpTrunc32to16 constant-folds 32-to-16-bit
// truncation and drops an And32 mask that keeps the low 16 bits.
func rewriteValuegeneric_OpTrunc32to16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Trunc32to16 (Const32 [c]))
	// cond:
	// result: (Const16 [int64(int16(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v.reset(OpConst16)
		v.AuxInt = int64(int16(c))
		return true
	}
	// match: (Trunc32to16 (And32 (Const32 [y]) x))
	// cond: y&0xFFFF == 0xFFFF
	// result: (Trunc32to16 x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAnd32 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst32 {
			break
		}
		y := v_0_0.AuxInt
		x := v_0.Args[1]
		if !(y&0xFFFF == 0xFFFF) {
			break
		}
		v.reset(OpTrunc32to16)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpTrunc32to8 constant-folds 32-to-8-bit
// truncation and drops an And32 mask that keeps the low 8 bits.
func rewriteValuegeneric_OpTrunc32to8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Trunc32to8 (Const32 [c]))
	// cond:
	// result: (Const8 [int64(int8(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v.reset(OpConst8)
		v.AuxInt = int64(int8(c))
		return true
	}
	// match: (Trunc32to8 (And32 (Const32 [y]) x))
	// cond: y&0xFF == 0xFF
	// result: (Trunc32to8 x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAnd32 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst32 {
			break
		}
		y := v_0_0.AuxInt
		x := v_0.Args[1]
		if !(y&0xFF == 0xFF) {
			break
		}
		v.reset(OpTrunc32to8)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpTrunc64to16 constant-folds 64-to-16-bit
// truncation and drops an And64 mask that keeps the low 16 bits.
func rewriteValuegeneric_OpTrunc64to16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Trunc64to16 (Const64 [c]))
	// cond:
	// result: (Const16 [int64(int16(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v.reset(OpConst16)
		v.AuxInt = int64(int16(c))
		return true
	}
	// match: (Trunc64to16 (And64 (Const64 [y]) x))
	// cond: y&0xFFFF == 0xFFFF
	// result: (Trunc64to16 x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAnd64 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst64 {
			break
		}
		y := v_0_0.AuxInt
		x := v_0.Args[1]
		if !(y&0xFFFF == 0xFFFF) {
			break
		}
		v.reset(OpTrunc64to16)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpTrunc64to32 constant-folds 64-to-32-bit
// truncation and drops an And64 mask that keeps the low 32 bits.
func rewriteValuegeneric_OpTrunc64to32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Trunc64to32 (Const64 [c]))
	// cond:
	// result: (Const32 [int64(int32(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v.reset(OpConst32)
		v.AuxInt = int64(int32(c))
		return true
	}
	// match: (Trunc64to32 (And64 (Const64 [y]) x))
	// cond: y&0xFFFFFFFF == 0xFFFFFFFF
	// result: (Trunc64to32 x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAnd64 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst64 {
			break
		}
		y := v_0_0.AuxInt
		x := v_0.Args[1]
		if !(y&0xFFFFFFFF == 0xFFFFFFFF) {
			break
		}
		v.reset(OpTrunc64to32)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpTrunc64to8 constant-folds 64-to-8-bit
// truncation and drops an And64 mask that keeps the low 8 bits.
func rewriteValuegeneric_OpTrunc64to8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Trunc64to8 (Const64 [c]))
	// cond:
	// result: (Const8 [int64(int8(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v.reset(OpConst8)
		v.AuxInt = int64(int8(c))
		return true
	}
	// match: (Trunc64to8 (And64 (Const64 [y]) x))
	// cond: y&0xFF == 0xFF
	// result: (Trunc64to8 x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAnd64 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst64 {
			break
		}
		y := v_0_0.AuxInt
		x := v_0.Args[1]
		if !(y&0xFF == 0xFF) {
			break
		}
		v.reset(OpTrunc64to8)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpXor16 canonicalizes a constant operand to the
// left, cancels x^x to 0, and elides xor with the constant 0.
func rewriteValuegeneric_OpXor16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Xor16 x (Const16 <t> [c]))
	// cond: x.Op != OpConst16
	// result: (Xor16 (Const16 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst16) {
			break
		}
		v.reset(OpXor16)
		v0 := b.NewValue0(v.Line, OpConst16, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Xor16 x x)
	// cond:
	// result: (Const16 [0])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = 0
		return true
	}
	// match: (Xor16 (Const16 [0]) x)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpXor32 canonicalizes a constant operand to the
// left and cancels x^x to 0 (continues past this view).
func rewriteValuegeneric_OpXor32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Xor32 x (Const32 <t> [c]))
	// cond: x.Op != OpConst32
	// result: (Xor32 (Const32 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst32) {
			break
		}
		v.reset(OpXor32)
		v0 := b.NewValue0(v.Line, OpConst32, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Xor32 x x)
	// cond:
	// result: (Const32 [0])
	for {
		x := v.Args[0]
		if
x != v.Args[1] { 8976 break 8977 } 8978 v.reset(OpConst32) 8979 v.AuxInt = 0 8980 return true 8981 } 8982 // match: (Xor32 (Const32 [0]) x) 8983 // cond: 8984 // result: x 8985 for { 8986 v_0 := v.Args[0] 8987 if v_0.Op != OpConst32 { 8988 break 8989 } 8990 if v_0.AuxInt != 0 { 8991 break 8992 } 8993 x := v.Args[1] 8994 v.reset(OpCopy) 8995 v.Type = x.Type 8996 v.AddArg(x) 8997 return true 8998 } 8999 return false 9000 } 9001 func rewriteValuegeneric_OpXor64(v *Value, config *Config) bool { 9002 b := v.Block 9003 _ = b 9004 // match: (Xor64 x (Const64 <t> [c])) 9005 // cond: x.Op != OpConst64 9006 // result: (Xor64 (Const64 <t> [c]) x) 9007 for { 9008 x := v.Args[0] 9009 v_1 := v.Args[1] 9010 if v_1.Op != OpConst64 { 9011 break 9012 } 9013 t := v_1.Type 9014 c := v_1.AuxInt 9015 if !(x.Op != OpConst64) { 9016 break 9017 } 9018 v.reset(OpXor64) 9019 v0 := b.NewValue0(v.Line, OpConst64, t) 9020 v0.AuxInt = c 9021 v.AddArg(v0) 9022 v.AddArg(x) 9023 return true 9024 } 9025 // match: (Xor64 x x) 9026 // cond: 9027 // result: (Const64 [0]) 9028 for { 9029 x := v.Args[0] 9030 if x != v.Args[1] { 9031 break 9032 } 9033 v.reset(OpConst64) 9034 v.AuxInt = 0 9035 return true 9036 } 9037 // match: (Xor64 (Const64 [0]) x) 9038 // cond: 9039 // result: x 9040 for { 9041 v_0 := v.Args[0] 9042 if v_0.Op != OpConst64 { 9043 break 9044 } 9045 if v_0.AuxInt != 0 { 9046 break 9047 } 9048 x := v.Args[1] 9049 v.reset(OpCopy) 9050 v.Type = x.Type 9051 v.AddArg(x) 9052 return true 9053 } 9054 return false 9055 } 9056 func rewriteValuegeneric_OpXor8(v *Value, config *Config) bool { 9057 b := v.Block 9058 _ = b 9059 // match: (Xor8 x (Const8 <t> [c])) 9060 // cond: x.Op != OpConst8 9061 // result: (Xor8 (Const8 <t> [c]) x) 9062 for { 9063 x := v.Args[0] 9064 v_1 := v.Args[1] 9065 if v_1.Op != OpConst8 { 9066 break 9067 } 9068 t := v_1.Type 9069 c := v_1.AuxInt 9070 if !(x.Op != OpConst8) { 9071 break 9072 } 9073 v.reset(OpXor8) 9074 v0 := b.NewValue0(v.Line, OpConst8, t) 9075 v0.AuxInt = c 
9076 v.AddArg(v0) 9077 v.AddArg(x) 9078 return true 9079 } 9080 // match: (Xor8 x x) 9081 // cond: 9082 // result: (Const8 [0]) 9083 for { 9084 x := v.Args[0] 9085 if x != v.Args[1] { 9086 break 9087 } 9088 v.reset(OpConst8) 9089 v.AuxInt = 0 9090 return true 9091 } 9092 // match: (Xor8 (Const8 [0]) x) 9093 // cond: 9094 // result: x 9095 for { 9096 v_0 := v.Args[0] 9097 if v_0.Op != OpConst8 { 9098 break 9099 } 9100 if v_0.AuxInt != 0 { 9101 break 9102 } 9103 x := v.Args[1] 9104 v.reset(OpCopy) 9105 v.Type = x.Type 9106 v.AddArg(x) 9107 return true 9108 } 9109 return false 9110 } 9111 func rewriteBlockgeneric(b *Block) bool { 9112 switch b.Kind { 9113 case BlockCheck: 9114 // match: (Check (NilCheck (GetG _) _) next) 9115 // cond: 9116 // result: (Plain nil next) 9117 for { 9118 v := b.Control 9119 if v.Op != OpNilCheck { 9120 break 9121 } 9122 v_0 := v.Args[0] 9123 if v_0.Op != OpGetG { 9124 break 9125 } 9126 next := b.Succs[0] 9127 b.Kind = BlockPlain 9128 b.SetControl(nil) 9129 b.Succs[0] = next 9130 b.Likely = BranchUnknown 9131 return true 9132 } 9133 case BlockIf: 9134 // match: (If (Not cond) yes no) 9135 // cond: 9136 // result: (If cond no yes) 9137 for { 9138 v := b.Control 9139 if v.Op != OpNot { 9140 break 9141 } 9142 cond := v.Args[0] 9143 yes := b.Succs[0] 9144 no := b.Succs[1] 9145 b.Kind = BlockIf 9146 b.SetControl(cond) 9147 b.Succs[0] = no 9148 b.Succs[1] = yes 9149 b.Likely *= -1 9150 return true 9151 } 9152 // match: (If (ConstBool [c]) yes no) 9153 // cond: c == 1 9154 // result: (First nil yes no) 9155 for { 9156 v := b.Control 9157 if v.Op != OpConstBool { 9158 break 9159 } 9160 c := v.AuxInt 9161 yes := b.Succs[0] 9162 no := b.Succs[1] 9163 if !(c == 1) { 9164 break 9165 } 9166 b.Kind = BlockFirst 9167 b.SetControl(nil) 9168 b.Succs[0] = yes 9169 b.Succs[1] = no 9170 return true 9171 } 9172 // match: (If (ConstBool [c]) yes no) 9173 // cond: c == 0 9174 // result: (First nil no yes) 9175 for { 9176 v := b.Control 9177 if v.Op != OpConstBool 
{ 9178 break 9179 } 9180 c := v.AuxInt 9181 yes := b.Succs[0] 9182 no := b.Succs[1] 9183 if !(c == 0) { 9184 break 9185 } 9186 b.Kind = BlockFirst 9187 b.SetControl(nil) 9188 b.Succs[0] = no 9189 b.Succs[1] = yes 9190 b.Likely *= -1 9191 return true 9192 } 9193 } 9194 return false 9195 }