github.com/dannin/go@v0.0.0-20161031215817-d35dfd405eaa/src/cmd/compile/internal/ssa/rewritegeneric.go (about) 1 // autogenerated from gen/generic.rules: do not edit! 2 // generated with: cd gen; go run *.go 3 4 package ssa 5 6 import "math" 7 8 var _ = math.MinInt8 // in case not otherwise used 9 func rewriteValuegeneric(v *Value, config *Config) bool { 10 switch v.Op { 11 case OpAdd16: 12 return rewriteValuegeneric_OpAdd16(v, config) 13 case OpAdd32: 14 return rewriteValuegeneric_OpAdd32(v, config) 15 case OpAdd32F: 16 return rewriteValuegeneric_OpAdd32F(v, config) 17 case OpAdd64: 18 return rewriteValuegeneric_OpAdd64(v, config) 19 case OpAdd64F: 20 return rewriteValuegeneric_OpAdd64F(v, config) 21 case OpAdd8: 22 return rewriteValuegeneric_OpAdd8(v, config) 23 case OpAddPtr: 24 return rewriteValuegeneric_OpAddPtr(v, config) 25 case OpAnd16: 26 return rewriteValuegeneric_OpAnd16(v, config) 27 case OpAnd32: 28 return rewriteValuegeneric_OpAnd32(v, config) 29 case OpAnd64: 30 return rewriteValuegeneric_OpAnd64(v, config) 31 case OpAnd8: 32 return rewriteValuegeneric_OpAnd8(v, config) 33 case OpArg: 34 return rewriteValuegeneric_OpArg(v, config) 35 case OpArraySelect: 36 return rewriteValuegeneric_OpArraySelect(v, config) 37 case OpCom16: 38 return rewriteValuegeneric_OpCom16(v, config) 39 case OpCom32: 40 return rewriteValuegeneric_OpCom32(v, config) 41 case OpCom64: 42 return rewriteValuegeneric_OpCom64(v, config) 43 case OpCom8: 44 return rewriteValuegeneric_OpCom8(v, config) 45 case OpConstInterface: 46 return rewriteValuegeneric_OpConstInterface(v, config) 47 case OpConstSlice: 48 return rewriteValuegeneric_OpConstSlice(v, config) 49 case OpConstString: 50 return rewriteValuegeneric_OpConstString(v, config) 51 case OpConvert: 52 return rewriteValuegeneric_OpConvert(v, config) 53 case OpCvt32Fto64F: 54 return rewriteValuegeneric_OpCvt32Fto64F(v, config) 55 case OpCvt64Fto32F: 56 return rewriteValuegeneric_OpCvt64Fto32F(v, config) 57 case OpDiv32F: 58 return 
rewriteValuegeneric_OpDiv32F(v, config) 59 case OpDiv64: 60 return rewriteValuegeneric_OpDiv64(v, config) 61 case OpDiv64F: 62 return rewriteValuegeneric_OpDiv64F(v, config) 63 case OpDiv64u: 64 return rewriteValuegeneric_OpDiv64u(v, config) 65 case OpEq16: 66 return rewriteValuegeneric_OpEq16(v, config) 67 case OpEq32: 68 return rewriteValuegeneric_OpEq32(v, config) 69 case OpEq64: 70 return rewriteValuegeneric_OpEq64(v, config) 71 case OpEq8: 72 return rewriteValuegeneric_OpEq8(v, config) 73 case OpEqB: 74 return rewriteValuegeneric_OpEqB(v, config) 75 case OpEqInter: 76 return rewriteValuegeneric_OpEqInter(v, config) 77 case OpEqPtr: 78 return rewriteValuegeneric_OpEqPtr(v, config) 79 case OpEqSlice: 80 return rewriteValuegeneric_OpEqSlice(v, config) 81 case OpGeq16: 82 return rewriteValuegeneric_OpGeq16(v, config) 83 case OpGeq16U: 84 return rewriteValuegeneric_OpGeq16U(v, config) 85 case OpGeq32: 86 return rewriteValuegeneric_OpGeq32(v, config) 87 case OpGeq32U: 88 return rewriteValuegeneric_OpGeq32U(v, config) 89 case OpGeq64: 90 return rewriteValuegeneric_OpGeq64(v, config) 91 case OpGeq64U: 92 return rewriteValuegeneric_OpGeq64U(v, config) 93 case OpGeq8: 94 return rewriteValuegeneric_OpGeq8(v, config) 95 case OpGeq8U: 96 return rewriteValuegeneric_OpGeq8U(v, config) 97 case OpGreater16: 98 return rewriteValuegeneric_OpGreater16(v, config) 99 case OpGreater16U: 100 return rewriteValuegeneric_OpGreater16U(v, config) 101 case OpGreater32: 102 return rewriteValuegeneric_OpGreater32(v, config) 103 case OpGreater32U: 104 return rewriteValuegeneric_OpGreater32U(v, config) 105 case OpGreater64: 106 return rewriteValuegeneric_OpGreater64(v, config) 107 case OpGreater64U: 108 return rewriteValuegeneric_OpGreater64U(v, config) 109 case OpGreater8: 110 return rewriteValuegeneric_OpGreater8(v, config) 111 case OpGreater8U: 112 return rewriteValuegeneric_OpGreater8U(v, config) 113 case OpIMake: 114 return rewriteValuegeneric_OpIMake(v, config) 115 case OpIsInBounds: 116 
return rewriteValuegeneric_OpIsInBounds(v, config) 117 case OpIsSliceInBounds: 118 return rewriteValuegeneric_OpIsSliceInBounds(v, config) 119 case OpLeq16: 120 return rewriteValuegeneric_OpLeq16(v, config) 121 case OpLeq16U: 122 return rewriteValuegeneric_OpLeq16U(v, config) 123 case OpLeq32: 124 return rewriteValuegeneric_OpLeq32(v, config) 125 case OpLeq32U: 126 return rewriteValuegeneric_OpLeq32U(v, config) 127 case OpLeq64: 128 return rewriteValuegeneric_OpLeq64(v, config) 129 case OpLeq64U: 130 return rewriteValuegeneric_OpLeq64U(v, config) 131 case OpLeq8: 132 return rewriteValuegeneric_OpLeq8(v, config) 133 case OpLeq8U: 134 return rewriteValuegeneric_OpLeq8U(v, config) 135 case OpLess16: 136 return rewriteValuegeneric_OpLess16(v, config) 137 case OpLess16U: 138 return rewriteValuegeneric_OpLess16U(v, config) 139 case OpLess32: 140 return rewriteValuegeneric_OpLess32(v, config) 141 case OpLess32U: 142 return rewriteValuegeneric_OpLess32U(v, config) 143 case OpLess64: 144 return rewriteValuegeneric_OpLess64(v, config) 145 case OpLess64U: 146 return rewriteValuegeneric_OpLess64U(v, config) 147 case OpLess8: 148 return rewriteValuegeneric_OpLess8(v, config) 149 case OpLess8U: 150 return rewriteValuegeneric_OpLess8U(v, config) 151 case OpLoad: 152 return rewriteValuegeneric_OpLoad(v, config) 153 case OpLsh16x16: 154 return rewriteValuegeneric_OpLsh16x16(v, config) 155 case OpLsh16x32: 156 return rewriteValuegeneric_OpLsh16x32(v, config) 157 case OpLsh16x64: 158 return rewriteValuegeneric_OpLsh16x64(v, config) 159 case OpLsh16x8: 160 return rewriteValuegeneric_OpLsh16x8(v, config) 161 case OpLsh32x16: 162 return rewriteValuegeneric_OpLsh32x16(v, config) 163 case OpLsh32x32: 164 return rewriteValuegeneric_OpLsh32x32(v, config) 165 case OpLsh32x64: 166 return rewriteValuegeneric_OpLsh32x64(v, config) 167 case OpLsh32x8: 168 return rewriteValuegeneric_OpLsh32x8(v, config) 169 case OpLsh64x16: 170 return rewriteValuegeneric_OpLsh64x16(v, config) 171 case OpLsh64x32: 
172 return rewriteValuegeneric_OpLsh64x32(v, config) 173 case OpLsh64x64: 174 return rewriteValuegeneric_OpLsh64x64(v, config) 175 case OpLsh64x8: 176 return rewriteValuegeneric_OpLsh64x8(v, config) 177 case OpLsh8x16: 178 return rewriteValuegeneric_OpLsh8x16(v, config) 179 case OpLsh8x32: 180 return rewriteValuegeneric_OpLsh8x32(v, config) 181 case OpLsh8x64: 182 return rewriteValuegeneric_OpLsh8x64(v, config) 183 case OpLsh8x8: 184 return rewriteValuegeneric_OpLsh8x8(v, config) 185 case OpMod16: 186 return rewriteValuegeneric_OpMod16(v, config) 187 case OpMod16u: 188 return rewriteValuegeneric_OpMod16u(v, config) 189 case OpMod32: 190 return rewriteValuegeneric_OpMod32(v, config) 191 case OpMod32u: 192 return rewriteValuegeneric_OpMod32u(v, config) 193 case OpMod64: 194 return rewriteValuegeneric_OpMod64(v, config) 195 case OpMod64u: 196 return rewriteValuegeneric_OpMod64u(v, config) 197 case OpMod8: 198 return rewriteValuegeneric_OpMod8(v, config) 199 case OpMod8u: 200 return rewriteValuegeneric_OpMod8u(v, config) 201 case OpMul16: 202 return rewriteValuegeneric_OpMul16(v, config) 203 case OpMul32: 204 return rewriteValuegeneric_OpMul32(v, config) 205 case OpMul32F: 206 return rewriteValuegeneric_OpMul32F(v, config) 207 case OpMul64: 208 return rewriteValuegeneric_OpMul64(v, config) 209 case OpMul64F: 210 return rewriteValuegeneric_OpMul64F(v, config) 211 case OpMul8: 212 return rewriteValuegeneric_OpMul8(v, config) 213 case OpNeg16: 214 return rewriteValuegeneric_OpNeg16(v, config) 215 case OpNeg32: 216 return rewriteValuegeneric_OpNeg32(v, config) 217 case OpNeg64: 218 return rewriteValuegeneric_OpNeg64(v, config) 219 case OpNeg8: 220 return rewriteValuegeneric_OpNeg8(v, config) 221 case OpNeq16: 222 return rewriteValuegeneric_OpNeq16(v, config) 223 case OpNeq32: 224 return rewriteValuegeneric_OpNeq32(v, config) 225 case OpNeq64: 226 return rewriteValuegeneric_OpNeq64(v, config) 227 case OpNeq8: 228 return rewriteValuegeneric_OpNeq8(v, config) 229 case OpNeqB: 
230 return rewriteValuegeneric_OpNeqB(v, config) 231 case OpNeqInter: 232 return rewriteValuegeneric_OpNeqInter(v, config) 233 case OpNeqPtr: 234 return rewriteValuegeneric_OpNeqPtr(v, config) 235 case OpNeqSlice: 236 return rewriteValuegeneric_OpNeqSlice(v, config) 237 case OpNilCheck: 238 return rewriteValuegeneric_OpNilCheck(v, config) 239 case OpNot: 240 return rewriteValuegeneric_OpNot(v, config) 241 case OpOffPtr: 242 return rewriteValuegeneric_OpOffPtr(v, config) 243 case OpOr16: 244 return rewriteValuegeneric_OpOr16(v, config) 245 case OpOr32: 246 return rewriteValuegeneric_OpOr32(v, config) 247 case OpOr64: 248 return rewriteValuegeneric_OpOr64(v, config) 249 case OpOr8: 250 return rewriteValuegeneric_OpOr8(v, config) 251 case OpPhi: 252 return rewriteValuegeneric_OpPhi(v, config) 253 case OpPtrIndex: 254 return rewriteValuegeneric_OpPtrIndex(v, config) 255 case OpRsh16Ux16: 256 return rewriteValuegeneric_OpRsh16Ux16(v, config) 257 case OpRsh16Ux32: 258 return rewriteValuegeneric_OpRsh16Ux32(v, config) 259 case OpRsh16Ux64: 260 return rewriteValuegeneric_OpRsh16Ux64(v, config) 261 case OpRsh16Ux8: 262 return rewriteValuegeneric_OpRsh16Ux8(v, config) 263 case OpRsh16x16: 264 return rewriteValuegeneric_OpRsh16x16(v, config) 265 case OpRsh16x32: 266 return rewriteValuegeneric_OpRsh16x32(v, config) 267 case OpRsh16x64: 268 return rewriteValuegeneric_OpRsh16x64(v, config) 269 case OpRsh16x8: 270 return rewriteValuegeneric_OpRsh16x8(v, config) 271 case OpRsh32Ux16: 272 return rewriteValuegeneric_OpRsh32Ux16(v, config) 273 case OpRsh32Ux32: 274 return rewriteValuegeneric_OpRsh32Ux32(v, config) 275 case OpRsh32Ux64: 276 return rewriteValuegeneric_OpRsh32Ux64(v, config) 277 case OpRsh32Ux8: 278 return rewriteValuegeneric_OpRsh32Ux8(v, config) 279 case OpRsh32x16: 280 return rewriteValuegeneric_OpRsh32x16(v, config) 281 case OpRsh32x32: 282 return rewriteValuegeneric_OpRsh32x32(v, config) 283 case OpRsh32x64: 284 return rewriteValuegeneric_OpRsh32x64(v, config) 285 
case OpRsh32x8: 286 return rewriteValuegeneric_OpRsh32x8(v, config) 287 case OpRsh64Ux16: 288 return rewriteValuegeneric_OpRsh64Ux16(v, config) 289 case OpRsh64Ux32: 290 return rewriteValuegeneric_OpRsh64Ux32(v, config) 291 case OpRsh64Ux64: 292 return rewriteValuegeneric_OpRsh64Ux64(v, config) 293 case OpRsh64Ux8: 294 return rewriteValuegeneric_OpRsh64Ux8(v, config) 295 case OpRsh64x16: 296 return rewriteValuegeneric_OpRsh64x16(v, config) 297 case OpRsh64x32: 298 return rewriteValuegeneric_OpRsh64x32(v, config) 299 case OpRsh64x64: 300 return rewriteValuegeneric_OpRsh64x64(v, config) 301 case OpRsh64x8: 302 return rewriteValuegeneric_OpRsh64x8(v, config) 303 case OpRsh8Ux16: 304 return rewriteValuegeneric_OpRsh8Ux16(v, config) 305 case OpRsh8Ux32: 306 return rewriteValuegeneric_OpRsh8Ux32(v, config) 307 case OpRsh8Ux64: 308 return rewriteValuegeneric_OpRsh8Ux64(v, config) 309 case OpRsh8Ux8: 310 return rewriteValuegeneric_OpRsh8Ux8(v, config) 311 case OpRsh8x16: 312 return rewriteValuegeneric_OpRsh8x16(v, config) 313 case OpRsh8x32: 314 return rewriteValuegeneric_OpRsh8x32(v, config) 315 case OpRsh8x64: 316 return rewriteValuegeneric_OpRsh8x64(v, config) 317 case OpRsh8x8: 318 return rewriteValuegeneric_OpRsh8x8(v, config) 319 case OpSignExt16to32: 320 return rewriteValuegeneric_OpSignExt16to32(v, config) 321 case OpSignExt16to64: 322 return rewriteValuegeneric_OpSignExt16to64(v, config) 323 case OpSignExt32to64: 324 return rewriteValuegeneric_OpSignExt32to64(v, config) 325 case OpSignExt8to16: 326 return rewriteValuegeneric_OpSignExt8to16(v, config) 327 case OpSignExt8to32: 328 return rewriteValuegeneric_OpSignExt8to32(v, config) 329 case OpSignExt8to64: 330 return rewriteValuegeneric_OpSignExt8to64(v, config) 331 case OpSliceCap: 332 return rewriteValuegeneric_OpSliceCap(v, config) 333 case OpSliceLen: 334 return rewriteValuegeneric_OpSliceLen(v, config) 335 case OpSlicePtr: 336 return rewriteValuegeneric_OpSlicePtr(v, config) 337 case OpSlicemask: 338 return 
rewriteValuegeneric_OpSlicemask(v, config) 339 case OpSqrt: 340 return rewriteValuegeneric_OpSqrt(v, config) 341 case OpStore: 342 return rewriteValuegeneric_OpStore(v, config) 343 case OpStringLen: 344 return rewriteValuegeneric_OpStringLen(v, config) 345 case OpStringPtr: 346 return rewriteValuegeneric_OpStringPtr(v, config) 347 case OpStructSelect: 348 return rewriteValuegeneric_OpStructSelect(v, config) 349 case OpSub16: 350 return rewriteValuegeneric_OpSub16(v, config) 351 case OpSub32: 352 return rewriteValuegeneric_OpSub32(v, config) 353 case OpSub32F: 354 return rewriteValuegeneric_OpSub32F(v, config) 355 case OpSub64: 356 return rewriteValuegeneric_OpSub64(v, config) 357 case OpSub64F: 358 return rewriteValuegeneric_OpSub64F(v, config) 359 case OpSub8: 360 return rewriteValuegeneric_OpSub8(v, config) 361 case OpTrunc16to8: 362 return rewriteValuegeneric_OpTrunc16to8(v, config) 363 case OpTrunc32to16: 364 return rewriteValuegeneric_OpTrunc32to16(v, config) 365 case OpTrunc32to8: 366 return rewriteValuegeneric_OpTrunc32to8(v, config) 367 case OpTrunc64to16: 368 return rewriteValuegeneric_OpTrunc64to16(v, config) 369 case OpTrunc64to32: 370 return rewriteValuegeneric_OpTrunc64to32(v, config) 371 case OpTrunc64to8: 372 return rewriteValuegeneric_OpTrunc64to8(v, config) 373 case OpXor16: 374 return rewriteValuegeneric_OpXor16(v, config) 375 case OpXor32: 376 return rewriteValuegeneric_OpXor32(v, config) 377 case OpXor64: 378 return rewriteValuegeneric_OpXor64(v, config) 379 case OpXor8: 380 return rewriteValuegeneric_OpXor8(v, config) 381 case OpZero: 382 return rewriteValuegeneric_OpZero(v, config) 383 case OpZeroExt16to32: 384 return rewriteValuegeneric_OpZeroExt16to32(v, config) 385 case OpZeroExt16to64: 386 return rewriteValuegeneric_OpZeroExt16to64(v, config) 387 case OpZeroExt32to64: 388 return rewriteValuegeneric_OpZeroExt32to64(v, config) 389 case OpZeroExt8to16: 390 return rewriteValuegeneric_OpZeroExt8to16(v, config) 391 case OpZeroExt8to32: 392 
return rewriteValuegeneric_OpZeroExt8to32(v, config) 393 case OpZeroExt8to64: 394 return rewriteValuegeneric_OpZeroExt8to64(v, config) 395 } 396 return false 397 } 398 func rewriteValuegeneric_OpAdd16(v *Value, config *Config) bool { 399 b := v.Block 400 _ = b 401 // match: (Add16 (Const16 [c]) (Const16 [d])) 402 // cond: 403 // result: (Const16 [int64(int16(c+d))]) 404 for { 405 v_0 := v.Args[0] 406 if v_0.Op != OpConst16 { 407 break 408 } 409 c := v_0.AuxInt 410 v_1 := v.Args[1] 411 if v_1.Op != OpConst16 { 412 break 413 } 414 d := v_1.AuxInt 415 v.reset(OpConst16) 416 v.AuxInt = int64(int16(c + d)) 417 return true 418 } 419 // match: (Add16 x (Const16 <t> [c])) 420 // cond: x.Op != OpConst16 421 // result: (Add16 (Const16 <t> [c]) x) 422 for { 423 x := v.Args[0] 424 v_1 := v.Args[1] 425 if v_1.Op != OpConst16 { 426 break 427 } 428 t := v_1.Type 429 c := v_1.AuxInt 430 if !(x.Op != OpConst16) { 431 break 432 } 433 v.reset(OpAdd16) 434 v0 := b.NewValue0(v.Line, OpConst16, t) 435 v0.AuxInt = c 436 v.AddArg(v0) 437 v.AddArg(x) 438 return true 439 } 440 // match: (Add16 (Const16 [0]) x) 441 // cond: 442 // result: x 443 for { 444 v_0 := v.Args[0] 445 if v_0.Op != OpConst16 { 446 break 447 } 448 if v_0.AuxInt != 0 { 449 break 450 } 451 x := v.Args[1] 452 v.reset(OpCopy) 453 v.Type = x.Type 454 v.AddArg(x) 455 return true 456 } 457 return false 458 } 459 func rewriteValuegeneric_OpAdd32(v *Value, config *Config) bool { 460 b := v.Block 461 _ = b 462 // match: (Add32 (Const32 [c]) (Const32 [d])) 463 // cond: 464 // result: (Const32 [int64(int32(c+d))]) 465 for { 466 v_0 := v.Args[0] 467 if v_0.Op != OpConst32 { 468 break 469 } 470 c := v_0.AuxInt 471 v_1 := v.Args[1] 472 if v_1.Op != OpConst32 { 473 break 474 } 475 d := v_1.AuxInt 476 v.reset(OpConst32) 477 v.AuxInt = int64(int32(c + d)) 478 return true 479 } 480 // match: (Add32 x (Const32 <t> [c])) 481 // cond: x.Op != OpConst32 482 // result: (Add32 (Const32 <t> [c]) x) 483 for { 484 x := v.Args[0] 485 v_1 := 
v.Args[1] 486 if v_1.Op != OpConst32 { 487 break 488 } 489 t := v_1.Type 490 c := v_1.AuxInt 491 if !(x.Op != OpConst32) { 492 break 493 } 494 v.reset(OpAdd32) 495 v0 := b.NewValue0(v.Line, OpConst32, t) 496 v0.AuxInt = c 497 v.AddArg(v0) 498 v.AddArg(x) 499 return true 500 } 501 // match: (Add32 (Const32 [0]) x) 502 // cond: 503 // result: x 504 for { 505 v_0 := v.Args[0] 506 if v_0.Op != OpConst32 { 507 break 508 } 509 if v_0.AuxInt != 0 { 510 break 511 } 512 x := v.Args[1] 513 v.reset(OpCopy) 514 v.Type = x.Type 515 v.AddArg(x) 516 return true 517 } 518 return false 519 } 520 func rewriteValuegeneric_OpAdd32F(v *Value, config *Config) bool { 521 b := v.Block 522 _ = b 523 // match: (Add32F (Const32F [c]) (Const32F [d])) 524 // cond: 525 // result: (Const32F [f2i(float64(i2f32(c) + i2f32(d)))]) 526 for { 527 v_0 := v.Args[0] 528 if v_0.Op != OpConst32F { 529 break 530 } 531 c := v_0.AuxInt 532 v_1 := v.Args[1] 533 if v_1.Op != OpConst32F { 534 break 535 } 536 d := v_1.AuxInt 537 v.reset(OpConst32F) 538 v.AuxInt = f2i(float64(i2f32(c) + i2f32(d))) 539 return true 540 } 541 // match: (Add32F x (Const32F [0])) 542 // cond: 543 // result: x 544 for { 545 x := v.Args[0] 546 v_1 := v.Args[1] 547 if v_1.Op != OpConst32F { 548 break 549 } 550 if v_1.AuxInt != 0 { 551 break 552 } 553 v.reset(OpCopy) 554 v.Type = x.Type 555 v.AddArg(x) 556 return true 557 } 558 // match: (Add32F (Const32F [0]) x) 559 // cond: 560 // result: x 561 for { 562 v_0 := v.Args[0] 563 if v_0.Op != OpConst32F { 564 break 565 } 566 if v_0.AuxInt != 0 { 567 break 568 } 569 x := v.Args[1] 570 v.reset(OpCopy) 571 v.Type = x.Type 572 v.AddArg(x) 573 return true 574 } 575 return false 576 } 577 func rewriteValuegeneric_OpAdd64(v *Value, config *Config) bool { 578 b := v.Block 579 _ = b 580 // match: (Add64 (Const64 [c]) (Const64 [d])) 581 // cond: 582 // result: (Const64 [c+d]) 583 for { 584 v_0 := v.Args[0] 585 if v_0.Op != OpConst64 { 586 break 587 } 588 c := v_0.AuxInt 589 v_1 := v.Args[1] 590 if 
v_1.Op != OpConst64 { 591 break 592 } 593 d := v_1.AuxInt 594 v.reset(OpConst64) 595 v.AuxInt = c + d 596 return true 597 } 598 // match: (Add64 x (Const64 <t> [c])) 599 // cond: x.Op != OpConst64 600 // result: (Add64 (Const64 <t> [c]) x) 601 for { 602 x := v.Args[0] 603 v_1 := v.Args[1] 604 if v_1.Op != OpConst64 { 605 break 606 } 607 t := v_1.Type 608 c := v_1.AuxInt 609 if !(x.Op != OpConst64) { 610 break 611 } 612 v.reset(OpAdd64) 613 v0 := b.NewValue0(v.Line, OpConst64, t) 614 v0.AuxInt = c 615 v.AddArg(v0) 616 v.AddArg(x) 617 return true 618 } 619 // match: (Add64 (Const64 [0]) x) 620 // cond: 621 // result: x 622 for { 623 v_0 := v.Args[0] 624 if v_0.Op != OpConst64 { 625 break 626 } 627 if v_0.AuxInt != 0 { 628 break 629 } 630 x := v.Args[1] 631 v.reset(OpCopy) 632 v.Type = x.Type 633 v.AddArg(x) 634 return true 635 } 636 return false 637 } 638 func rewriteValuegeneric_OpAdd64F(v *Value, config *Config) bool { 639 b := v.Block 640 _ = b 641 // match: (Add64F (Const64F [c]) (Const64F [d])) 642 // cond: 643 // result: (Const64F [f2i(i2f(c) + i2f(d))]) 644 for { 645 v_0 := v.Args[0] 646 if v_0.Op != OpConst64F { 647 break 648 } 649 c := v_0.AuxInt 650 v_1 := v.Args[1] 651 if v_1.Op != OpConst64F { 652 break 653 } 654 d := v_1.AuxInt 655 v.reset(OpConst64F) 656 v.AuxInt = f2i(i2f(c) + i2f(d)) 657 return true 658 } 659 // match: (Add64F x (Const64F [0])) 660 // cond: 661 // result: x 662 for { 663 x := v.Args[0] 664 v_1 := v.Args[1] 665 if v_1.Op != OpConst64F { 666 break 667 } 668 if v_1.AuxInt != 0 { 669 break 670 } 671 v.reset(OpCopy) 672 v.Type = x.Type 673 v.AddArg(x) 674 return true 675 } 676 // match: (Add64F (Const64F [0]) x) 677 // cond: 678 // result: x 679 for { 680 v_0 := v.Args[0] 681 if v_0.Op != OpConst64F { 682 break 683 } 684 if v_0.AuxInt != 0 { 685 break 686 } 687 x := v.Args[1] 688 v.reset(OpCopy) 689 v.Type = x.Type 690 v.AddArg(x) 691 return true 692 } 693 return false 694 } 695 func rewriteValuegeneric_OpAdd8(v *Value, config *Config) 
bool { 696 b := v.Block 697 _ = b 698 // match: (Add8 (Const8 [c]) (Const8 [d])) 699 // cond: 700 // result: (Const8 [int64(int8(c+d))]) 701 for { 702 v_0 := v.Args[0] 703 if v_0.Op != OpConst8 { 704 break 705 } 706 c := v_0.AuxInt 707 v_1 := v.Args[1] 708 if v_1.Op != OpConst8 { 709 break 710 } 711 d := v_1.AuxInt 712 v.reset(OpConst8) 713 v.AuxInt = int64(int8(c + d)) 714 return true 715 } 716 // match: (Add8 x (Const8 <t> [c])) 717 // cond: x.Op != OpConst8 718 // result: (Add8 (Const8 <t> [c]) x) 719 for { 720 x := v.Args[0] 721 v_1 := v.Args[1] 722 if v_1.Op != OpConst8 { 723 break 724 } 725 t := v_1.Type 726 c := v_1.AuxInt 727 if !(x.Op != OpConst8) { 728 break 729 } 730 v.reset(OpAdd8) 731 v0 := b.NewValue0(v.Line, OpConst8, t) 732 v0.AuxInt = c 733 v.AddArg(v0) 734 v.AddArg(x) 735 return true 736 } 737 // match: (Add8 (Const8 [0]) x) 738 // cond: 739 // result: x 740 for { 741 v_0 := v.Args[0] 742 if v_0.Op != OpConst8 { 743 break 744 } 745 if v_0.AuxInt != 0 { 746 break 747 } 748 x := v.Args[1] 749 v.reset(OpCopy) 750 v.Type = x.Type 751 v.AddArg(x) 752 return true 753 } 754 return false 755 } 756 func rewriteValuegeneric_OpAddPtr(v *Value, config *Config) bool { 757 b := v.Block 758 _ = b 759 // match: (AddPtr <t> x (Const64 [c])) 760 // cond: 761 // result: (OffPtr <t> x [c]) 762 for { 763 t := v.Type 764 x := v.Args[0] 765 v_1 := v.Args[1] 766 if v_1.Op != OpConst64 { 767 break 768 } 769 c := v_1.AuxInt 770 v.reset(OpOffPtr) 771 v.Type = t 772 v.AuxInt = c 773 v.AddArg(x) 774 return true 775 } 776 return false 777 } 778 func rewriteValuegeneric_OpAnd16(v *Value, config *Config) bool { 779 b := v.Block 780 _ = b 781 // match: (And16 x (Const16 <t> [c])) 782 // cond: x.Op != OpConst16 783 // result: (And16 (Const16 <t> [c]) x) 784 for { 785 x := v.Args[0] 786 v_1 := v.Args[1] 787 if v_1.Op != OpConst16 { 788 break 789 } 790 t := v_1.Type 791 c := v_1.AuxInt 792 if !(x.Op != OpConst16) { 793 break 794 } 795 v.reset(OpAnd16) 796 v0 := b.NewValue0(v.Line, 
OpConst16, t) 797 v0.AuxInt = c 798 v.AddArg(v0) 799 v.AddArg(x) 800 return true 801 } 802 // match: (And16 x x) 803 // cond: 804 // result: x 805 for { 806 x := v.Args[0] 807 if x != v.Args[1] { 808 break 809 } 810 v.reset(OpCopy) 811 v.Type = x.Type 812 v.AddArg(x) 813 return true 814 } 815 // match: (And16 (Const16 [-1]) x) 816 // cond: 817 // result: x 818 for { 819 v_0 := v.Args[0] 820 if v_0.Op != OpConst16 { 821 break 822 } 823 if v_0.AuxInt != -1 { 824 break 825 } 826 x := v.Args[1] 827 v.reset(OpCopy) 828 v.Type = x.Type 829 v.AddArg(x) 830 return true 831 } 832 // match: (And16 (Const16 [0]) _) 833 // cond: 834 // result: (Const16 [0]) 835 for { 836 v_0 := v.Args[0] 837 if v_0.Op != OpConst16 { 838 break 839 } 840 if v_0.AuxInt != 0 { 841 break 842 } 843 v.reset(OpConst16) 844 v.AuxInt = 0 845 return true 846 } 847 // match: (And16 x (And16 x y)) 848 // cond: 849 // result: (And16 x y) 850 for { 851 x := v.Args[0] 852 v_1 := v.Args[1] 853 if v_1.Op != OpAnd16 { 854 break 855 } 856 if x != v_1.Args[0] { 857 break 858 } 859 y := v_1.Args[1] 860 v.reset(OpAnd16) 861 v.AddArg(x) 862 v.AddArg(y) 863 return true 864 } 865 // match: (And16 x (And16 y x)) 866 // cond: 867 // result: (And16 x y) 868 for { 869 x := v.Args[0] 870 v_1 := v.Args[1] 871 if v_1.Op != OpAnd16 { 872 break 873 } 874 y := v_1.Args[0] 875 if x != v_1.Args[1] { 876 break 877 } 878 v.reset(OpAnd16) 879 v.AddArg(x) 880 v.AddArg(y) 881 return true 882 } 883 // match: (And16 (And16 x y) x) 884 // cond: 885 // result: (And16 x y) 886 for { 887 v_0 := v.Args[0] 888 if v_0.Op != OpAnd16 { 889 break 890 } 891 x := v_0.Args[0] 892 y := v_0.Args[1] 893 if x != v.Args[1] { 894 break 895 } 896 v.reset(OpAnd16) 897 v.AddArg(x) 898 v.AddArg(y) 899 return true 900 } 901 // match: (And16 (And16 x y) y) 902 // cond: 903 // result: (And16 x y) 904 for { 905 v_0 := v.Args[0] 906 if v_0.Op != OpAnd16 { 907 break 908 } 909 x := v_0.Args[0] 910 y := v_0.Args[1] 911 if y != v.Args[1] { 912 break 913 } 914 
v.reset(OpAnd16) 915 v.AddArg(x) 916 v.AddArg(y) 917 return true 918 } 919 return false 920 } 921 func rewriteValuegeneric_OpAnd32(v *Value, config *Config) bool { 922 b := v.Block 923 _ = b 924 // match: (And32 x (Const32 <t> [c])) 925 // cond: x.Op != OpConst32 926 // result: (And32 (Const32 <t> [c]) x) 927 for { 928 x := v.Args[0] 929 v_1 := v.Args[1] 930 if v_1.Op != OpConst32 { 931 break 932 } 933 t := v_1.Type 934 c := v_1.AuxInt 935 if !(x.Op != OpConst32) { 936 break 937 } 938 v.reset(OpAnd32) 939 v0 := b.NewValue0(v.Line, OpConst32, t) 940 v0.AuxInt = c 941 v.AddArg(v0) 942 v.AddArg(x) 943 return true 944 } 945 // match: (And32 x x) 946 // cond: 947 // result: x 948 for { 949 x := v.Args[0] 950 if x != v.Args[1] { 951 break 952 } 953 v.reset(OpCopy) 954 v.Type = x.Type 955 v.AddArg(x) 956 return true 957 } 958 // match: (And32 (Const32 [-1]) x) 959 // cond: 960 // result: x 961 for { 962 v_0 := v.Args[0] 963 if v_0.Op != OpConst32 { 964 break 965 } 966 if v_0.AuxInt != -1 { 967 break 968 } 969 x := v.Args[1] 970 v.reset(OpCopy) 971 v.Type = x.Type 972 v.AddArg(x) 973 return true 974 } 975 // match: (And32 (Const32 [0]) _) 976 // cond: 977 // result: (Const32 [0]) 978 for { 979 v_0 := v.Args[0] 980 if v_0.Op != OpConst32 { 981 break 982 } 983 if v_0.AuxInt != 0 { 984 break 985 } 986 v.reset(OpConst32) 987 v.AuxInt = 0 988 return true 989 } 990 // match: (And32 x (And32 x y)) 991 // cond: 992 // result: (And32 x y) 993 for { 994 x := v.Args[0] 995 v_1 := v.Args[1] 996 if v_1.Op != OpAnd32 { 997 break 998 } 999 if x != v_1.Args[0] { 1000 break 1001 } 1002 y := v_1.Args[1] 1003 v.reset(OpAnd32) 1004 v.AddArg(x) 1005 v.AddArg(y) 1006 return true 1007 } 1008 // match: (And32 x (And32 y x)) 1009 // cond: 1010 // result: (And32 x y) 1011 for { 1012 x := v.Args[0] 1013 v_1 := v.Args[1] 1014 if v_1.Op != OpAnd32 { 1015 break 1016 } 1017 y := v_1.Args[0] 1018 if x != v_1.Args[1] { 1019 break 1020 } 1021 v.reset(OpAnd32) 1022 v.AddArg(x) 1023 v.AddArg(y) 1024 return 
true 1025 } 1026 // match: (And32 (And32 x y) x) 1027 // cond: 1028 // result: (And32 x y) 1029 for { 1030 v_0 := v.Args[0] 1031 if v_0.Op != OpAnd32 { 1032 break 1033 } 1034 x := v_0.Args[0] 1035 y := v_0.Args[1] 1036 if x != v.Args[1] { 1037 break 1038 } 1039 v.reset(OpAnd32) 1040 v.AddArg(x) 1041 v.AddArg(y) 1042 return true 1043 } 1044 // match: (And32 (And32 x y) y) 1045 // cond: 1046 // result: (And32 x y) 1047 for { 1048 v_0 := v.Args[0] 1049 if v_0.Op != OpAnd32 { 1050 break 1051 } 1052 x := v_0.Args[0] 1053 y := v_0.Args[1] 1054 if y != v.Args[1] { 1055 break 1056 } 1057 v.reset(OpAnd32) 1058 v.AddArg(x) 1059 v.AddArg(y) 1060 return true 1061 } 1062 return false 1063 } 1064 func rewriteValuegeneric_OpAnd64(v *Value, config *Config) bool { 1065 b := v.Block 1066 _ = b 1067 // match: (And64 x (Const64 <t> [c])) 1068 // cond: x.Op != OpConst64 1069 // result: (And64 (Const64 <t> [c]) x) 1070 for { 1071 x := v.Args[0] 1072 v_1 := v.Args[1] 1073 if v_1.Op != OpConst64 { 1074 break 1075 } 1076 t := v_1.Type 1077 c := v_1.AuxInt 1078 if !(x.Op != OpConst64) { 1079 break 1080 } 1081 v.reset(OpAnd64) 1082 v0 := b.NewValue0(v.Line, OpConst64, t) 1083 v0.AuxInt = c 1084 v.AddArg(v0) 1085 v.AddArg(x) 1086 return true 1087 } 1088 // match: (And64 x x) 1089 // cond: 1090 // result: x 1091 for { 1092 x := v.Args[0] 1093 if x != v.Args[1] { 1094 break 1095 } 1096 v.reset(OpCopy) 1097 v.Type = x.Type 1098 v.AddArg(x) 1099 return true 1100 } 1101 // match: (And64 (Const64 [-1]) x) 1102 // cond: 1103 // result: x 1104 for { 1105 v_0 := v.Args[0] 1106 if v_0.Op != OpConst64 { 1107 break 1108 } 1109 if v_0.AuxInt != -1 { 1110 break 1111 } 1112 x := v.Args[1] 1113 v.reset(OpCopy) 1114 v.Type = x.Type 1115 v.AddArg(x) 1116 return true 1117 } 1118 // match: (And64 (Const64 [0]) _) 1119 // cond: 1120 // result: (Const64 [0]) 1121 for { 1122 v_0 := v.Args[0] 1123 if v_0.Op != OpConst64 { 1124 break 1125 } 1126 if v_0.AuxInt != 0 { 1127 break 1128 } 1129 v.reset(OpConst64) 1130 
v.AuxInt = 0 1131 return true 1132 } 1133 // match: (And64 x (And64 x y)) 1134 // cond: 1135 // result: (And64 x y) 1136 for { 1137 x := v.Args[0] 1138 v_1 := v.Args[1] 1139 if v_1.Op != OpAnd64 { 1140 break 1141 } 1142 if x != v_1.Args[0] { 1143 break 1144 } 1145 y := v_1.Args[1] 1146 v.reset(OpAnd64) 1147 v.AddArg(x) 1148 v.AddArg(y) 1149 return true 1150 } 1151 // match: (And64 x (And64 y x)) 1152 // cond: 1153 // result: (And64 x y) 1154 for { 1155 x := v.Args[0] 1156 v_1 := v.Args[1] 1157 if v_1.Op != OpAnd64 { 1158 break 1159 } 1160 y := v_1.Args[0] 1161 if x != v_1.Args[1] { 1162 break 1163 } 1164 v.reset(OpAnd64) 1165 v.AddArg(x) 1166 v.AddArg(y) 1167 return true 1168 } 1169 // match: (And64 (And64 x y) x) 1170 // cond: 1171 // result: (And64 x y) 1172 for { 1173 v_0 := v.Args[0] 1174 if v_0.Op != OpAnd64 { 1175 break 1176 } 1177 x := v_0.Args[0] 1178 y := v_0.Args[1] 1179 if x != v.Args[1] { 1180 break 1181 } 1182 v.reset(OpAnd64) 1183 v.AddArg(x) 1184 v.AddArg(y) 1185 return true 1186 } 1187 // match: (And64 (And64 x y) y) 1188 // cond: 1189 // result: (And64 x y) 1190 for { 1191 v_0 := v.Args[0] 1192 if v_0.Op != OpAnd64 { 1193 break 1194 } 1195 x := v_0.Args[0] 1196 y := v_0.Args[1] 1197 if y != v.Args[1] { 1198 break 1199 } 1200 v.reset(OpAnd64) 1201 v.AddArg(x) 1202 v.AddArg(y) 1203 return true 1204 } 1205 // match: (And64 <t> (Const64 [y]) x) 1206 // cond: nlz(y) + nto(y) == 64 && nto(y) >= 32 1207 // result: (Rsh64Ux64 (Lsh64x64 <t> x (Const64 <t> [nlz(y)])) (Const64 <t> [nlz(y)])) 1208 for { 1209 t := v.Type 1210 v_0 := v.Args[0] 1211 if v_0.Op != OpConst64 { 1212 break 1213 } 1214 y := v_0.AuxInt 1215 x := v.Args[1] 1216 if !(nlz(y)+nto(y) == 64 && nto(y) >= 32) { 1217 break 1218 } 1219 v.reset(OpRsh64Ux64) 1220 v0 := b.NewValue0(v.Line, OpLsh64x64, t) 1221 v0.AddArg(x) 1222 v1 := b.NewValue0(v.Line, OpConst64, t) 1223 v1.AuxInt = nlz(y) 1224 v0.AddArg(v1) 1225 v.AddArg(v0) 1226 v2 := b.NewValue0(v.Line, OpConst64, t) 1227 v2.AuxInt = nlz(y) 1228 
v.AddArg(v2) 1229 return true 1230 } 1231 // match: (And64 <t> (Const64 [y]) x) 1232 // cond: nlo(y) + ntz(y) == 64 && ntz(y) >= 32 1233 // result: (Lsh64x64 (Rsh64Ux64 <t> x (Const64 <t> [ntz(y)])) (Const64 <t> [ntz(y)])) 1234 for { 1235 t := v.Type 1236 v_0 := v.Args[0] 1237 if v_0.Op != OpConst64 { 1238 break 1239 } 1240 y := v_0.AuxInt 1241 x := v.Args[1] 1242 if !(nlo(y)+ntz(y) == 64 && ntz(y) >= 32) { 1243 break 1244 } 1245 v.reset(OpLsh64x64) 1246 v0 := b.NewValue0(v.Line, OpRsh64Ux64, t) 1247 v0.AddArg(x) 1248 v1 := b.NewValue0(v.Line, OpConst64, t) 1249 v1.AuxInt = ntz(y) 1250 v0.AddArg(v1) 1251 v.AddArg(v0) 1252 v2 := b.NewValue0(v.Line, OpConst64, t) 1253 v2.AuxInt = ntz(y) 1254 v.AddArg(v2) 1255 return true 1256 } 1257 return false 1258 } 1259 func rewriteValuegeneric_OpAnd8(v *Value, config *Config) bool { 1260 b := v.Block 1261 _ = b 1262 // match: (And8 x (Const8 <t> [c])) 1263 // cond: x.Op != OpConst8 1264 // result: (And8 (Const8 <t> [c]) x) 1265 for { 1266 x := v.Args[0] 1267 v_1 := v.Args[1] 1268 if v_1.Op != OpConst8 { 1269 break 1270 } 1271 t := v_1.Type 1272 c := v_1.AuxInt 1273 if !(x.Op != OpConst8) { 1274 break 1275 } 1276 v.reset(OpAnd8) 1277 v0 := b.NewValue0(v.Line, OpConst8, t) 1278 v0.AuxInt = c 1279 v.AddArg(v0) 1280 v.AddArg(x) 1281 return true 1282 } 1283 // match: (And8 x x) 1284 // cond: 1285 // result: x 1286 for { 1287 x := v.Args[0] 1288 if x != v.Args[1] { 1289 break 1290 } 1291 v.reset(OpCopy) 1292 v.Type = x.Type 1293 v.AddArg(x) 1294 return true 1295 } 1296 // match: (And8 (Const8 [-1]) x) 1297 // cond: 1298 // result: x 1299 for { 1300 v_0 := v.Args[0] 1301 if v_0.Op != OpConst8 { 1302 break 1303 } 1304 if v_0.AuxInt != -1 { 1305 break 1306 } 1307 x := v.Args[1] 1308 v.reset(OpCopy) 1309 v.Type = x.Type 1310 v.AddArg(x) 1311 return true 1312 } 1313 // match: (And8 (Const8 [0]) _) 1314 // cond: 1315 // result: (Const8 [0]) 1316 for { 1317 v_0 := v.Args[0] 1318 if v_0.Op != OpConst8 { 1319 break 1320 } 1321 if v_0.AuxInt != 
0 { 1322 break 1323 } 1324 v.reset(OpConst8) 1325 v.AuxInt = 0 1326 return true 1327 } 1328 // match: (And8 x (And8 x y)) 1329 // cond: 1330 // result: (And8 x y) 1331 for { 1332 x := v.Args[0] 1333 v_1 := v.Args[1] 1334 if v_1.Op != OpAnd8 { 1335 break 1336 } 1337 if x != v_1.Args[0] { 1338 break 1339 } 1340 y := v_1.Args[1] 1341 v.reset(OpAnd8) 1342 v.AddArg(x) 1343 v.AddArg(y) 1344 return true 1345 } 1346 // match: (And8 x (And8 y x)) 1347 // cond: 1348 // result: (And8 x y) 1349 for { 1350 x := v.Args[0] 1351 v_1 := v.Args[1] 1352 if v_1.Op != OpAnd8 { 1353 break 1354 } 1355 y := v_1.Args[0] 1356 if x != v_1.Args[1] { 1357 break 1358 } 1359 v.reset(OpAnd8) 1360 v.AddArg(x) 1361 v.AddArg(y) 1362 return true 1363 } 1364 // match: (And8 (And8 x y) x) 1365 // cond: 1366 // result: (And8 x y) 1367 for { 1368 v_0 := v.Args[0] 1369 if v_0.Op != OpAnd8 { 1370 break 1371 } 1372 x := v_0.Args[0] 1373 y := v_0.Args[1] 1374 if x != v.Args[1] { 1375 break 1376 } 1377 v.reset(OpAnd8) 1378 v.AddArg(x) 1379 v.AddArg(y) 1380 return true 1381 } 1382 // match: (And8 (And8 x y) y) 1383 // cond: 1384 // result: (And8 x y) 1385 for { 1386 v_0 := v.Args[0] 1387 if v_0.Op != OpAnd8 { 1388 break 1389 } 1390 x := v_0.Args[0] 1391 y := v_0.Args[1] 1392 if y != v.Args[1] { 1393 break 1394 } 1395 v.reset(OpAnd8) 1396 v.AddArg(x) 1397 v.AddArg(y) 1398 return true 1399 } 1400 return false 1401 } 1402 func rewriteValuegeneric_OpArg(v *Value, config *Config) bool { 1403 b := v.Block 1404 _ = b 1405 // match: (Arg {n} [off]) 1406 // cond: v.Type.IsString() 1407 // result: (StringMake (Arg <config.fe.TypeBytePtr()> {n} [off]) (Arg <config.fe.TypeInt()> {n} [off+config.PtrSize])) 1408 for { 1409 off := v.AuxInt 1410 n := v.Aux 1411 if !(v.Type.IsString()) { 1412 break 1413 } 1414 v.reset(OpStringMake) 1415 v0 := b.NewValue0(v.Line, OpArg, config.fe.TypeBytePtr()) 1416 v0.AuxInt = off 1417 v0.Aux = n 1418 v.AddArg(v0) 1419 v1 := b.NewValue0(v.Line, OpArg, config.fe.TypeInt()) 1420 v1.AuxInt = off 
+ config.PtrSize 1421 v1.Aux = n 1422 v.AddArg(v1) 1423 return true 1424 } 1425 // match: (Arg {n} [off]) 1426 // cond: v.Type.IsSlice() 1427 // result: (SliceMake (Arg <v.Type.ElemType().PtrTo()> {n} [off]) (Arg <config.fe.TypeInt()> {n} [off+config.PtrSize]) (Arg <config.fe.TypeInt()> {n} [off+2*config.PtrSize])) 1428 for { 1429 off := v.AuxInt 1430 n := v.Aux 1431 if !(v.Type.IsSlice()) { 1432 break 1433 } 1434 v.reset(OpSliceMake) 1435 v0 := b.NewValue0(v.Line, OpArg, v.Type.ElemType().PtrTo()) 1436 v0.AuxInt = off 1437 v0.Aux = n 1438 v.AddArg(v0) 1439 v1 := b.NewValue0(v.Line, OpArg, config.fe.TypeInt()) 1440 v1.AuxInt = off + config.PtrSize 1441 v1.Aux = n 1442 v.AddArg(v1) 1443 v2 := b.NewValue0(v.Line, OpArg, config.fe.TypeInt()) 1444 v2.AuxInt = off + 2*config.PtrSize 1445 v2.Aux = n 1446 v.AddArg(v2) 1447 return true 1448 } 1449 // match: (Arg {n} [off]) 1450 // cond: v.Type.IsInterface() 1451 // result: (IMake (Arg <config.fe.TypeBytePtr()> {n} [off]) (Arg <config.fe.TypeBytePtr()> {n} [off+config.PtrSize])) 1452 for { 1453 off := v.AuxInt 1454 n := v.Aux 1455 if !(v.Type.IsInterface()) { 1456 break 1457 } 1458 v.reset(OpIMake) 1459 v0 := b.NewValue0(v.Line, OpArg, config.fe.TypeBytePtr()) 1460 v0.AuxInt = off 1461 v0.Aux = n 1462 v.AddArg(v0) 1463 v1 := b.NewValue0(v.Line, OpArg, config.fe.TypeBytePtr()) 1464 v1.AuxInt = off + config.PtrSize 1465 v1.Aux = n 1466 v.AddArg(v1) 1467 return true 1468 } 1469 // match: (Arg {n} [off]) 1470 // cond: v.Type.IsComplex() && v.Type.Size() == 16 1471 // result: (ComplexMake (Arg <config.fe.TypeFloat64()> {n} [off]) (Arg <config.fe.TypeFloat64()> {n} [off+8])) 1472 for { 1473 off := v.AuxInt 1474 n := v.Aux 1475 if !(v.Type.IsComplex() && v.Type.Size() == 16) { 1476 break 1477 } 1478 v.reset(OpComplexMake) 1479 v0 := b.NewValue0(v.Line, OpArg, config.fe.TypeFloat64()) 1480 v0.AuxInt = off 1481 v0.Aux = n 1482 v.AddArg(v0) 1483 v1 := b.NewValue0(v.Line, OpArg, config.fe.TypeFloat64()) 1484 v1.AuxInt = off + 8 1485 
v1.Aux = n 1486 v.AddArg(v1) 1487 return true 1488 } 1489 // match: (Arg {n} [off]) 1490 // cond: v.Type.IsComplex() && v.Type.Size() == 8 1491 // result: (ComplexMake (Arg <config.fe.TypeFloat32()> {n} [off]) (Arg <config.fe.TypeFloat32()> {n} [off+4])) 1492 for { 1493 off := v.AuxInt 1494 n := v.Aux 1495 if !(v.Type.IsComplex() && v.Type.Size() == 8) { 1496 break 1497 } 1498 v.reset(OpComplexMake) 1499 v0 := b.NewValue0(v.Line, OpArg, config.fe.TypeFloat32()) 1500 v0.AuxInt = off 1501 v0.Aux = n 1502 v.AddArg(v0) 1503 v1 := b.NewValue0(v.Line, OpArg, config.fe.TypeFloat32()) 1504 v1.AuxInt = off + 4 1505 v1.Aux = n 1506 v.AddArg(v1) 1507 return true 1508 } 1509 // match: (Arg <t>) 1510 // cond: t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t) 1511 // result: (StructMake0) 1512 for { 1513 t := v.Type 1514 if !(t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t)) { 1515 break 1516 } 1517 v.reset(OpStructMake0) 1518 return true 1519 } 1520 // match: (Arg <t> {n} [off]) 1521 // cond: t.IsStruct() && t.NumFields() == 1 && config.fe.CanSSA(t) 1522 // result: (StructMake1 (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)])) 1523 for { 1524 t := v.Type 1525 off := v.AuxInt 1526 n := v.Aux 1527 if !(t.IsStruct() && t.NumFields() == 1 && config.fe.CanSSA(t)) { 1528 break 1529 } 1530 v.reset(OpStructMake1) 1531 v0 := b.NewValue0(v.Line, OpArg, t.FieldType(0)) 1532 v0.AuxInt = off + t.FieldOff(0) 1533 v0.Aux = n 1534 v.AddArg(v0) 1535 return true 1536 } 1537 // match: (Arg <t> {n} [off]) 1538 // cond: t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t) 1539 // result: (StructMake2 (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)]) (Arg <t.FieldType(1)> {n} [off+t.FieldOff(1)])) 1540 for { 1541 t := v.Type 1542 off := v.AuxInt 1543 n := v.Aux 1544 if !(t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t)) { 1545 break 1546 } 1547 v.reset(OpStructMake2) 1548 v0 := b.NewValue0(v.Line, OpArg, t.FieldType(0)) 1549 v0.AuxInt = off + t.FieldOff(0) 1550 v0.Aux = n 
1551 v.AddArg(v0) 1552 v1 := b.NewValue0(v.Line, OpArg, t.FieldType(1)) 1553 v1.AuxInt = off + t.FieldOff(1) 1554 v1.Aux = n 1555 v.AddArg(v1) 1556 return true 1557 } 1558 // match: (Arg <t> {n} [off]) 1559 // cond: t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t) 1560 // result: (StructMake3 (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)]) (Arg <t.FieldType(1)> {n} [off+t.FieldOff(1)]) (Arg <t.FieldType(2)> {n} [off+t.FieldOff(2)])) 1561 for { 1562 t := v.Type 1563 off := v.AuxInt 1564 n := v.Aux 1565 if !(t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t)) { 1566 break 1567 } 1568 v.reset(OpStructMake3) 1569 v0 := b.NewValue0(v.Line, OpArg, t.FieldType(0)) 1570 v0.AuxInt = off + t.FieldOff(0) 1571 v0.Aux = n 1572 v.AddArg(v0) 1573 v1 := b.NewValue0(v.Line, OpArg, t.FieldType(1)) 1574 v1.AuxInt = off + t.FieldOff(1) 1575 v1.Aux = n 1576 v.AddArg(v1) 1577 v2 := b.NewValue0(v.Line, OpArg, t.FieldType(2)) 1578 v2.AuxInt = off + t.FieldOff(2) 1579 v2.Aux = n 1580 v.AddArg(v2) 1581 return true 1582 } 1583 // match: (Arg <t> {n} [off]) 1584 // cond: t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t) 1585 // result: (StructMake4 (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)]) (Arg <t.FieldType(1)> {n} [off+t.FieldOff(1)]) (Arg <t.FieldType(2)> {n} [off+t.FieldOff(2)]) (Arg <t.FieldType(3)> {n} [off+t.FieldOff(3)])) 1586 for { 1587 t := v.Type 1588 off := v.AuxInt 1589 n := v.Aux 1590 if !(t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t)) { 1591 break 1592 } 1593 v.reset(OpStructMake4) 1594 v0 := b.NewValue0(v.Line, OpArg, t.FieldType(0)) 1595 v0.AuxInt = off + t.FieldOff(0) 1596 v0.Aux = n 1597 v.AddArg(v0) 1598 v1 := b.NewValue0(v.Line, OpArg, t.FieldType(1)) 1599 v1.AuxInt = off + t.FieldOff(1) 1600 v1.Aux = n 1601 v.AddArg(v1) 1602 v2 := b.NewValue0(v.Line, OpArg, t.FieldType(2)) 1603 v2.AuxInt = off + t.FieldOff(2) 1604 v2.Aux = n 1605 v.AddArg(v2) 1606 v3 := b.NewValue0(v.Line, OpArg, t.FieldType(3)) 1607 v3.AuxInt = off + t.FieldOff(3) 
1608 v3.Aux = n 1609 v.AddArg(v3) 1610 return true 1611 } 1612 // match: (Arg <t>) 1613 // cond: t.IsArray() && t.NumElem() == 0 1614 // result: (ArrayMake0) 1615 for { 1616 t := v.Type 1617 if !(t.IsArray() && t.NumElem() == 0) { 1618 break 1619 } 1620 v.reset(OpArrayMake0) 1621 return true 1622 } 1623 // match: (Arg <t> {n} [off]) 1624 // cond: t.IsArray() && t.NumElem() == 1 && config.fe.CanSSA(t) 1625 // result: (ArrayMake1 (Arg <t.ElemType()> {n} [off])) 1626 for { 1627 t := v.Type 1628 off := v.AuxInt 1629 n := v.Aux 1630 if !(t.IsArray() && t.NumElem() == 1 && config.fe.CanSSA(t)) { 1631 break 1632 } 1633 v.reset(OpArrayMake1) 1634 v0 := b.NewValue0(v.Line, OpArg, t.ElemType()) 1635 v0.AuxInt = off 1636 v0.Aux = n 1637 v.AddArg(v0) 1638 return true 1639 } 1640 return false 1641 } 1642 func rewriteValuegeneric_OpArraySelect(v *Value, config *Config) bool { 1643 b := v.Block 1644 _ = b 1645 // match: (ArraySelect (ArrayMake1 x)) 1646 // cond: 1647 // result: x 1648 for { 1649 v_0 := v.Args[0] 1650 if v_0.Op != OpArrayMake1 { 1651 break 1652 } 1653 x := v_0.Args[0] 1654 v.reset(OpCopy) 1655 v.Type = x.Type 1656 v.AddArg(x) 1657 return true 1658 } 1659 // match: (ArraySelect [0] (Load ptr mem)) 1660 // cond: 1661 // result: (Load ptr mem) 1662 for { 1663 if v.AuxInt != 0 { 1664 break 1665 } 1666 v_0 := v.Args[0] 1667 if v_0.Op != OpLoad { 1668 break 1669 } 1670 ptr := v_0.Args[0] 1671 mem := v_0.Args[1] 1672 v.reset(OpLoad) 1673 v.AddArg(ptr) 1674 v.AddArg(mem) 1675 return true 1676 } 1677 return false 1678 } 1679 func rewriteValuegeneric_OpCom16(v *Value, config *Config) bool { 1680 b := v.Block 1681 _ = b 1682 // match: (Com16 (Com16 x)) 1683 // cond: 1684 // result: x 1685 for { 1686 v_0 := v.Args[0] 1687 if v_0.Op != OpCom16 { 1688 break 1689 } 1690 x := v_0.Args[0] 1691 v.reset(OpCopy) 1692 v.Type = x.Type 1693 v.AddArg(x) 1694 return true 1695 } 1696 return false 1697 } 1698 func rewriteValuegeneric_OpCom32(v *Value, config *Config) bool { 1699 b := 
v.Block 1700 _ = b 1701 // match: (Com32 (Com32 x)) 1702 // cond: 1703 // result: x 1704 for { 1705 v_0 := v.Args[0] 1706 if v_0.Op != OpCom32 { 1707 break 1708 } 1709 x := v_0.Args[0] 1710 v.reset(OpCopy) 1711 v.Type = x.Type 1712 v.AddArg(x) 1713 return true 1714 } 1715 return false 1716 } 1717 func rewriteValuegeneric_OpCom64(v *Value, config *Config) bool { 1718 b := v.Block 1719 _ = b 1720 // match: (Com64 (Com64 x)) 1721 // cond: 1722 // result: x 1723 for { 1724 v_0 := v.Args[0] 1725 if v_0.Op != OpCom64 { 1726 break 1727 } 1728 x := v_0.Args[0] 1729 v.reset(OpCopy) 1730 v.Type = x.Type 1731 v.AddArg(x) 1732 return true 1733 } 1734 return false 1735 } 1736 func rewriteValuegeneric_OpCom8(v *Value, config *Config) bool { 1737 b := v.Block 1738 _ = b 1739 // match: (Com8 (Com8 x)) 1740 // cond: 1741 // result: x 1742 for { 1743 v_0 := v.Args[0] 1744 if v_0.Op != OpCom8 { 1745 break 1746 } 1747 x := v_0.Args[0] 1748 v.reset(OpCopy) 1749 v.Type = x.Type 1750 v.AddArg(x) 1751 return true 1752 } 1753 return false 1754 } 1755 func rewriteValuegeneric_OpConstInterface(v *Value, config *Config) bool { 1756 b := v.Block 1757 _ = b 1758 // match: (ConstInterface) 1759 // cond: 1760 // result: (IMake (ConstNil <config.fe.TypeBytePtr()>) (ConstNil <config.fe.TypeBytePtr()>)) 1761 for { 1762 v.reset(OpIMake) 1763 v0 := b.NewValue0(v.Line, OpConstNil, config.fe.TypeBytePtr()) 1764 v.AddArg(v0) 1765 v1 := b.NewValue0(v.Line, OpConstNil, config.fe.TypeBytePtr()) 1766 v.AddArg(v1) 1767 return true 1768 } 1769 } 1770 func rewriteValuegeneric_OpConstSlice(v *Value, config *Config) bool { 1771 b := v.Block 1772 _ = b 1773 // match: (ConstSlice) 1774 // cond: config.PtrSize == 4 1775 // result: (SliceMake (ConstNil <v.Type.ElemType().PtrTo()>) (Const32 <config.fe.TypeInt()> [0]) (Const32 <config.fe.TypeInt()> [0])) 1776 for { 1777 if !(config.PtrSize == 4) { 1778 break 1779 } 1780 v.reset(OpSliceMake) 1781 v0 := b.NewValue0(v.Line, OpConstNil, v.Type.ElemType().PtrTo()) 1782 
v.AddArg(v0) 1783 v1 := b.NewValue0(v.Line, OpConst32, config.fe.TypeInt()) 1784 v1.AuxInt = 0 1785 v.AddArg(v1) 1786 v2 := b.NewValue0(v.Line, OpConst32, config.fe.TypeInt()) 1787 v2.AuxInt = 0 1788 v.AddArg(v2) 1789 return true 1790 } 1791 // match: (ConstSlice) 1792 // cond: config.PtrSize == 8 1793 // result: (SliceMake (ConstNil <v.Type.ElemType().PtrTo()>) (Const64 <config.fe.TypeInt()> [0]) (Const64 <config.fe.TypeInt()> [0])) 1794 for { 1795 if !(config.PtrSize == 8) { 1796 break 1797 } 1798 v.reset(OpSliceMake) 1799 v0 := b.NewValue0(v.Line, OpConstNil, v.Type.ElemType().PtrTo()) 1800 v.AddArg(v0) 1801 v1 := b.NewValue0(v.Line, OpConst64, config.fe.TypeInt()) 1802 v1.AuxInt = 0 1803 v.AddArg(v1) 1804 v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeInt()) 1805 v2.AuxInt = 0 1806 v.AddArg(v2) 1807 return true 1808 } 1809 return false 1810 } 1811 func rewriteValuegeneric_OpConstString(v *Value, config *Config) bool { 1812 b := v.Block 1813 _ = b 1814 // match: (ConstString {s}) 1815 // cond: config.PtrSize == 4 && s.(string) == "" 1816 // result: (StringMake (ConstNil) (Const32 <config.fe.TypeInt()> [0])) 1817 for { 1818 s := v.Aux 1819 if !(config.PtrSize == 4 && s.(string) == "") { 1820 break 1821 } 1822 v.reset(OpStringMake) 1823 v0 := b.NewValue0(v.Line, OpConstNil, config.fe.TypeBytePtr()) 1824 v.AddArg(v0) 1825 v1 := b.NewValue0(v.Line, OpConst32, config.fe.TypeInt()) 1826 v1.AuxInt = 0 1827 v.AddArg(v1) 1828 return true 1829 } 1830 // match: (ConstString {s}) 1831 // cond: config.PtrSize == 8 && s.(string) == "" 1832 // result: (StringMake (ConstNil) (Const64 <config.fe.TypeInt()> [0])) 1833 for { 1834 s := v.Aux 1835 if !(config.PtrSize == 8 && s.(string) == "") { 1836 break 1837 } 1838 v.reset(OpStringMake) 1839 v0 := b.NewValue0(v.Line, OpConstNil, config.fe.TypeBytePtr()) 1840 v.AddArg(v0) 1841 v1 := b.NewValue0(v.Line, OpConst64, config.fe.TypeInt()) 1842 v1.AuxInt = 0 1843 v.AddArg(v1) 1844 return true 1845 } 1846 // match: (ConstString {s}) 
1847 // cond: config.PtrSize == 4 && s.(string) != "" 1848 // result: (StringMake (Addr <config.fe.TypeBytePtr()> {config.fe.StringData(s.(string))} (SB)) (Const32 <config.fe.TypeInt()> [int64(len(s.(string)))])) 1849 for { 1850 s := v.Aux 1851 if !(config.PtrSize == 4 && s.(string) != "") { 1852 break 1853 } 1854 v.reset(OpStringMake) 1855 v0 := b.NewValue0(v.Line, OpAddr, config.fe.TypeBytePtr()) 1856 v0.Aux = config.fe.StringData(s.(string)) 1857 v1 := b.NewValue0(v.Line, OpSB, config.fe.TypeUintptr()) 1858 v0.AddArg(v1) 1859 v.AddArg(v0) 1860 v2 := b.NewValue0(v.Line, OpConst32, config.fe.TypeInt()) 1861 v2.AuxInt = int64(len(s.(string))) 1862 v.AddArg(v2) 1863 return true 1864 } 1865 // match: (ConstString {s}) 1866 // cond: config.PtrSize == 8 && s.(string) != "" 1867 // result: (StringMake (Addr <config.fe.TypeBytePtr()> {config.fe.StringData(s.(string))} (SB)) (Const64 <config.fe.TypeInt()> [int64(len(s.(string)))])) 1868 for { 1869 s := v.Aux 1870 if !(config.PtrSize == 8 && s.(string) != "") { 1871 break 1872 } 1873 v.reset(OpStringMake) 1874 v0 := b.NewValue0(v.Line, OpAddr, config.fe.TypeBytePtr()) 1875 v0.Aux = config.fe.StringData(s.(string)) 1876 v1 := b.NewValue0(v.Line, OpSB, config.fe.TypeUintptr()) 1877 v0.AddArg(v1) 1878 v.AddArg(v0) 1879 v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeInt()) 1880 v2.AuxInt = int64(len(s.(string))) 1881 v.AddArg(v2) 1882 return true 1883 } 1884 return false 1885 } 1886 func rewriteValuegeneric_OpConvert(v *Value, config *Config) bool { 1887 b := v.Block 1888 _ = b 1889 // match: (Convert (Add64 (Convert ptr mem) off) mem) 1890 // cond: 1891 // result: (Add64 ptr off) 1892 for { 1893 v_0 := v.Args[0] 1894 if v_0.Op != OpAdd64 { 1895 break 1896 } 1897 v_0_0 := v_0.Args[0] 1898 if v_0_0.Op != OpConvert { 1899 break 1900 } 1901 ptr := v_0_0.Args[0] 1902 mem := v_0_0.Args[1] 1903 off := v_0.Args[1] 1904 if mem != v.Args[1] { 1905 break 1906 } 1907 v.reset(OpAdd64) 1908 v.AddArg(ptr) 1909 v.AddArg(off) 1910 return 
true 1911 } 1912 // match: (Convert (Add64 off (Convert ptr mem)) mem) 1913 // cond: 1914 // result: (Add64 ptr off) 1915 for { 1916 v_0 := v.Args[0] 1917 if v_0.Op != OpAdd64 { 1918 break 1919 } 1920 off := v_0.Args[0] 1921 v_0_1 := v_0.Args[1] 1922 if v_0_1.Op != OpConvert { 1923 break 1924 } 1925 ptr := v_0_1.Args[0] 1926 mem := v_0_1.Args[1] 1927 if mem != v.Args[1] { 1928 break 1929 } 1930 v.reset(OpAdd64) 1931 v.AddArg(ptr) 1932 v.AddArg(off) 1933 return true 1934 } 1935 // match: (Convert (Convert ptr mem) mem) 1936 // cond: 1937 // result: ptr 1938 for { 1939 v_0 := v.Args[0] 1940 if v_0.Op != OpConvert { 1941 break 1942 } 1943 ptr := v_0.Args[0] 1944 mem := v_0.Args[1] 1945 if mem != v.Args[1] { 1946 break 1947 } 1948 v.reset(OpCopy) 1949 v.Type = ptr.Type 1950 v.AddArg(ptr) 1951 return true 1952 } 1953 return false 1954 } 1955 func rewriteValuegeneric_OpCvt32Fto64F(v *Value, config *Config) bool { 1956 b := v.Block 1957 _ = b 1958 // match: (Cvt32Fto64F (Const32F [c])) 1959 // cond: 1960 // result: (Const64F [c]) 1961 for { 1962 v_0 := v.Args[0] 1963 if v_0.Op != OpConst32F { 1964 break 1965 } 1966 c := v_0.AuxInt 1967 v.reset(OpConst64F) 1968 v.AuxInt = c 1969 return true 1970 } 1971 return false 1972 } 1973 func rewriteValuegeneric_OpCvt64Fto32F(v *Value, config *Config) bool { 1974 b := v.Block 1975 _ = b 1976 // match: (Cvt64Fto32F (Const64F [c])) 1977 // cond: 1978 // result: (Const32F [f2i(float64(i2f32(c)))]) 1979 for { 1980 v_0 := v.Args[0] 1981 if v_0.Op != OpConst64F { 1982 break 1983 } 1984 c := v_0.AuxInt 1985 v.reset(OpConst32F) 1986 v.AuxInt = f2i(float64(i2f32(c))) 1987 return true 1988 } 1989 return false 1990 } 1991 func rewriteValuegeneric_OpDiv32F(v *Value, config *Config) bool { 1992 b := v.Block 1993 _ = b 1994 // match: (Div32F x (Const32F [f2i(1)])) 1995 // cond: 1996 // result: x 1997 for { 1998 x := v.Args[0] 1999 v_1 := v.Args[1] 2000 if v_1.Op != OpConst32F { 2001 break 2002 } 2003 if v_1.AuxInt != f2i(1) { 2004 break 2005 } 
2006 v.reset(OpCopy) 2007 v.Type = x.Type 2008 v.AddArg(x) 2009 return true 2010 } 2011 // match: (Div32F x (Const32F [f2i(-1)])) 2012 // cond: 2013 // result: (Neg32F x) 2014 for { 2015 x := v.Args[0] 2016 v_1 := v.Args[1] 2017 if v_1.Op != OpConst32F { 2018 break 2019 } 2020 if v_1.AuxInt != f2i(-1) { 2021 break 2022 } 2023 v.reset(OpNeg32F) 2024 v.AddArg(x) 2025 return true 2026 } 2027 return false 2028 } 2029 func rewriteValuegeneric_OpDiv64(v *Value, config *Config) bool { 2030 b := v.Block 2031 _ = b 2032 // match: (Div64 <t> x (Const64 [c])) 2033 // cond: c > 0 && smagic64ok(c) && smagic64m(c) > 0 2034 // result: (Sub64 <t> (Rsh64x64 <t> (Hmul64 <t> (Const64 <t> [smagic64m(c)]) x) (Const64 <t> [smagic64s(c)])) (Rsh64x64 <t> x (Const64 <t> [63]))) 2035 for { 2036 t := v.Type 2037 x := v.Args[0] 2038 v_1 := v.Args[1] 2039 if v_1.Op != OpConst64 { 2040 break 2041 } 2042 c := v_1.AuxInt 2043 if !(c > 0 && smagic64ok(c) && smagic64m(c) > 0) { 2044 break 2045 } 2046 v.reset(OpSub64) 2047 v.Type = t 2048 v0 := b.NewValue0(v.Line, OpRsh64x64, t) 2049 v1 := b.NewValue0(v.Line, OpHmul64, t) 2050 v2 := b.NewValue0(v.Line, OpConst64, t) 2051 v2.AuxInt = smagic64m(c) 2052 v1.AddArg(v2) 2053 v1.AddArg(x) 2054 v0.AddArg(v1) 2055 v3 := b.NewValue0(v.Line, OpConst64, t) 2056 v3.AuxInt = smagic64s(c) 2057 v0.AddArg(v3) 2058 v.AddArg(v0) 2059 v4 := b.NewValue0(v.Line, OpRsh64x64, t) 2060 v4.AddArg(x) 2061 v5 := b.NewValue0(v.Line, OpConst64, t) 2062 v5.AuxInt = 63 2063 v4.AddArg(v5) 2064 v.AddArg(v4) 2065 return true 2066 } 2067 // match: (Div64 <t> x (Const64 [c])) 2068 // cond: c > 0 && smagic64ok(c) && smagic64m(c) < 0 2069 // result: (Sub64 <t> (Rsh64x64 <t> (Add64 <t> (Hmul64 <t> (Const64 <t> [smagic64m(c)]) x) x) (Const64 <t> [smagic64s(c)])) (Rsh64x64 <t> x (Const64 <t> [63]))) 2070 for { 2071 t := v.Type 2072 x := v.Args[0] 2073 v_1 := v.Args[1] 2074 if v_1.Op != OpConst64 { 2075 break 2076 } 2077 c := v_1.AuxInt 2078 if !(c > 0 && smagic64ok(c) && smagic64m(c) < 0) { 
2079 break 2080 } 2081 v.reset(OpSub64) 2082 v.Type = t 2083 v0 := b.NewValue0(v.Line, OpRsh64x64, t) 2084 v1 := b.NewValue0(v.Line, OpAdd64, t) 2085 v2 := b.NewValue0(v.Line, OpHmul64, t) 2086 v3 := b.NewValue0(v.Line, OpConst64, t) 2087 v3.AuxInt = smagic64m(c) 2088 v2.AddArg(v3) 2089 v2.AddArg(x) 2090 v1.AddArg(v2) 2091 v1.AddArg(x) 2092 v0.AddArg(v1) 2093 v4 := b.NewValue0(v.Line, OpConst64, t) 2094 v4.AuxInt = smagic64s(c) 2095 v0.AddArg(v4) 2096 v.AddArg(v0) 2097 v5 := b.NewValue0(v.Line, OpRsh64x64, t) 2098 v5.AddArg(x) 2099 v6 := b.NewValue0(v.Line, OpConst64, t) 2100 v6.AuxInt = 63 2101 v5.AddArg(v6) 2102 v.AddArg(v5) 2103 return true 2104 } 2105 // match: (Div64 <t> x (Const64 [c])) 2106 // cond: c < 0 && smagic64ok(c) && smagic64m(c) > 0 2107 // result: (Neg64 <t> (Sub64 <t> (Rsh64x64 <t> (Hmul64 <t> (Const64 <t> [smagic64m(c)]) x) (Const64 <t> [smagic64s(c)])) (Rsh64x64 <t> x (Const64 <t> [63])))) 2108 for { 2109 t := v.Type 2110 x := v.Args[0] 2111 v_1 := v.Args[1] 2112 if v_1.Op != OpConst64 { 2113 break 2114 } 2115 c := v_1.AuxInt 2116 if !(c < 0 && smagic64ok(c) && smagic64m(c) > 0) { 2117 break 2118 } 2119 v.reset(OpNeg64) 2120 v.Type = t 2121 v0 := b.NewValue0(v.Line, OpSub64, t) 2122 v1 := b.NewValue0(v.Line, OpRsh64x64, t) 2123 v2 := b.NewValue0(v.Line, OpHmul64, t) 2124 v3 := b.NewValue0(v.Line, OpConst64, t) 2125 v3.AuxInt = smagic64m(c) 2126 v2.AddArg(v3) 2127 v2.AddArg(x) 2128 v1.AddArg(v2) 2129 v4 := b.NewValue0(v.Line, OpConst64, t) 2130 v4.AuxInt = smagic64s(c) 2131 v1.AddArg(v4) 2132 v0.AddArg(v1) 2133 v5 := b.NewValue0(v.Line, OpRsh64x64, t) 2134 v5.AddArg(x) 2135 v6 := b.NewValue0(v.Line, OpConst64, t) 2136 v6.AuxInt = 63 2137 v5.AddArg(v6) 2138 v0.AddArg(v5) 2139 v.AddArg(v0) 2140 return true 2141 } 2142 // match: (Div64 <t> x (Const64 [c])) 2143 // cond: c < 0 && smagic64ok(c) && smagic64m(c) < 0 2144 // result: (Neg64 <t> (Sub64 <t> (Rsh64x64 <t> (Add64 <t> (Hmul64 <t> (Const64 <t> [smagic64m(c)]) x) x) (Const64 <t> [smagic64s(c)])) 
(Rsh64x64 <t> x (Const64 <t> [63])))) 2145 for { 2146 t := v.Type 2147 x := v.Args[0] 2148 v_1 := v.Args[1] 2149 if v_1.Op != OpConst64 { 2150 break 2151 } 2152 c := v_1.AuxInt 2153 if !(c < 0 && smagic64ok(c) && smagic64m(c) < 0) { 2154 break 2155 } 2156 v.reset(OpNeg64) 2157 v.Type = t 2158 v0 := b.NewValue0(v.Line, OpSub64, t) 2159 v1 := b.NewValue0(v.Line, OpRsh64x64, t) 2160 v2 := b.NewValue0(v.Line, OpAdd64, t) 2161 v3 := b.NewValue0(v.Line, OpHmul64, t) 2162 v4 := b.NewValue0(v.Line, OpConst64, t) 2163 v4.AuxInt = smagic64m(c) 2164 v3.AddArg(v4) 2165 v3.AddArg(x) 2166 v2.AddArg(v3) 2167 v2.AddArg(x) 2168 v1.AddArg(v2) 2169 v5 := b.NewValue0(v.Line, OpConst64, t) 2170 v5.AuxInt = smagic64s(c) 2171 v1.AddArg(v5) 2172 v0.AddArg(v1) 2173 v6 := b.NewValue0(v.Line, OpRsh64x64, t) 2174 v6.AddArg(x) 2175 v7 := b.NewValue0(v.Line, OpConst64, t) 2176 v7.AuxInt = 63 2177 v6.AddArg(v7) 2178 v0.AddArg(v6) 2179 v.AddArg(v0) 2180 return true 2181 } 2182 return false 2183 } 2184 func rewriteValuegeneric_OpDiv64F(v *Value, config *Config) bool { 2185 b := v.Block 2186 _ = b 2187 // match: (Div64F x (Const64F [f2i(1)])) 2188 // cond: 2189 // result: x 2190 for { 2191 x := v.Args[0] 2192 v_1 := v.Args[1] 2193 if v_1.Op != OpConst64F { 2194 break 2195 } 2196 if v_1.AuxInt != f2i(1) { 2197 break 2198 } 2199 v.reset(OpCopy) 2200 v.Type = x.Type 2201 v.AddArg(x) 2202 return true 2203 } 2204 // match: (Div64F x (Const64F [f2i(-1)])) 2205 // cond: 2206 // result: (Neg32F x) 2207 for { 2208 x := v.Args[0] 2209 v_1 := v.Args[1] 2210 if v_1.Op != OpConst64F { 2211 break 2212 } 2213 if v_1.AuxInt != f2i(-1) { 2214 break 2215 } 2216 v.reset(OpNeg32F) 2217 v.AddArg(x) 2218 return true 2219 } 2220 return false 2221 } 2222 func rewriteValuegeneric_OpDiv64u(v *Value, config *Config) bool { 2223 b := v.Block 2224 _ = b 2225 // match: (Div64u <t> n (Const64 [c])) 2226 // cond: isPowerOfTwo(c) 2227 // result: (Rsh64Ux64 n (Const64 <t> [log2(c)])) 2228 for { 2229 t := v.Type 2230 n := v.Args[0] 
2231 v_1 := v.Args[1] 2232 if v_1.Op != OpConst64 { 2233 break 2234 } 2235 c := v_1.AuxInt 2236 if !(isPowerOfTwo(c)) { 2237 break 2238 } 2239 v.reset(OpRsh64Ux64) 2240 v.AddArg(n) 2241 v0 := b.NewValue0(v.Line, OpConst64, t) 2242 v0.AuxInt = log2(c) 2243 v.AddArg(v0) 2244 return true 2245 } 2246 // match: (Div64u <t> x (Const64 [c])) 2247 // cond: umagic64ok(c) && !umagic64a(c) 2248 // result: (Rsh64Ux64 (Hmul64u <t> (Const64 <t> [umagic64m(c)]) x) (Const64 <t> [umagic64s(c)])) 2249 for { 2250 t := v.Type 2251 x := v.Args[0] 2252 v_1 := v.Args[1] 2253 if v_1.Op != OpConst64 { 2254 break 2255 } 2256 c := v_1.AuxInt 2257 if !(umagic64ok(c) && !umagic64a(c)) { 2258 break 2259 } 2260 v.reset(OpRsh64Ux64) 2261 v0 := b.NewValue0(v.Line, OpHmul64u, t) 2262 v1 := b.NewValue0(v.Line, OpConst64, t) 2263 v1.AuxInt = umagic64m(c) 2264 v0.AddArg(v1) 2265 v0.AddArg(x) 2266 v.AddArg(v0) 2267 v2 := b.NewValue0(v.Line, OpConst64, t) 2268 v2.AuxInt = umagic64s(c) 2269 v.AddArg(v2) 2270 return true 2271 } 2272 // match: (Div64u <t> x (Const64 [c])) 2273 // cond: umagic64ok(c) && umagic64a(c) 2274 // result: (Rsh64Ux64 (Avg64u <t> (Hmul64u <t> x (Const64 <t> [umagic64m(c)])) x) (Const64 <t> [umagic64s(c)-1])) 2275 for { 2276 t := v.Type 2277 x := v.Args[0] 2278 v_1 := v.Args[1] 2279 if v_1.Op != OpConst64 { 2280 break 2281 } 2282 c := v_1.AuxInt 2283 if !(umagic64ok(c) && umagic64a(c)) { 2284 break 2285 } 2286 v.reset(OpRsh64Ux64) 2287 v0 := b.NewValue0(v.Line, OpAvg64u, t) 2288 v1 := b.NewValue0(v.Line, OpHmul64u, t) 2289 v1.AddArg(x) 2290 v2 := b.NewValue0(v.Line, OpConst64, t) 2291 v2.AuxInt = umagic64m(c) 2292 v1.AddArg(v2) 2293 v0.AddArg(v1) 2294 v0.AddArg(x) 2295 v.AddArg(v0) 2296 v3 := b.NewValue0(v.Line, OpConst64, t) 2297 v3.AuxInt = umagic64s(c) - 1 2298 v.AddArg(v3) 2299 return true 2300 } 2301 return false 2302 } 2303 func rewriteValuegeneric_OpEq16(v *Value, config *Config) bool { 2304 b := v.Block 2305 _ = b 2306 // match: (Eq16 x x) 2307 // cond: 2308 // result: 
(ConstBool [1]) 2309 for { 2310 x := v.Args[0] 2311 if x != v.Args[1] { 2312 break 2313 } 2314 v.reset(OpConstBool) 2315 v.AuxInt = 1 2316 return true 2317 } 2318 // match: (Eq16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x)) 2319 // cond: 2320 // result: (Eq16 (Const16 <t> [int64(int16(c-d))]) x) 2321 for { 2322 v_0 := v.Args[0] 2323 if v_0.Op != OpConst16 { 2324 break 2325 } 2326 t := v_0.Type 2327 c := v_0.AuxInt 2328 v_1 := v.Args[1] 2329 if v_1.Op != OpAdd16 { 2330 break 2331 } 2332 v_1_0 := v_1.Args[0] 2333 if v_1_0.Op != OpConst16 { 2334 break 2335 } 2336 if v_1_0.Type != t { 2337 break 2338 } 2339 d := v_1_0.AuxInt 2340 x := v_1.Args[1] 2341 v.reset(OpEq16) 2342 v0 := b.NewValue0(v.Line, OpConst16, t) 2343 v0.AuxInt = int64(int16(c - d)) 2344 v.AddArg(v0) 2345 v.AddArg(x) 2346 return true 2347 } 2348 // match: (Eq16 x (Const16 <t> [c])) 2349 // cond: x.Op != OpConst16 2350 // result: (Eq16 (Const16 <t> [c]) x) 2351 for { 2352 x := v.Args[0] 2353 v_1 := v.Args[1] 2354 if v_1.Op != OpConst16 { 2355 break 2356 } 2357 t := v_1.Type 2358 c := v_1.AuxInt 2359 if !(x.Op != OpConst16) { 2360 break 2361 } 2362 v.reset(OpEq16) 2363 v0 := b.NewValue0(v.Line, OpConst16, t) 2364 v0.AuxInt = c 2365 v.AddArg(v0) 2366 v.AddArg(x) 2367 return true 2368 } 2369 // match: (Eq16 (Const16 [c]) (Const16 [d])) 2370 // cond: 2371 // result: (ConstBool [b2i(c == d)]) 2372 for { 2373 v_0 := v.Args[0] 2374 if v_0.Op != OpConst16 { 2375 break 2376 } 2377 c := v_0.AuxInt 2378 v_1 := v.Args[1] 2379 if v_1.Op != OpConst16 { 2380 break 2381 } 2382 d := v_1.AuxInt 2383 v.reset(OpConstBool) 2384 v.AuxInt = b2i(c == d) 2385 return true 2386 } 2387 return false 2388 } 2389 func rewriteValuegeneric_OpEq32(v *Value, config *Config) bool { 2390 b := v.Block 2391 _ = b 2392 // match: (Eq32 x x) 2393 // cond: 2394 // result: (ConstBool [1]) 2395 for { 2396 x := v.Args[0] 2397 if x != v.Args[1] { 2398 break 2399 } 2400 v.reset(OpConstBool) 2401 v.AuxInt = 1 2402 return true 2403 } 2404 // match: 
(Eq32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x)) 2405 // cond: 2406 // result: (Eq32 (Const32 <t> [int64(int32(c-d))]) x) 2407 for { 2408 v_0 := v.Args[0] 2409 if v_0.Op != OpConst32 { 2410 break 2411 } 2412 t := v_0.Type 2413 c := v_0.AuxInt 2414 v_1 := v.Args[1] 2415 if v_1.Op != OpAdd32 { 2416 break 2417 } 2418 v_1_0 := v_1.Args[0] 2419 if v_1_0.Op != OpConst32 { 2420 break 2421 } 2422 if v_1_0.Type != t { 2423 break 2424 } 2425 d := v_1_0.AuxInt 2426 x := v_1.Args[1] 2427 v.reset(OpEq32) 2428 v0 := b.NewValue0(v.Line, OpConst32, t) 2429 v0.AuxInt = int64(int32(c - d)) 2430 v.AddArg(v0) 2431 v.AddArg(x) 2432 return true 2433 } 2434 // match: (Eq32 x (Const32 <t> [c])) 2435 // cond: x.Op != OpConst32 2436 // result: (Eq32 (Const32 <t> [c]) x) 2437 for { 2438 x := v.Args[0] 2439 v_1 := v.Args[1] 2440 if v_1.Op != OpConst32 { 2441 break 2442 } 2443 t := v_1.Type 2444 c := v_1.AuxInt 2445 if !(x.Op != OpConst32) { 2446 break 2447 } 2448 v.reset(OpEq32) 2449 v0 := b.NewValue0(v.Line, OpConst32, t) 2450 v0.AuxInt = c 2451 v.AddArg(v0) 2452 v.AddArg(x) 2453 return true 2454 } 2455 // match: (Eq32 (Const32 [c]) (Const32 [d])) 2456 // cond: 2457 // result: (ConstBool [b2i(c == d)]) 2458 for { 2459 v_0 := v.Args[0] 2460 if v_0.Op != OpConst32 { 2461 break 2462 } 2463 c := v_0.AuxInt 2464 v_1 := v.Args[1] 2465 if v_1.Op != OpConst32 { 2466 break 2467 } 2468 d := v_1.AuxInt 2469 v.reset(OpConstBool) 2470 v.AuxInt = b2i(c == d) 2471 return true 2472 } 2473 return false 2474 } 2475 func rewriteValuegeneric_OpEq64(v *Value, config *Config) bool { 2476 b := v.Block 2477 _ = b 2478 // match: (Eq64 x x) 2479 // cond: 2480 // result: (ConstBool [1]) 2481 for { 2482 x := v.Args[0] 2483 if x != v.Args[1] { 2484 break 2485 } 2486 v.reset(OpConstBool) 2487 v.AuxInt = 1 2488 return true 2489 } 2490 // match: (Eq64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x)) 2491 // cond: 2492 // result: (Eq64 (Const64 <t> [c-d]) x) 2493 for { 2494 v_0 := v.Args[0] 2495 if v_0.Op != OpConst64 { 
2496 break 2497 } 2498 t := v_0.Type 2499 c := v_0.AuxInt 2500 v_1 := v.Args[1] 2501 if v_1.Op != OpAdd64 { 2502 break 2503 } 2504 v_1_0 := v_1.Args[0] 2505 if v_1_0.Op != OpConst64 { 2506 break 2507 } 2508 if v_1_0.Type != t { 2509 break 2510 } 2511 d := v_1_0.AuxInt 2512 x := v_1.Args[1] 2513 v.reset(OpEq64) 2514 v0 := b.NewValue0(v.Line, OpConst64, t) 2515 v0.AuxInt = c - d 2516 v.AddArg(v0) 2517 v.AddArg(x) 2518 return true 2519 } 2520 // match: (Eq64 x (Const64 <t> [c])) 2521 // cond: x.Op != OpConst64 2522 // result: (Eq64 (Const64 <t> [c]) x) 2523 for { 2524 x := v.Args[0] 2525 v_1 := v.Args[1] 2526 if v_1.Op != OpConst64 { 2527 break 2528 } 2529 t := v_1.Type 2530 c := v_1.AuxInt 2531 if !(x.Op != OpConst64) { 2532 break 2533 } 2534 v.reset(OpEq64) 2535 v0 := b.NewValue0(v.Line, OpConst64, t) 2536 v0.AuxInt = c 2537 v.AddArg(v0) 2538 v.AddArg(x) 2539 return true 2540 } 2541 // match: (Eq64 (Const64 [c]) (Const64 [d])) 2542 // cond: 2543 // result: (ConstBool [b2i(c == d)]) 2544 for { 2545 v_0 := v.Args[0] 2546 if v_0.Op != OpConst64 { 2547 break 2548 } 2549 c := v_0.AuxInt 2550 v_1 := v.Args[1] 2551 if v_1.Op != OpConst64 { 2552 break 2553 } 2554 d := v_1.AuxInt 2555 v.reset(OpConstBool) 2556 v.AuxInt = b2i(c == d) 2557 return true 2558 } 2559 return false 2560 } 2561 func rewriteValuegeneric_OpEq8(v *Value, config *Config) bool { 2562 b := v.Block 2563 _ = b 2564 // match: (Eq8 x x) 2565 // cond: 2566 // result: (ConstBool [1]) 2567 for { 2568 x := v.Args[0] 2569 if x != v.Args[1] { 2570 break 2571 } 2572 v.reset(OpConstBool) 2573 v.AuxInt = 1 2574 return true 2575 } 2576 // match: (Eq8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x)) 2577 // cond: 2578 // result: (Eq8 (Const8 <t> [int64(int8(c-d))]) x) 2579 for { 2580 v_0 := v.Args[0] 2581 if v_0.Op != OpConst8 { 2582 break 2583 } 2584 t := v_0.Type 2585 c := v_0.AuxInt 2586 v_1 := v.Args[1] 2587 if v_1.Op != OpAdd8 { 2588 break 2589 } 2590 v_1_0 := v_1.Args[0] 2591 if v_1_0.Op != OpConst8 { 2592 break 2593 } 
2594 if v_1_0.Type != t { 2595 break 2596 } 2597 d := v_1_0.AuxInt 2598 x := v_1.Args[1] 2599 v.reset(OpEq8) 2600 v0 := b.NewValue0(v.Line, OpConst8, t) 2601 v0.AuxInt = int64(int8(c - d)) 2602 v.AddArg(v0) 2603 v.AddArg(x) 2604 return true 2605 } 2606 // match: (Eq8 x (Const8 <t> [c])) 2607 // cond: x.Op != OpConst8 2608 // result: (Eq8 (Const8 <t> [c]) x) 2609 for { 2610 x := v.Args[0] 2611 v_1 := v.Args[1] 2612 if v_1.Op != OpConst8 { 2613 break 2614 } 2615 t := v_1.Type 2616 c := v_1.AuxInt 2617 if !(x.Op != OpConst8) { 2618 break 2619 } 2620 v.reset(OpEq8) 2621 v0 := b.NewValue0(v.Line, OpConst8, t) 2622 v0.AuxInt = c 2623 v.AddArg(v0) 2624 v.AddArg(x) 2625 return true 2626 } 2627 // match: (Eq8 (Const8 [c]) (Const8 [d])) 2628 // cond: 2629 // result: (ConstBool [b2i(c == d)]) 2630 for { 2631 v_0 := v.Args[0] 2632 if v_0.Op != OpConst8 { 2633 break 2634 } 2635 c := v_0.AuxInt 2636 v_1 := v.Args[1] 2637 if v_1.Op != OpConst8 { 2638 break 2639 } 2640 d := v_1.AuxInt 2641 v.reset(OpConstBool) 2642 v.AuxInt = b2i(c == d) 2643 return true 2644 } 2645 return false 2646 } 2647 func rewriteValuegeneric_OpEqB(v *Value, config *Config) bool { 2648 b := v.Block 2649 _ = b 2650 // match: (EqB (ConstBool [c]) (ConstBool [d])) 2651 // cond: 2652 // result: (ConstBool [b2i(c == d)]) 2653 for { 2654 v_0 := v.Args[0] 2655 if v_0.Op != OpConstBool { 2656 break 2657 } 2658 c := v_0.AuxInt 2659 v_1 := v.Args[1] 2660 if v_1.Op != OpConstBool { 2661 break 2662 } 2663 d := v_1.AuxInt 2664 v.reset(OpConstBool) 2665 v.AuxInt = b2i(c == d) 2666 return true 2667 } 2668 // match: (EqB (ConstBool [0]) x) 2669 // cond: 2670 // result: (Not x) 2671 for { 2672 v_0 := v.Args[0] 2673 if v_0.Op != OpConstBool { 2674 break 2675 } 2676 if v_0.AuxInt != 0 { 2677 break 2678 } 2679 x := v.Args[1] 2680 v.reset(OpNot) 2681 v.AddArg(x) 2682 return true 2683 } 2684 // match: (EqB (ConstBool [1]) x) 2685 // cond: 2686 // result: x 2687 for { 2688 v_0 := v.Args[0] 2689 if v_0.Op != OpConstBool { 2690 
break 2691 } 2692 if v_0.AuxInt != 1 { 2693 break 2694 } 2695 x := v.Args[1] 2696 v.reset(OpCopy) 2697 v.Type = x.Type 2698 v.AddArg(x) 2699 return true 2700 } 2701 return false 2702 } 2703 func rewriteValuegeneric_OpEqInter(v *Value, config *Config) bool { 2704 b := v.Block 2705 _ = b 2706 // match: (EqInter x y) 2707 // cond: 2708 // result: (EqPtr (ITab x) (ITab y)) 2709 for { 2710 x := v.Args[0] 2711 y := v.Args[1] 2712 v.reset(OpEqPtr) 2713 v0 := b.NewValue0(v.Line, OpITab, config.fe.TypeBytePtr()) 2714 v0.AddArg(x) 2715 v.AddArg(v0) 2716 v1 := b.NewValue0(v.Line, OpITab, config.fe.TypeBytePtr()) 2717 v1.AddArg(y) 2718 v.AddArg(v1) 2719 return true 2720 } 2721 } 2722 func rewriteValuegeneric_OpEqPtr(v *Value, config *Config) bool { 2723 b := v.Block 2724 _ = b 2725 // match: (EqPtr p (ConstNil)) 2726 // cond: 2727 // result: (Not (IsNonNil p)) 2728 for { 2729 p := v.Args[0] 2730 v_1 := v.Args[1] 2731 if v_1.Op != OpConstNil { 2732 break 2733 } 2734 v.reset(OpNot) 2735 v0 := b.NewValue0(v.Line, OpIsNonNil, config.fe.TypeBool()) 2736 v0.AddArg(p) 2737 v.AddArg(v0) 2738 return true 2739 } 2740 // match: (EqPtr (ConstNil) p) 2741 // cond: 2742 // result: (Not (IsNonNil p)) 2743 for { 2744 v_0 := v.Args[0] 2745 if v_0.Op != OpConstNil { 2746 break 2747 } 2748 p := v.Args[1] 2749 v.reset(OpNot) 2750 v0 := b.NewValue0(v.Line, OpIsNonNil, config.fe.TypeBool()) 2751 v0.AddArg(p) 2752 v.AddArg(v0) 2753 return true 2754 } 2755 return false 2756 } 2757 func rewriteValuegeneric_OpEqSlice(v *Value, config *Config) bool { 2758 b := v.Block 2759 _ = b 2760 // match: (EqSlice x y) 2761 // cond: 2762 // result: (EqPtr (SlicePtr x) (SlicePtr y)) 2763 for { 2764 x := v.Args[0] 2765 y := v.Args[1] 2766 v.reset(OpEqPtr) 2767 v0 := b.NewValue0(v.Line, OpSlicePtr, config.fe.TypeBytePtr()) 2768 v0.AddArg(x) 2769 v.AddArg(v0) 2770 v1 := b.NewValue0(v.Line, OpSlicePtr, config.fe.TypeBytePtr()) 2771 v1.AddArg(y) 2772 v.AddArg(v1) 2773 return true 2774 } 2775 } 2776 func 
rewriteValuegeneric_OpGeq16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq16 (Const16 [c]) (Const16 [d]))
	// cond:
	// result: (ConstBool [b2i(c >= d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c >= d)
		return true
	}
	return false
}
func rewriteValuegeneric_OpGeq16U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq16U (Const16 [c]) (Const16 [d]))
	// cond:
	// result: (ConstBool [b2i(uint16(c) >= uint16(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint16(c) >= uint16(d))
		return true
	}
	return false
}
func rewriteValuegeneric_OpGeq32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq32 (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(c >= d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c >= d)
		return true
	}
	return false
}
func rewriteValuegeneric_OpGeq32U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq32U (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(uint32(c) >= uint32(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint32(c) >= uint32(d))
		return true
	}
	return false
}
func rewriteValuegeneric_OpGeq64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq64 (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (ConstBool [b2i(c >= d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c >= d)
		return true
	}
	return false
}
func rewriteValuegeneric_OpGeq64U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq64U (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (ConstBool [b2i(uint64(c) >= uint64(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint64(c) >= uint64(d))
		return true
	}
	return false
}
func rewriteValuegeneric_OpGeq8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq8  (Const8  [c]) (Const8  [d]))
	// cond:
	// result: (ConstBool [b2i(c >= d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c >= d)
		return true
	}
	return false
}
func rewriteValuegeneric_OpGeq8U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq8U  (Const8  [c]) (Const8  [d]))
	// cond:
	// result: (ConstBool [b2i(uint8(c)  >= uint8(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint8(c) >= uint8(d))
		return true
	}
	return false
}
func rewriteValuegeneric_OpGreater16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater16 (Const16 [c]) (Const16 [d]))
	// cond:
	// result: (ConstBool [b2i(c > d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c > d)
		return true
	}
	return false
}
func rewriteValuegeneric_OpGreater16U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater16U (Const16 [c]) (Const16 [d]))
	// cond:
	// result: (ConstBool [b2i(uint16(c) > uint16(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint16(c) > uint16(d))
		return true
	}
	return false
}
func rewriteValuegeneric_OpGreater32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater32 (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(c > d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c > d)
		return true
	}
	return false
}
func rewriteValuegeneric_OpGreater32U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater32U (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(uint32(c) > uint32(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint32(c) > uint32(d))
		return true
	}
	return false
}
func rewriteValuegeneric_OpGreater64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater64 (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (ConstBool [b2i(c > d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c > d)
		return true
	}
	return false
}
func rewriteValuegeneric_OpGreater64U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater64U (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (ConstBool [b2i(uint64(c) > uint64(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint64(c) > uint64(d))
		return true
	}
	return false
}
func rewriteValuegeneric_OpGreater8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater8  (Const8  [c]) (Const8  [d]))
	// cond:
	// result: (ConstBool [b2i(c > d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c > d)
		return true
	}
	return false
}
func rewriteValuegeneric_OpGreater8U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater8U  (Const8  [c]) (Const8  [d]))
	// cond:
	// result: (ConstBool [b2i(uint8(c)  > uint8(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint8(c) > uint8(d))
		return true
	}
	return false
}
func rewriteValuegeneric_OpIMake(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (IMake typ (StructMake1 val))
	// cond:
	// result: (IMake typ val)
	for {
		typ := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpStructMake1 {
			break
		}
		val := v_1.Args[0]
		v.reset(OpIMake)
		v.AddArg(typ)
		v.AddArg(val)
		return true
	}
	// match: (IMake typ (ArrayMake1 val))
	// cond:
	// result: (IMake typ val)
	for {
		typ := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpArrayMake1 {
			break
		}
		val := v_1.Args[0]
		v.reset(OpIMake)
		v.AddArg(typ)
		v.AddArg(val)
		return true
	}
	return false
}
func rewriteValuegeneric_OpIsInBounds(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (IsInBounds (ZeroExt8to32 _) (Const32 [c]))
	// cond: (1 << 8) <= c
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpZeroExt8to32 {
			break
		}
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		c := v_1.AuxInt
		if !((1 << 8) <= c) {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsInBounds (ZeroExt8to64 _) (Const64 [c]))
	// cond: (1 << 8) <= c
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpZeroExt8to64 {
			break
		}
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !((1 << 8) <= c) {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsInBounds (ZeroExt16to32 _) (Const32 [c]))
	// cond: (1 << 16) <= c
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpZeroExt16to32 {
			break
		}
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		c := v_1.AuxInt
		if !((1 << 16) <= c) {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsInBounds (ZeroExt16to64 _) (Const64 [c]))
	// cond: (1 << 16) <= c
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpZeroExt16to64 {
			break
		}
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !((1 << 16) <= c) {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsInBounds x x)
	// cond:
	// result: (ConstBool [0])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 0
		return true
	}
	// match: (IsInBounds (And32 (Const32 [c]) _) (Const32 [d]))
	// cond: 0 <= c && c < d
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAnd32 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst32 {
			break
		}
		c := v_0_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		if !(0 <= c && c < d) {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsInBounds (And64 (Const64 [c]) _) (Const64 [d]))
	// cond: 0 <= c && c < d
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAnd64 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst64 {
			break
		}
		c := v_0_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		if !(0 <= c && c < d) {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsInBounds (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(0 <= c && c < d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(0 <= c && c < d)
		return true
	}
	// match: (IsInBounds (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (ConstBool [b2i(0 <= c && c < d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(0 <= c && c < d)
		return true
	}
	// match: (IsInBounds (Mod32u _ y) y)
	// cond:
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMod32u {
			break
		}
		y := v_0.Args[1]
		if y != v.Args[1] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsInBounds (Mod64u _ y) y)
	// cond:
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMod64u {
			break
		}
		y := v_0.Args[1]
		if y != v.Args[1] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	return false
}
func rewriteValuegeneric_OpIsSliceInBounds(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (IsSliceInBounds x x)
	// cond:
	// result: (ConstBool [1])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsSliceInBounds (And32 (Const32 [c]) _) (Const32 [d]))
	// cond: 0 <= c && c <= d
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAnd32 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst32 {
			break
		}
		c := v_0_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		if !(0 <= c && c <= d) {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsSliceInBounds (And64 (Const64 [c]) _) (Const64 [d]))
	// cond: 0 <= c && c <= d
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAnd64 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst64 {
			break
		}
		c := v_0_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		if !(0 <= c && c <= d) {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsSliceInBounds (Const32 [0]) _)
	// cond:
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsSliceInBounds (Const64 [0]) _)
	// cond:
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsSliceInBounds (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(0 <= c && c <= d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(0 <= c && c <= d)
		return true
	}
	// match: (IsSliceInBounds (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (ConstBool [b2i(0 <= c && c <= d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(0 <= c && c <= d)
		return true
	}
	// match: (IsSliceInBounds (SliceLen x) (SliceCap x))
	// cond:
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpSliceLen {
			break
		}
		x := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpSliceCap {
			break
		}
		if x != v_1.Args[0] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	return false
}
func rewriteValuegeneric_OpLeq16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq16 (Const16 [c]) (Const16 [d]))
	// cond:
	// result: (ConstBool [b2i(c <= d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c <= d)
		return true
	}
	return false
}
func rewriteValuegeneric_OpLeq16U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq16U (Const16 [c]) (Const16 [d]))
	// cond:
	// result: (ConstBool [b2i(uint16(c) <= uint16(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint16(c) <= uint16(d))
		return true
	}
	return false
}
func rewriteValuegeneric_OpLeq32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq32 (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(c <= d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c <= d)
		return true
	}
	return false
}
func rewriteValuegeneric_OpLeq32U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq32U (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(uint32(c) <= uint32(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint32(c) <= uint32(d))
		return true
	}
	return false
}
func rewriteValuegeneric_OpLeq64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq64 (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (ConstBool [b2i(c <= d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c <= d)
		return true
	}
	return false
}
func rewriteValuegeneric_OpLeq64U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq64U (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (ConstBool [b2i(uint64(c) <= uint64(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint64(c) <= uint64(d))
		return true
	}
	return false
}
func rewriteValuegeneric_OpLeq8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq8  (Const8  [c]) (Const8  [d]))
	// cond:
	// result: (ConstBool [b2i(c <= d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c <= d)
		return true
	}
	return false
}
func rewriteValuegeneric_OpLeq8U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq8U  (Const8  [c]) (Const8  [d]))
	// cond:
	// result: (ConstBool [b2i(uint8(c)  <= uint8(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint8(c) <= uint8(d))
		return true
	}
	return false
}
func rewriteValuegeneric_OpLess16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less16 (Const16 [c]) (Const16 [d]))
	// cond:
	// result: (ConstBool [b2i(c < d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c < d)
		return true
	}
	return false
}
func rewriteValuegeneric_OpLess16U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less16U (Const16 [c]) (Const16 [d]))
	// cond:
	// result: (ConstBool [b2i(uint16(c) < uint16(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint16(c) < uint16(d))
		return true
	}
	return false
}
func rewriteValuegeneric_OpLess32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less32 (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(c < d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c < d)
		return true
	}
	return false
}
func rewriteValuegeneric_OpLess32U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less32U (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(uint32(c) < uint32(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint32(c) < uint32(d))
		return true
	}
	return false
}
func rewriteValuegeneric_OpLess64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less64 (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (ConstBool [b2i(c < d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c < d)
		return true
	}
	return false
}
func rewriteValuegeneric_OpLess64U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less64U (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (ConstBool [b2i(uint64(c) < uint64(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint64(c) < uint64(d))
		return true
	}
	return false
}
func
rewriteValuegeneric_OpLess8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less8  (Const8  [c]) (Const8  [d]))
	// cond:
	// result: (ConstBool [b2i(c < d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c < d)
		return true
	}
	return false
}
func rewriteValuegeneric_OpLess8U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less8U  (Const8  [c]) (Const8  [d]))
	// cond:
	// result: (ConstBool [b2i(uint8(c)  < uint8(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint8(c) < uint8(d))
		return true
	}
	return false
}
func rewriteValuegeneric_OpLoad(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Load <t1> p1 (Store [w] p2 x _))
	// cond: isSamePtr(p1,p2) && t1.Compare(x.Type)==CMPeq && w == t1.Size()
	// result: x
	for {
		t1 := v.Type
		p1 := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpStore {
			break
		}
		w := v_1.AuxInt
		p2 := v_1.Args[0]
		x := v_1.Args[1]
		if !(isSamePtr(p1, p2) && t1.Compare(x.Type) == CMPeq && w == t1.Size()) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Load <t> _ _)
	// cond: t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t)
	// result: (StructMake0)
	for {
		t := v.Type
		if !(t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t)) {
			break
		}
		v.reset(OpStructMake0)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: t.IsStruct() && t.NumFields() == 1 && config.fe.CanSSA(t)
	// result: (StructMake1 (Load <t.FieldType(0)> ptr mem))
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.IsStruct() && t.NumFields() == 1 && config.fe.CanSSA(t)) {
			break
		}
		v.reset(OpStructMake1)
		v0 := b.NewValue0(v.Line, OpLoad, t.FieldType(0))
		v0.AddArg(ptr)
		v0.AddArg(mem)
		v.AddArg(v0)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t)
	// result: (StructMake2 (Load <t.FieldType(0)> ptr mem) (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem))
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t)) {
			break
		}
		v.reset(OpStructMake2)
		v0 := b.NewValue0(v.Line, OpLoad, t.FieldType(0))
		v0.AddArg(ptr)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpLoad, t.FieldType(1))
		v2 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(1).PtrTo())
		v2.AuxInt = t.FieldOff(1)
		v2.AddArg(ptr)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t)
	// result: (StructMake3 (Load <t.FieldType(0)> ptr mem) (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem) (Load <t.FieldType(2)> (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] ptr) mem))
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t)) {
			break
		}
		v.reset(OpStructMake3)
		v0 := b.NewValue0(v.Line, OpLoad, t.FieldType(0))
		v0.AddArg(ptr)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpLoad, t.FieldType(1))
		v2 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(1).PtrTo())
		v2.AuxInt = t.FieldOff(1)
		v2.AddArg(ptr)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		v3 := b.NewValue0(v.Line, OpLoad, t.FieldType(2))
		v4 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(2).PtrTo())
		v4.AuxInt = t.FieldOff(2)
		v4.AddArg(ptr)
		v3.AddArg(v4)
		v3.AddArg(mem)
		v.AddArg(v3)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t)
	// result: (StructMake4 (Load <t.FieldType(0)> ptr mem) (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem) (Load <t.FieldType(2)> (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] ptr) mem) (Load <t.FieldType(3)> (OffPtr <t.FieldType(3).PtrTo()> [t.FieldOff(3)] ptr) mem))
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t)) {
			break
		}
		v.reset(OpStructMake4)
		v0 := b.NewValue0(v.Line, OpLoad, t.FieldType(0))
		v0.AddArg(ptr)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpLoad, t.FieldType(1))
		v2 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(1).PtrTo())
		v2.AuxInt = t.FieldOff(1)
		v2.AddArg(ptr)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		v3 := b.NewValue0(v.Line, OpLoad, t.FieldType(2))
		v4 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(2).PtrTo())
		v4.AuxInt = t.FieldOff(2)
		v4.AddArg(ptr)
		v3.AddArg(v4)
		v3.AddArg(mem)
		v.AddArg(v3)
		v5 := b.NewValue0(v.Line, OpLoad, t.FieldType(3))
		v6 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(3).PtrTo())
		v6.AuxInt = t.FieldOff(3)
		v6.AddArg(ptr)
		v5.AddArg(v6)
		v5.AddArg(mem)
		v.AddArg(v5)
		return true
	}
	// match: (Load <t> _ _)
	// cond: t.IsArray() && t.NumElem() == 0
	// result: (ArrayMake0)
	for {
		t := v.Type
		if !(t.IsArray() && t.NumElem() == 0) {
			break
		}
		v.reset(OpArrayMake0)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: t.IsArray() && t.NumElem() == 1 && config.fe.CanSSA(t)
	// result: (ArrayMake1 (Load <t.ElemType()> ptr mem))
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.IsArray() && t.NumElem() == 1 && config.fe.CanSSA(t)) {
			break
		}
		v.reset(OpArrayMake1)
		v0 := b.NewValue0(v.Line, OpLoad, t.ElemType())
		v0.AddArg(ptr)
		v0.AddArg(mem)
		v.AddArg(v0)
		return true
	}
	return false
}
func rewriteValuegeneric_OpLsh16x16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh16x16 <t> x (Const16 [c]))
	// cond:
	// result: (Lsh16x64 x (Const64 <t> [int64(uint16(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpLsh16x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint16(c))
		v.AddArg(v0)
		return true
	}
	// match: (Lsh16x16  (Const16 [0]) _)
	// cond:
	// result: (Const16 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = 0
		return true
	}
	return false
}
func rewriteValuegeneric_OpLsh16x32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh16x32 <t> x (Const32 [c]))
	// cond:
	// result: (Lsh16x64 x (Const64 <t> [int64(uint32(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpLsh16x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint32(c))
		v.AddArg(v0)
		return true
	}
	// match: (Lsh16x32  (Const16 [0]) _)
	// cond:
	// result: (Const16 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = 0
		return true
	}
	return false
}
func rewriteValuegeneric_OpLsh16x64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh16x64  (Const16 [c]) (Const64 [d]))
	// cond:
	// result: (Const16 [int64(int16(c) << uint64(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst16)
		v.AuxInt = int64(int16(c) << uint64(d))
		return true
	}
	// match: (Lsh16x64  x (Const64 [0]))
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Lsh16x64  (Const16 [0]) _)
	// cond:
	// result: (Const16 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = 0
		return true
	}
	// match: (Lsh16x64  _ (Const64 [c]))
	// cond: uint64(c) >= 16
	// result: (Const16 [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 16) {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = 0
		return true
	}
	// match: (Lsh16x64 <t> (Lsh16x64 x (Const64 [c])) (Const64 [d]))
	// cond: !uaddOvf(c,d)
	// result: (Lsh16x64 x (Const64 <t> [c+d]))
	for {
		t := v.Type
		v_0 := v.Args[0]
		if v_0.Op != OpLsh16x64 {
			break
		}
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		if !(!uaddOvf(c, d)) {
			break
		}
		v.reset(OpLsh16x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = c + d
		v.AddArg(v0)
		return true
	}
	// match: (Lsh16x64 (Rsh16Ux64 (Lsh16x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
	// result: (Lsh16x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpRsh16Ux64 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpLsh16x64 {
			break
		}
		x := v_0_0.Args[0]
		v_0_0_1 := v_0_0.Args[1]
		if v_0_0_1.Op != OpConst64 {
			break
		}
		c1 := v_0_0_1.AuxInt
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c2 := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c3 := v_1.AuxInt
		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
			break
		}
		v.reset(OpLsh16x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v0.AuxInt = c1 - c2 + c3
		v.AddArg(v0)
		return true
	}
	return false
}
func rewriteValuegeneric_OpLsh16x8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh16x8 <t> x (Const8 [c]))
	// cond:
	// result: (Lsh16x64 x (Const64 <t> [int64(uint8(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpLsh16x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint8(c))
		v.AddArg(v0)
		return true
	}
	// match: (Lsh16x8  (Const16 [0]) _)
	// cond:
	// result: (Const16 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op !=
OpConst16 { 4332 break 4333 } 4334 if v_0.AuxInt != 0 { 4335 break 4336 } 4337 v.reset(OpConst16) 4338 v.AuxInt = 0 4339 return true 4340 } 4341 return false 4342 } 4343 func rewriteValuegeneric_OpLsh32x16(v *Value, config *Config) bool { 4344 b := v.Block 4345 _ = b 4346 // match: (Lsh32x16 <t> x (Const16 [c])) 4347 // cond: 4348 // result: (Lsh32x64 x (Const64 <t> [int64(uint16(c))])) 4349 for { 4350 t := v.Type 4351 x := v.Args[0] 4352 v_1 := v.Args[1] 4353 if v_1.Op != OpConst16 { 4354 break 4355 } 4356 c := v_1.AuxInt 4357 v.reset(OpLsh32x64) 4358 v.AddArg(x) 4359 v0 := b.NewValue0(v.Line, OpConst64, t) 4360 v0.AuxInt = int64(uint16(c)) 4361 v.AddArg(v0) 4362 return true 4363 } 4364 // match: (Lsh32x16 (Const32 [0]) _) 4365 // cond: 4366 // result: (Const32 [0]) 4367 for { 4368 v_0 := v.Args[0] 4369 if v_0.Op != OpConst32 { 4370 break 4371 } 4372 if v_0.AuxInt != 0 { 4373 break 4374 } 4375 v.reset(OpConst32) 4376 v.AuxInt = 0 4377 return true 4378 } 4379 return false 4380 } 4381 func rewriteValuegeneric_OpLsh32x32(v *Value, config *Config) bool { 4382 b := v.Block 4383 _ = b 4384 // match: (Lsh32x32 <t> x (Const32 [c])) 4385 // cond: 4386 // result: (Lsh32x64 x (Const64 <t> [int64(uint32(c))])) 4387 for { 4388 t := v.Type 4389 x := v.Args[0] 4390 v_1 := v.Args[1] 4391 if v_1.Op != OpConst32 { 4392 break 4393 } 4394 c := v_1.AuxInt 4395 v.reset(OpLsh32x64) 4396 v.AddArg(x) 4397 v0 := b.NewValue0(v.Line, OpConst64, t) 4398 v0.AuxInt = int64(uint32(c)) 4399 v.AddArg(v0) 4400 return true 4401 } 4402 // match: (Lsh32x32 (Const32 [0]) _) 4403 // cond: 4404 // result: (Const32 [0]) 4405 for { 4406 v_0 := v.Args[0] 4407 if v_0.Op != OpConst32 { 4408 break 4409 } 4410 if v_0.AuxInt != 0 { 4411 break 4412 } 4413 v.reset(OpConst32) 4414 v.AuxInt = 0 4415 return true 4416 } 4417 return false 4418 } 4419 func rewriteValuegeneric_OpLsh32x64(v *Value, config *Config) bool { 4420 b := v.Block 4421 _ = b 4422 // match: (Lsh32x64 (Const32 [c]) (Const64 [d])) 4423 // cond: 4424 
// result: (Const32 [int64(int32(c) << uint64(d))]) 4425 for { 4426 v_0 := v.Args[0] 4427 if v_0.Op != OpConst32 { 4428 break 4429 } 4430 c := v_0.AuxInt 4431 v_1 := v.Args[1] 4432 if v_1.Op != OpConst64 { 4433 break 4434 } 4435 d := v_1.AuxInt 4436 v.reset(OpConst32) 4437 v.AuxInt = int64(int32(c) << uint64(d)) 4438 return true 4439 } 4440 // match: (Lsh32x64 x (Const64 [0])) 4441 // cond: 4442 // result: x 4443 for { 4444 x := v.Args[0] 4445 v_1 := v.Args[1] 4446 if v_1.Op != OpConst64 { 4447 break 4448 } 4449 if v_1.AuxInt != 0 { 4450 break 4451 } 4452 v.reset(OpCopy) 4453 v.Type = x.Type 4454 v.AddArg(x) 4455 return true 4456 } 4457 // match: (Lsh32x64 (Const32 [0]) _) 4458 // cond: 4459 // result: (Const32 [0]) 4460 for { 4461 v_0 := v.Args[0] 4462 if v_0.Op != OpConst32 { 4463 break 4464 } 4465 if v_0.AuxInt != 0 { 4466 break 4467 } 4468 v.reset(OpConst32) 4469 v.AuxInt = 0 4470 return true 4471 } 4472 // match: (Lsh32x64 _ (Const64 [c])) 4473 // cond: uint64(c) >= 32 4474 // result: (Const32 [0]) 4475 for { 4476 v_1 := v.Args[1] 4477 if v_1.Op != OpConst64 { 4478 break 4479 } 4480 c := v_1.AuxInt 4481 if !(uint64(c) >= 32) { 4482 break 4483 } 4484 v.reset(OpConst32) 4485 v.AuxInt = 0 4486 return true 4487 } 4488 // match: (Lsh32x64 <t> (Lsh32x64 x (Const64 [c])) (Const64 [d])) 4489 // cond: !uaddOvf(c,d) 4490 // result: (Lsh32x64 x (Const64 <t> [c+d])) 4491 for { 4492 t := v.Type 4493 v_0 := v.Args[0] 4494 if v_0.Op != OpLsh32x64 { 4495 break 4496 } 4497 x := v_0.Args[0] 4498 v_0_1 := v_0.Args[1] 4499 if v_0_1.Op != OpConst64 { 4500 break 4501 } 4502 c := v_0_1.AuxInt 4503 v_1 := v.Args[1] 4504 if v_1.Op != OpConst64 { 4505 break 4506 } 4507 d := v_1.AuxInt 4508 if !(!uaddOvf(c, d)) { 4509 break 4510 } 4511 v.reset(OpLsh32x64) 4512 v.AddArg(x) 4513 v0 := b.NewValue0(v.Line, OpConst64, t) 4514 v0.AuxInt = c + d 4515 v.AddArg(v0) 4516 return true 4517 } 4518 // match: (Lsh32x64 (Rsh32Ux64 (Lsh32x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) 4519 // 
cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3) 4520 // result: (Lsh32x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3])) 4521 for { 4522 v_0 := v.Args[0] 4523 if v_0.Op != OpRsh32Ux64 { 4524 break 4525 } 4526 v_0_0 := v_0.Args[0] 4527 if v_0_0.Op != OpLsh32x64 { 4528 break 4529 } 4530 x := v_0_0.Args[0] 4531 v_0_0_1 := v_0_0.Args[1] 4532 if v_0_0_1.Op != OpConst64 { 4533 break 4534 } 4535 c1 := v_0_0_1.AuxInt 4536 v_0_1 := v_0.Args[1] 4537 if v_0_1.Op != OpConst64 { 4538 break 4539 } 4540 c2 := v_0_1.AuxInt 4541 v_1 := v.Args[1] 4542 if v_1.Op != OpConst64 { 4543 break 4544 } 4545 c3 := v_1.AuxInt 4546 if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { 4547 break 4548 } 4549 v.reset(OpLsh32x64) 4550 v.AddArg(x) 4551 v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64()) 4552 v0.AuxInt = c1 - c2 + c3 4553 v.AddArg(v0) 4554 return true 4555 } 4556 return false 4557 } 4558 func rewriteValuegeneric_OpLsh32x8(v *Value, config *Config) bool { 4559 b := v.Block 4560 _ = b 4561 // match: (Lsh32x8 <t> x (Const8 [c])) 4562 // cond: 4563 // result: (Lsh32x64 x (Const64 <t> [int64(uint8(c))])) 4564 for { 4565 t := v.Type 4566 x := v.Args[0] 4567 v_1 := v.Args[1] 4568 if v_1.Op != OpConst8 { 4569 break 4570 } 4571 c := v_1.AuxInt 4572 v.reset(OpLsh32x64) 4573 v.AddArg(x) 4574 v0 := b.NewValue0(v.Line, OpConst64, t) 4575 v0.AuxInt = int64(uint8(c)) 4576 v.AddArg(v0) 4577 return true 4578 } 4579 // match: (Lsh32x8 (Const32 [0]) _) 4580 // cond: 4581 // result: (Const32 [0]) 4582 for { 4583 v_0 := v.Args[0] 4584 if v_0.Op != OpConst32 { 4585 break 4586 } 4587 if v_0.AuxInt != 0 { 4588 break 4589 } 4590 v.reset(OpConst32) 4591 v.AuxInt = 0 4592 return true 4593 } 4594 return false 4595 } 4596 func rewriteValuegeneric_OpLsh64x16(v *Value, config *Config) bool { 4597 b := v.Block 4598 _ = b 4599 // match: (Lsh64x16 <t> x (Const16 [c])) 4600 // cond: 4601 // result: (Lsh64x64 x (Const64 <t> [int64(uint16(c))])) 
4602 for { 4603 t := v.Type 4604 x := v.Args[0] 4605 v_1 := v.Args[1] 4606 if v_1.Op != OpConst16 { 4607 break 4608 } 4609 c := v_1.AuxInt 4610 v.reset(OpLsh64x64) 4611 v.AddArg(x) 4612 v0 := b.NewValue0(v.Line, OpConst64, t) 4613 v0.AuxInt = int64(uint16(c)) 4614 v.AddArg(v0) 4615 return true 4616 } 4617 // match: (Lsh64x16 (Const64 [0]) _) 4618 // cond: 4619 // result: (Const64 [0]) 4620 for { 4621 v_0 := v.Args[0] 4622 if v_0.Op != OpConst64 { 4623 break 4624 } 4625 if v_0.AuxInt != 0 { 4626 break 4627 } 4628 v.reset(OpConst64) 4629 v.AuxInt = 0 4630 return true 4631 } 4632 return false 4633 } 4634 func rewriteValuegeneric_OpLsh64x32(v *Value, config *Config) bool { 4635 b := v.Block 4636 _ = b 4637 // match: (Lsh64x32 <t> x (Const32 [c])) 4638 // cond: 4639 // result: (Lsh64x64 x (Const64 <t> [int64(uint32(c))])) 4640 for { 4641 t := v.Type 4642 x := v.Args[0] 4643 v_1 := v.Args[1] 4644 if v_1.Op != OpConst32 { 4645 break 4646 } 4647 c := v_1.AuxInt 4648 v.reset(OpLsh64x64) 4649 v.AddArg(x) 4650 v0 := b.NewValue0(v.Line, OpConst64, t) 4651 v0.AuxInt = int64(uint32(c)) 4652 v.AddArg(v0) 4653 return true 4654 } 4655 // match: (Lsh64x32 (Const64 [0]) _) 4656 // cond: 4657 // result: (Const64 [0]) 4658 for { 4659 v_0 := v.Args[0] 4660 if v_0.Op != OpConst64 { 4661 break 4662 } 4663 if v_0.AuxInt != 0 { 4664 break 4665 } 4666 v.reset(OpConst64) 4667 v.AuxInt = 0 4668 return true 4669 } 4670 return false 4671 } 4672 func rewriteValuegeneric_OpLsh64x64(v *Value, config *Config) bool { 4673 b := v.Block 4674 _ = b 4675 // match: (Lsh64x64 (Const64 [c]) (Const64 [d])) 4676 // cond: 4677 // result: (Const64 [c << uint64(d)]) 4678 for { 4679 v_0 := v.Args[0] 4680 if v_0.Op != OpConst64 { 4681 break 4682 } 4683 c := v_0.AuxInt 4684 v_1 := v.Args[1] 4685 if v_1.Op != OpConst64 { 4686 break 4687 } 4688 d := v_1.AuxInt 4689 v.reset(OpConst64) 4690 v.AuxInt = c << uint64(d) 4691 return true 4692 } 4693 // match: (Lsh64x64 x (Const64 [0])) 4694 // cond: 4695 // result: x 4696 
for { 4697 x := v.Args[0] 4698 v_1 := v.Args[1] 4699 if v_1.Op != OpConst64 { 4700 break 4701 } 4702 if v_1.AuxInt != 0 { 4703 break 4704 } 4705 v.reset(OpCopy) 4706 v.Type = x.Type 4707 v.AddArg(x) 4708 return true 4709 } 4710 // match: (Lsh64x64 (Const64 [0]) _) 4711 // cond: 4712 // result: (Const64 [0]) 4713 for { 4714 v_0 := v.Args[0] 4715 if v_0.Op != OpConst64 { 4716 break 4717 } 4718 if v_0.AuxInt != 0 { 4719 break 4720 } 4721 v.reset(OpConst64) 4722 v.AuxInt = 0 4723 return true 4724 } 4725 // match: (Lsh64x64 _ (Const64 [c])) 4726 // cond: uint64(c) >= 64 4727 // result: (Const64 [0]) 4728 for { 4729 v_1 := v.Args[1] 4730 if v_1.Op != OpConst64 { 4731 break 4732 } 4733 c := v_1.AuxInt 4734 if !(uint64(c) >= 64) { 4735 break 4736 } 4737 v.reset(OpConst64) 4738 v.AuxInt = 0 4739 return true 4740 } 4741 // match: (Lsh64x64 <t> (Lsh64x64 x (Const64 [c])) (Const64 [d])) 4742 // cond: !uaddOvf(c,d) 4743 // result: (Lsh64x64 x (Const64 <t> [c+d])) 4744 for { 4745 t := v.Type 4746 v_0 := v.Args[0] 4747 if v_0.Op != OpLsh64x64 { 4748 break 4749 } 4750 x := v_0.Args[0] 4751 v_0_1 := v_0.Args[1] 4752 if v_0_1.Op != OpConst64 { 4753 break 4754 } 4755 c := v_0_1.AuxInt 4756 v_1 := v.Args[1] 4757 if v_1.Op != OpConst64 { 4758 break 4759 } 4760 d := v_1.AuxInt 4761 if !(!uaddOvf(c, d)) { 4762 break 4763 } 4764 v.reset(OpLsh64x64) 4765 v.AddArg(x) 4766 v0 := b.NewValue0(v.Line, OpConst64, t) 4767 v0.AuxInt = c + d 4768 v.AddArg(v0) 4769 return true 4770 } 4771 // match: (Lsh64x64 (Rsh64Ux64 (Lsh64x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) 4772 // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3) 4773 // result: (Lsh64x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3])) 4774 for { 4775 v_0 := v.Args[0] 4776 if v_0.Op != OpRsh64Ux64 { 4777 break 4778 } 4779 v_0_0 := v_0.Args[0] 4780 if v_0_0.Op != OpLsh64x64 { 4781 break 4782 } 4783 x := v_0_0.Args[0] 4784 v_0_0_1 := v_0_0.Args[1] 4785 if v_0_0_1.Op != OpConst64 { 4786 break 4787 
} 4788 c1 := v_0_0_1.AuxInt 4789 v_0_1 := v_0.Args[1] 4790 if v_0_1.Op != OpConst64 { 4791 break 4792 } 4793 c2 := v_0_1.AuxInt 4794 v_1 := v.Args[1] 4795 if v_1.Op != OpConst64 { 4796 break 4797 } 4798 c3 := v_1.AuxInt 4799 if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { 4800 break 4801 } 4802 v.reset(OpLsh64x64) 4803 v.AddArg(x) 4804 v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64()) 4805 v0.AuxInt = c1 - c2 + c3 4806 v.AddArg(v0) 4807 return true 4808 } 4809 return false 4810 } 4811 func rewriteValuegeneric_OpLsh64x8(v *Value, config *Config) bool { 4812 b := v.Block 4813 _ = b 4814 // match: (Lsh64x8 <t> x (Const8 [c])) 4815 // cond: 4816 // result: (Lsh64x64 x (Const64 <t> [int64(uint8(c))])) 4817 for { 4818 t := v.Type 4819 x := v.Args[0] 4820 v_1 := v.Args[1] 4821 if v_1.Op != OpConst8 { 4822 break 4823 } 4824 c := v_1.AuxInt 4825 v.reset(OpLsh64x64) 4826 v.AddArg(x) 4827 v0 := b.NewValue0(v.Line, OpConst64, t) 4828 v0.AuxInt = int64(uint8(c)) 4829 v.AddArg(v0) 4830 return true 4831 } 4832 // match: (Lsh64x8 (Const64 [0]) _) 4833 // cond: 4834 // result: (Const64 [0]) 4835 for { 4836 v_0 := v.Args[0] 4837 if v_0.Op != OpConst64 { 4838 break 4839 } 4840 if v_0.AuxInt != 0 { 4841 break 4842 } 4843 v.reset(OpConst64) 4844 v.AuxInt = 0 4845 return true 4846 } 4847 return false 4848 } 4849 func rewriteValuegeneric_OpLsh8x16(v *Value, config *Config) bool { 4850 b := v.Block 4851 _ = b 4852 // match: (Lsh8x16 <t> x (Const16 [c])) 4853 // cond: 4854 // result: (Lsh8x64 x (Const64 <t> [int64(uint16(c))])) 4855 for { 4856 t := v.Type 4857 x := v.Args[0] 4858 v_1 := v.Args[1] 4859 if v_1.Op != OpConst16 { 4860 break 4861 } 4862 c := v_1.AuxInt 4863 v.reset(OpLsh8x64) 4864 v.AddArg(x) 4865 v0 := b.NewValue0(v.Line, OpConst64, t) 4866 v0.AuxInt = int64(uint16(c)) 4867 v.AddArg(v0) 4868 return true 4869 } 4870 // match: (Lsh8x16 (Const8 [0]) _) 4871 // cond: 4872 // result: (Const8 [0]) 4873 for { 4874 v_0 := v.Args[0] 4875 
if v_0.Op != OpConst8 { 4876 break 4877 } 4878 if v_0.AuxInt != 0 { 4879 break 4880 } 4881 v.reset(OpConst8) 4882 v.AuxInt = 0 4883 return true 4884 } 4885 return false 4886 } 4887 func rewriteValuegeneric_OpLsh8x32(v *Value, config *Config) bool { 4888 b := v.Block 4889 _ = b 4890 // match: (Lsh8x32 <t> x (Const32 [c])) 4891 // cond: 4892 // result: (Lsh8x64 x (Const64 <t> [int64(uint32(c))])) 4893 for { 4894 t := v.Type 4895 x := v.Args[0] 4896 v_1 := v.Args[1] 4897 if v_1.Op != OpConst32 { 4898 break 4899 } 4900 c := v_1.AuxInt 4901 v.reset(OpLsh8x64) 4902 v.AddArg(x) 4903 v0 := b.NewValue0(v.Line, OpConst64, t) 4904 v0.AuxInt = int64(uint32(c)) 4905 v.AddArg(v0) 4906 return true 4907 } 4908 // match: (Lsh8x32 (Const8 [0]) _) 4909 // cond: 4910 // result: (Const8 [0]) 4911 for { 4912 v_0 := v.Args[0] 4913 if v_0.Op != OpConst8 { 4914 break 4915 } 4916 if v_0.AuxInt != 0 { 4917 break 4918 } 4919 v.reset(OpConst8) 4920 v.AuxInt = 0 4921 return true 4922 } 4923 return false 4924 } 4925 func rewriteValuegeneric_OpLsh8x64(v *Value, config *Config) bool { 4926 b := v.Block 4927 _ = b 4928 // match: (Lsh8x64 (Const8 [c]) (Const64 [d])) 4929 // cond: 4930 // result: (Const8 [int64(int8(c) << uint64(d))]) 4931 for { 4932 v_0 := v.Args[0] 4933 if v_0.Op != OpConst8 { 4934 break 4935 } 4936 c := v_0.AuxInt 4937 v_1 := v.Args[1] 4938 if v_1.Op != OpConst64 { 4939 break 4940 } 4941 d := v_1.AuxInt 4942 v.reset(OpConst8) 4943 v.AuxInt = int64(int8(c) << uint64(d)) 4944 return true 4945 } 4946 // match: (Lsh8x64 x (Const64 [0])) 4947 // cond: 4948 // result: x 4949 for { 4950 x := v.Args[0] 4951 v_1 := v.Args[1] 4952 if v_1.Op != OpConst64 { 4953 break 4954 } 4955 if v_1.AuxInt != 0 { 4956 break 4957 } 4958 v.reset(OpCopy) 4959 v.Type = x.Type 4960 v.AddArg(x) 4961 return true 4962 } 4963 // match: (Lsh8x64 (Const8 [0]) _) 4964 // cond: 4965 // result: (Const8 [0]) 4966 for { 4967 v_0 := v.Args[0] 4968 if v_0.Op != OpConst8 { 4969 break 4970 } 4971 if v_0.AuxInt != 0 { 4972 
break 4973 } 4974 v.reset(OpConst8) 4975 v.AuxInt = 0 4976 return true 4977 } 4978 // match: (Lsh8x64 _ (Const64 [c])) 4979 // cond: uint64(c) >= 8 4980 // result: (Const8 [0]) 4981 for { 4982 v_1 := v.Args[1] 4983 if v_1.Op != OpConst64 { 4984 break 4985 } 4986 c := v_1.AuxInt 4987 if !(uint64(c) >= 8) { 4988 break 4989 } 4990 v.reset(OpConst8) 4991 v.AuxInt = 0 4992 return true 4993 } 4994 // match: (Lsh8x64 <t> (Lsh8x64 x (Const64 [c])) (Const64 [d])) 4995 // cond: !uaddOvf(c,d) 4996 // result: (Lsh8x64 x (Const64 <t> [c+d])) 4997 for { 4998 t := v.Type 4999 v_0 := v.Args[0] 5000 if v_0.Op != OpLsh8x64 { 5001 break 5002 } 5003 x := v_0.Args[0] 5004 v_0_1 := v_0.Args[1] 5005 if v_0_1.Op != OpConst64 { 5006 break 5007 } 5008 c := v_0_1.AuxInt 5009 v_1 := v.Args[1] 5010 if v_1.Op != OpConst64 { 5011 break 5012 } 5013 d := v_1.AuxInt 5014 if !(!uaddOvf(c, d)) { 5015 break 5016 } 5017 v.reset(OpLsh8x64) 5018 v.AddArg(x) 5019 v0 := b.NewValue0(v.Line, OpConst64, t) 5020 v0.AuxInt = c + d 5021 v.AddArg(v0) 5022 return true 5023 } 5024 // match: (Lsh8x64 (Rsh8Ux64 (Lsh8x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) 5025 // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3) 5026 // result: (Lsh8x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3])) 5027 for { 5028 v_0 := v.Args[0] 5029 if v_0.Op != OpRsh8Ux64 { 5030 break 5031 } 5032 v_0_0 := v_0.Args[0] 5033 if v_0_0.Op != OpLsh8x64 { 5034 break 5035 } 5036 x := v_0_0.Args[0] 5037 v_0_0_1 := v_0_0.Args[1] 5038 if v_0_0_1.Op != OpConst64 { 5039 break 5040 } 5041 c1 := v_0_0_1.AuxInt 5042 v_0_1 := v_0.Args[1] 5043 if v_0_1.Op != OpConst64 { 5044 break 5045 } 5046 c2 := v_0_1.AuxInt 5047 v_1 := v.Args[1] 5048 if v_1.Op != OpConst64 { 5049 break 5050 } 5051 c3 := v_1.AuxInt 5052 if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { 5053 break 5054 } 5055 v.reset(OpLsh8x64) 5056 v.AddArg(x) 5057 v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64()) 5058 
v0.AuxInt = c1 - c2 + c3 5059 v.AddArg(v0) 5060 return true 5061 } 5062 return false 5063 } 5064 func rewriteValuegeneric_OpLsh8x8(v *Value, config *Config) bool { 5065 b := v.Block 5066 _ = b 5067 // match: (Lsh8x8 <t> x (Const8 [c])) 5068 // cond: 5069 // result: (Lsh8x64 x (Const64 <t> [int64(uint8(c))])) 5070 for { 5071 t := v.Type 5072 x := v.Args[0] 5073 v_1 := v.Args[1] 5074 if v_1.Op != OpConst8 { 5075 break 5076 } 5077 c := v_1.AuxInt 5078 v.reset(OpLsh8x64) 5079 v.AddArg(x) 5080 v0 := b.NewValue0(v.Line, OpConst64, t) 5081 v0.AuxInt = int64(uint8(c)) 5082 v.AddArg(v0) 5083 return true 5084 } 5085 // match: (Lsh8x8 (Const8 [0]) _) 5086 // cond: 5087 // result: (Const8 [0]) 5088 for { 5089 v_0 := v.Args[0] 5090 if v_0.Op != OpConst8 { 5091 break 5092 } 5093 if v_0.AuxInt != 0 { 5094 break 5095 } 5096 v.reset(OpConst8) 5097 v.AuxInt = 0 5098 return true 5099 } 5100 return false 5101 } 5102 func rewriteValuegeneric_OpMod16(v *Value, config *Config) bool { 5103 b := v.Block 5104 _ = b 5105 // match: (Mod16 (Const16 [c]) (Const16 [d])) 5106 // cond: d != 0 5107 // result: (Const16 [int64(int16(c % d))]) 5108 for { 5109 v_0 := v.Args[0] 5110 if v_0.Op != OpConst16 { 5111 break 5112 } 5113 c := v_0.AuxInt 5114 v_1 := v.Args[1] 5115 if v_1.Op != OpConst16 { 5116 break 5117 } 5118 d := v_1.AuxInt 5119 if !(d != 0) { 5120 break 5121 } 5122 v.reset(OpConst16) 5123 v.AuxInt = int64(int16(c % d)) 5124 return true 5125 } 5126 return false 5127 } 5128 func rewriteValuegeneric_OpMod16u(v *Value, config *Config) bool { 5129 b := v.Block 5130 _ = b 5131 // match: (Mod16u (Const16 [c]) (Const16 [d])) 5132 // cond: d != 0 5133 // result: (Const16 [int64(uint16(c) % uint16(d))]) 5134 for { 5135 v_0 := v.Args[0] 5136 if v_0.Op != OpConst16 { 5137 break 5138 } 5139 c := v_0.AuxInt 5140 v_1 := v.Args[1] 5141 if v_1.Op != OpConst16 { 5142 break 5143 } 5144 d := v_1.AuxInt 5145 if !(d != 0) { 5146 break 5147 } 5148 v.reset(OpConst16) 5149 v.AuxInt = int64(uint16(c) % uint16(d)) 
5150 return true 5151 } 5152 return false 5153 } 5154 func rewriteValuegeneric_OpMod32(v *Value, config *Config) bool { 5155 b := v.Block 5156 _ = b 5157 // match: (Mod32 (Const32 [c]) (Const32 [d])) 5158 // cond: d != 0 5159 // result: (Const32 [int64(int32(c % d))]) 5160 for { 5161 v_0 := v.Args[0] 5162 if v_0.Op != OpConst32 { 5163 break 5164 } 5165 c := v_0.AuxInt 5166 v_1 := v.Args[1] 5167 if v_1.Op != OpConst32 { 5168 break 5169 } 5170 d := v_1.AuxInt 5171 if !(d != 0) { 5172 break 5173 } 5174 v.reset(OpConst32) 5175 v.AuxInt = int64(int32(c % d)) 5176 return true 5177 } 5178 return false 5179 } 5180 func rewriteValuegeneric_OpMod32u(v *Value, config *Config) bool { 5181 b := v.Block 5182 _ = b 5183 // match: (Mod32u (Const32 [c]) (Const32 [d])) 5184 // cond: d != 0 5185 // result: (Const32 [int64(uint32(c) % uint32(d))]) 5186 for { 5187 v_0 := v.Args[0] 5188 if v_0.Op != OpConst32 { 5189 break 5190 } 5191 c := v_0.AuxInt 5192 v_1 := v.Args[1] 5193 if v_1.Op != OpConst32 { 5194 break 5195 } 5196 d := v_1.AuxInt 5197 if !(d != 0) { 5198 break 5199 } 5200 v.reset(OpConst32) 5201 v.AuxInt = int64(uint32(c) % uint32(d)) 5202 return true 5203 } 5204 return false 5205 } 5206 func rewriteValuegeneric_OpMod64(v *Value, config *Config) bool { 5207 b := v.Block 5208 _ = b 5209 // match: (Mod64 (Const64 [c]) (Const64 [d])) 5210 // cond: d != 0 5211 // result: (Const64 [c % d]) 5212 for { 5213 v_0 := v.Args[0] 5214 if v_0.Op != OpConst64 { 5215 break 5216 } 5217 c := v_0.AuxInt 5218 v_1 := v.Args[1] 5219 if v_1.Op != OpConst64 { 5220 break 5221 } 5222 d := v_1.AuxInt 5223 if !(d != 0) { 5224 break 5225 } 5226 v.reset(OpConst64) 5227 v.AuxInt = c % d 5228 return true 5229 } 5230 // match: (Mod64 <t> x (Const64 [c])) 5231 // cond: x.Op != OpConst64 && smagic64ok(c) 5232 // result: (Sub64 x (Mul64 <t> (Div64 <t> x (Const64 <t> [c])) (Const64 <t> [c]))) 5233 for { 5234 t := v.Type 5235 x := v.Args[0] 5236 v_1 := v.Args[1] 5237 if v_1.Op != OpConst64 { 5238 break 5239 } 5240 
c := v_1.AuxInt 5241 if !(x.Op != OpConst64 && smagic64ok(c)) { 5242 break 5243 } 5244 v.reset(OpSub64) 5245 v.AddArg(x) 5246 v0 := b.NewValue0(v.Line, OpMul64, t) 5247 v1 := b.NewValue0(v.Line, OpDiv64, t) 5248 v1.AddArg(x) 5249 v2 := b.NewValue0(v.Line, OpConst64, t) 5250 v2.AuxInt = c 5251 v1.AddArg(v2) 5252 v0.AddArg(v1) 5253 v3 := b.NewValue0(v.Line, OpConst64, t) 5254 v3.AuxInt = c 5255 v0.AddArg(v3) 5256 v.AddArg(v0) 5257 return true 5258 } 5259 return false 5260 } 5261 func rewriteValuegeneric_OpMod64u(v *Value, config *Config) bool { 5262 b := v.Block 5263 _ = b 5264 // match: (Mod64u (Const64 [c]) (Const64 [d])) 5265 // cond: d != 0 5266 // result: (Const64 [int64(uint64(c) % uint64(d))]) 5267 for { 5268 v_0 := v.Args[0] 5269 if v_0.Op != OpConst64 { 5270 break 5271 } 5272 c := v_0.AuxInt 5273 v_1 := v.Args[1] 5274 if v_1.Op != OpConst64 { 5275 break 5276 } 5277 d := v_1.AuxInt 5278 if !(d != 0) { 5279 break 5280 } 5281 v.reset(OpConst64) 5282 v.AuxInt = int64(uint64(c) % uint64(d)) 5283 return true 5284 } 5285 // match: (Mod64u <t> n (Const64 [c])) 5286 // cond: isPowerOfTwo(c) 5287 // result: (And64 n (Const64 <t> [c-1])) 5288 for { 5289 t := v.Type 5290 n := v.Args[0] 5291 v_1 := v.Args[1] 5292 if v_1.Op != OpConst64 { 5293 break 5294 } 5295 c := v_1.AuxInt 5296 if !(isPowerOfTwo(c)) { 5297 break 5298 } 5299 v.reset(OpAnd64) 5300 v.AddArg(n) 5301 v0 := b.NewValue0(v.Line, OpConst64, t) 5302 v0.AuxInt = c - 1 5303 v.AddArg(v0) 5304 return true 5305 } 5306 // match: (Mod64u <t> x (Const64 [c])) 5307 // cond: x.Op != OpConst64 && umagic64ok(c) 5308 // result: (Sub64 x (Mul64 <t> (Div64u <t> x (Const64 <t> [c])) (Const64 <t> [c]))) 5309 for { 5310 t := v.Type 5311 x := v.Args[0] 5312 v_1 := v.Args[1] 5313 if v_1.Op != OpConst64 { 5314 break 5315 } 5316 c := v_1.AuxInt 5317 if !(x.Op != OpConst64 && umagic64ok(c)) { 5318 break 5319 } 5320 v.reset(OpSub64) 5321 v.AddArg(x) 5322 v0 := b.NewValue0(v.Line, OpMul64, t) 5323 v1 := b.NewValue0(v.Line, OpDiv64u, t) 
5324 v1.AddArg(x) 5325 v2 := b.NewValue0(v.Line, OpConst64, t) 5326 v2.AuxInt = c 5327 v1.AddArg(v2) 5328 v0.AddArg(v1) 5329 v3 := b.NewValue0(v.Line, OpConst64, t) 5330 v3.AuxInt = c 5331 v0.AddArg(v3) 5332 v.AddArg(v0) 5333 return true 5334 } 5335 return false 5336 } 5337 func rewriteValuegeneric_OpMod8(v *Value, config *Config) bool { 5338 b := v.Block 5339 _ = b 5340 // match: (Mod8 (Const8 [c]) (Const8 [d])) 5341 // cond: d != 0 5342 // result: (Const8 [int64(int8(c % d))]) 5343 for { 5344 v_0 := v.Args[0] 5345 if v_0.Op != OpConst8 { 5346 break 5347 } 5348 c := v_0.AuxInt 5349 v_1 := v.Args[1] 5350 if v_1.Op != OpConst8 { 5351 break 5352 } 5353 d := v_1.AuxInt 5354 if !(d != 0) { 5355 break 5356 } 5357 v.reset(OpConst8) 5358 v.AuxInt = int64(int8(c % d)) 5359 return true 5360 } 5361 return false 5362 } 5363 func rewriteValuegeneric_OpMod8u(v *Value, config *Config) bool { 5364 b := v.Block 5365 _ = b 5366 // match: (Mod8u (Const8 [c]) (Const8 [d])) 5367 // cond: d != 0 5368 // result: (Const8 [int64(uint8(c) % uint8(d))]) 5369 for { 5370 v_0 := v.Args[0] 5371 if v_0.Op != OpConst8 { 5372 break 5373 } 5374 c := v_0.AuxInt 5375 v_1 := v.Args[1] 5376 if v_1.Op != OpConst8 { 5377 break 5378 } 5379 d := v_1.AuxInt 5380 if !(d != 0) { 5381 break 5382 } 5383 v.reset(OpConst8) 5384 v.AuxInt = int64(uint8(c) % uint8(d)) 5385 return true 5386 } 5387 return false 5388 } 5389 func rewriteValuegeneric_OpMul16(v *Value, config *Config) bool { 5390 b := v.Block 5391 _ = b 5392 // match: (Mul16 (Const16 [c]) (Const16 [d])) 5393 // cond: 5394 // result: (Const16 [int64(int16(c*d))]) 5395 for { 5396 v_0 := v.Args[0] 5397 if v_0.Op != OpConst16 { 5398 break 5399 } 5400 c := v_0.AuxInt 5401 v_1 := v.Args[1] 5402 if v_1.Op != OpConst16 { 5403 break 5404 } 5405 d := v_1.AuxInt 5406 v.reset(OpConst16) 5407 v.AuxInt = int64(int16(c * d)) 5408 return true 5409 } 5410 // match: (Mul16 (Const16 [-1]) x) 5411 // cond: 5412 // result: (Neg16 x) 5413 for { 5414 v_0 := v.Args[0] 5415 if 
v_0.Op != OpConst16 { 5416 break 5417 } 5418 if v_0.AuxInt != -1 { 5419 break 5420 } 5421 x := v.Args[1] 5422 v.reset(OpNeg16) 5423 v.AddArg(x) 5424 return true 5425 } 5426 // match: (Mul16 x (Const16 <t> [c])) 5427 // cond: x.Op != OpConst16 5428 // result: (Mul16 (Const16 <t> [c]) x) 5429 for { 5430 x := v.Args[0] 5431 v_1 := v.Args[1] 5432 if v_1.Op != OpConst16 { 5433 break 5434 } 5435 t := v_1.Type 5436 c := v_1.AuxInt 5437 if !(x.Op != OpConst16) { 5438 break 5439 } 5440 v.reset(OpMul16) 5441 v0 := b.NewValue0(v.Line, OpConst16, t) 5442 v0.AuxInt = c 5443 v.AddArg(v0) 5444 v.AddArg(x) 5445 return true 5446 } 5447 // match: (Mul16 (Const16 [0]) _) 5448 // cond: 5449 // result: (Const16 [0]) 5450 for { 5451 v_0 := v.Args[0] 5452 if v_0.Op != OpConst16 { 5453 break 5454 } 5455 if v_0.AuxInt != 0 { 5456 break 5457 } 5458 v.reset(OpConst16) 5459 v.AuxInt = 0 5460 return true 5461 } 5462 return false 5463 } 5464 func rewriteValuegeneric_OpMul32(v *Value, config *Config) bool { 5465 b := v.Block 5466 _ = b 5467 // match: (Mul32 (Const32 [c]) (Const32 [d])) 5468 // cond: 5469 // result: (Const32 [int64(int32(c*d))]) 5470 for { 5471 v_0 := v.Args[0] 5472 if v_0.Op != OpConst32 { 5473 break 5474 } 5475 c := v_0.AuxInt 5476 v_1 := v.Args[1] 5477 if v_1.Op != OpConst32 { 5478 break 5479 } 5480 d := v_1.AuxInt 5481 v.reset(OpConst32) 5482 v.AuxInt = int64(int32(c * d)) 5483 return true 5484 } 5485 // match: (Mul32 (Const32 [-1]) x) 5486 // cond: 5487 // result: (Neg32 x) 5488 for { 5489 v_0 := v.Args[0] 5490 if v_0.Op != OpConst32 { 5491 break 5492 } 5493 if v_0.AuxInt != -1 { 5494 break 5495 } 5496 x := v.Args[1] 5497 v.reset(OpNeg32) 5498 v.AddArg(x) 5499 return true 5500 } 5501 // match: (Mul32 x (Const32 <t> [c])) 5502 // cond: x.Op != OpConst32 5503 // result: (Mul32 (Const32 <t> [c]) x) 5504 for { 5505 x := v.Args[0] 5506 v_1 := v.Args[1] 5507 if v_1.Op != OpConst32 { 5508 break 5509 } 5510 t := v_1.Type 5511 c := v_1.AuxInt 5512 if !(x.Op != OpConst32) { 5513 break 
5514 } 5515 v.reset(OpMul32) 5516 v0 := b.NewValue0(v.Line, OpConst32, t) 5517 v0.AuxInt = c 5518 v.AddArg(v0) 5519 v.AddArg(x) 5520 return true 5521 } 5522 // match: (Mul32 (Const32 <t> [c]) (Add32 <t> (Const32 <t> [d]) x)) 5523 // cond: 5524 // result: (Add32 (Const32 <t> [int64(int32(c*d))]) (Mul32 <t> (Const32 <t> [c]) x)) 5525 for { 5526 v_0 := v.Args[0] 5527 if v_0.Op != OpConst32 { 5528 break 5529 } 5530 t := v_0.Type 5531 c := v_0.AuxInt 5532 v_1 := v.Args[1] 5533 if v_1.Op != OpAdd32 { 5534 break 5535 } 5536 if v_1.Type != t { 5537 break 5538 } 5539 v_1_0 := v_1.Args[0] 5540 if v_1_0.Op != OpConst32 { 5541 break 5542 } 5543 if v_1_0.Type != t { 5544 break 5545 } 5546 d := v_1_0.AuxInt 5547 x := v_1.Args[1] 5548 v.reset(OpAdd32) 5549 v0 := b.NewValue0(v.Line, OpConst32, t) 5550 v0.AuxInt = int64(int32(c * d)) 5551 v.AddArg(v0) 5552 v1 := b.NewValue0(v.Line, OpMul32, t) 5553 v2 := b.NewValue0(v.Line, OpConst32, t) 5554 v2.AuxInt = c 5555 v1.AddArg(v2) 5556 v1.AddArg(x) 5557 v.AddArg(v1) 5558 return true 5559 } 5560 // match: (Mul32 (Const32 [0]) _) 5561 // cond: 5562 // result: (Const32 [0]) 5563 for { 5564 v_0 := v.Args[0] 5565 if v_0.Op != OpConst32 { 5566 break 5567 } 5568 if v_0.AuxInt != 0 { 5569 break 5570 } 5571 v.reset(OpConst32) 5572 v.AuxInt = 0 5573 return true 5574 } 5575 return false 5576 } 5577 func rewriteValuegeneric_OpMul32F(v *Value, config *Config) bool { 5578 b := v.Block 5579 _ = b 5580 // match: (Mul32F (Const32F [c]) (Const32F [d])) 5581 // cond: 5582 // result: (Const32F [f2i(float64(i2f32(c) * i2f32(d)))]) 5583 for { 5584 v_0 := v.Args[0] 5585 if v_0.Op != OpConst32F { 5586 break 5587 } 5588 c := v_0.AuxInt 5589 v_1 := v.Args[1] 5590 if v_1.Op != OpConst32F { 5591 break 5592 } 5593 d := v_1.AuxInt 5594 v.reset(OpConst32F) 5595 v.AuxInt = f2i(float64(i2f32(c) * i2f32(d))) 5596 return true 5597 } 5598 // match: (Mul32F x (Const32F [f2i(1)])) 5599 // cond: 5600 // result: x 5601 for { 5602 x := v.Args[0] 5603 v_1 := v.Args[1] 5604 if 
v_1.Op != OpConst32F { 5605 break 5606 } 5607 if v_1.AuxInt != f2i(1) { 5608 break 5609 } 5610 v.reset(OpCopy) 5611 v.Type = x.Type 5612 v.AddArg(x) 5613 return true 5614 } 5615 // match: (Mul32F (Const32F [f2i(1)]) x) 5616 // cond: 5617 // result: x 5618 for { 5619 v_0 := v.Args[0] 5620 if v_0.Op != OpConst32F { 5621 break 5622 } 5623 if v_0.AuxInt != f2i(1) { 5624 break 5625 } 5626 x := v.Args[1] 5627 v.reset(OpCopy) 5628 v.Type = x.Type 5629 v.AddArg(x) 5630 return true 5631 } 5632 // match: (Mul32F x (Const32F [f2i(-1)])) 5633 // cond: 5634 // result: (Neg32F x) 5635 for { 5636 x := v.Args[0] 5637 v_1 := v.Args[1] 5638 if v_1.Op != OpConst32F { 5639 break 5640 } 5641 if v_1.AuxInt != f2i(-1) { 5642 break 5643 } 5644 v.reset(OpNeg32F) 5645 v.AddArg(x) 5646 return true 5647 } 5648 // match: (Mul32F (Const32F [f2i(-1)]) x) 5649 // cond: 5650 // result: (Neg32F x) 5651 for { 5652 v_0 := v.Args[0] 5653 if v_0.Op != OpConst32F { 5654 break 5655 } 5656 if v_0.AuxInt != f2i(-1) { 5657 break 5658 } 5659 x := v.Args[1] 5660 v.reset(OpNeg32F) 5661 v.AddArg(x) 5662 return true 5663 } 5664 return false 5665 } 5666 func rewriteValuegeneric_OpMul64(v *Value, config *Config) bool { 5667 b := v.Block 5668 _ = b 5669 // match: (Mul64 (Const64 [c]) (Const64 [d])) 5670 // cond: 5671 // result: (Const64 [c*d]) 5672 for { 5673 v_0 := v.Args[0] 5674 if v_0.Op != OpConst64 { 5675 break 5676 } 5677 c := v_0.AuxInt 5678 v_1 := v.Args[1] 5679 if v_1.Op != OpConst64 { 5680 break 5681 } 5682 d := v_1.AuxInt 5683 v.reset(OpConst64) 5684 v.AuxInt = c * d 5685 return true 5686 } 5687 // match: (Mul64 (Const64 [-1]) x) 5688 // cond: 5689 // result: (Neg64 x) 5690 for { 5691 v_0 := v.Args[0] 5692 if v_0.Op != OpConst64 { 5693 break 5694 } 5695 if v_0.AuxInt != -1 { 5696 break 5697 } 5698 x := v.Args[1] 5699 v.reset(OpNeg64) 5700 v.AddArg(x) 5701 return true 5702 } 5703 // match: (Mul64 x (Const64 <t> [c])) 5704 // cond: x.Op != OpConst64 5705 // result: (Mul64 (Const64 <t> [c]) x) 5706 for { 
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst64) {
			break
		}
		v.reset(OpMul64)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Mul64 (Const64 <t> [c]) (Add64 <t> (Const64 <t> [d]) x))
	// cond:
	// result: (Add64 (Const64 <t> [c*d]) (Mul64 <t> (Const64 <t> [c]) x))
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpAdd64 {
			break
		}
		if v_1.Type != t {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst64 {
			break
		}
		if v_1_0.Type != t {
			break
		}
		d := v_1_0.AuxInt
		x := v_1.Args[1]
		v.reset(OpAdd64)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = c * d
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpMul64, t)
		v2 := b.NewValue0(v.Line, OpConst64, t)
		v2.AuxInt = c
		v1.AddArg(v2)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
	// match: (Mul64 (Const64 [0]) _)
	// cond:
	// result: (Const64 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpMul64F applies the generic rewrite rules for an
// OpMul64F value v; it reports whether v was rewritten. Constant operands
// are folded via i2f/f2i bit conversions; multiplication by 1 or -1 is
// simplified away.
func rewriteValuegeneric_OpMul64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mul64F (Const64F [c]) (Const64F [d]))
	// cond:
	// result: (Const64F [f2i(i2f(c) * i2f(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64F {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64F {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst64F)
		v.AuxInt = f2i(i2f(c) * i2f(d))
		return true
	}
	// match: (Mul64F x (Const64F [f2i(1)]))
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64F {
			break
		}
		if v_1.AuxInt != f2i(1) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Mul64F (Const64F [f2i(1)]) x)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64F {
			break
		}
		if v_0.AuxInt != f2i(1) {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Mul64F x (Const64F [f2i(-1)]))
	// cond:
	// result: (Neg64F x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64F {
			break
		}
		if v_1.AuxInt != f2i(-1) {
			break
		}
		v.reset(OpNeg64F)
		v.AddArg(x)
		return true
	}
	// match: (Mul64F (Const64F [f2i(-1)]) x)
	// cond:
	// result: (Neg64F x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64F {
			break
		}
		if v_0.AuxInt != f2i(-1) {
			break
		}
		x := v.Args[1]
		v.reset(OpNeg64F)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpMul8 applies the generic rewrite rules for an OpMul8
// value v; it reports whether v was rewritten.
func rewriteValuegeneric_OpMul8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mul8 (Const8 [c]) (Const8 [d]))
	// cond:
	// result: (Const8 [int64(int8(c*d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst8)
		v.AuxInt = int64(int8(c * d))
		return true
	}
	// match: (Mul8 (Const8 [-1]) x)
	// cond:
	// result: (Neg8 x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		if v_0.AuxInt != -1 {
			break
		}
		x := v.Args[1]
		v.reset(OpNeg8)
		v.AddArg(x)
		return true
	}
	// match: (Mul8 x (Const8 <t> [c]))
	// cond: x.Op != OpConst8
	// result: (Mul8 (Const8 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst8) {
			break
		}
		v.reset(OpMul8)
		v0 := b.NewValue0(v.Line, OpConst8, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Mul8 (Const8 [0]) _)
	// cond:
	// result: (Const8 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpNeg16 applies the generic rewrite rules for an
// OpNeg16 value v; it reports whether v was rewritten.
func rewriteValuegeneric_OpNeg16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Neg16 (Sub16 x y))
	// cond:
	// result: (Sub16 y x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpSub16 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpSub16)
		v.AddArg(y)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpNeg32 applies the generic rewrite rules for an
// OpNeg32 value v; it reports whether v was rewritten.
func rewriteValuegeneric_OpNeg32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Neg32 (Sub32 x y))
	// cond:
	// result: (Sub32 y x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpSub32 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpSub32)
		v.AddArg(y)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpNeg64 applies the generic rewrite rules for an
// OpNeg64 value v; it reports whether v was rewritten.
func rewriteValuegeneric_OpNeg64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Neg64 (Sub64 x y))
	// cond:
	// result: (Sub64 y x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpSub64 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpSub64)
		v.AddArg(y)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpNeg8 applies the generic rewrite rules for an
// OpNeg8 value v; it reports whether v was rewritten.
func rewriteValuegeneric_OpNeg8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Neg8 (Sub8 x y))
	// cond:
	// result: (Sub8 y x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpSub8 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpSub8)
		v.AddArg(y)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpNeq16 applies the generic rewrite rules for an
// OpNeq16 value v; it reports whether v was rewritten.
func rewriteValuegeneric_OpNeq16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Neq16 x x)
	// cond:
	// result: (ConstBool [0])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 0
		return true
	}
	// match: (Neq16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x))
	// cond:
	// result: (Neq16 (Const16 <t> [int64(int16(c-d))]) x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpAdd16 {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst16 {
			break
		}
		if v_1_0.Type != t {
			break
		}
		d := v_1_0.AuxInt
		x := v_1.Args[1]
		v.reset(OpNeq16)
		v0 := b.NewValue0(v.Line, OpConst16, t)
		v0.AuxInt = int64(int16(c - d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Neq16 x (Const16 <t> [c]))
	// cond: x.Op != OpConst16
	// result: (Neq16 (Const16 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst16) {
			break
		}
		v.reset(OpNeq16)
		v0 := b.NewValue0(v.Line, OpConst16, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Neq16 (Const16 [c]) (Const16 [d]))
	// cond:
	// result: (ConstBool [b2i(c != d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c != d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpNeq32 applies the generic rewrite rules for an
// OpNeq32 value v; it reports whether v was rewritten.
func rewriteValuegeneric_OpNeq32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Neq32 x x)
	// cond:
	// result: (ConstBool [0])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 0
		return true
	}
	// match: (Neq32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x))
	// cond:
	// result: (Neq32 (Const32 <t> [int64(int32(c-d))]) x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpAdd32 {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst32 {
			break
		}
		if v_1_0.Type != t {
			break
		}
		d := v_1_0.AuxInt
		x := v_1.Args[1]
		v.reset(OpNeq32)
		v0 := b.NewValue0(v.Line, OpConst32, t)
		v0.AuxInt = int64(int32(c - d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Neq32 x (Const32 <t> [c]))
	// cond: x.Op != OpConst32
	// result: (Neq32 (Const32 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst32) {
			break
		}
		v.reset(OpNeq32)
		v0 := b.NewValue0(v.Line, OpConst32, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Neq32 (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(c != d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c != d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpNeq64 applies the generic rewrite rules for an
// OpNeq64 value v; it reports whether v was rewritten.
func rewriteValuegeneric_OpNeq64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Neq64 x x)
	// cond:
	// result: (ConstBool [0])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 0
		return true
	}
	// match: (Neq64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x))
	// cond:
	// result: (Neq64 (Const64 <t> [c-d]) x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpAdd64 {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst64 {
			break
		}
		if v_1_0.Type != t {
			break
		}
		d := v_1_0.AuxInt
		x := v_1.Args[1]
		v.reset(OpNeq64)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = c - d
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Neq64 x (Const64 <t> [c]))
	// cond: x.Op != OpConst64
	// result: (Neq64 (Const64 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst64) {
			break
		}
		v.reset(OpNeq64)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Neq64 (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (ConstBool [b2i(c != d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c != d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpNeq8 applies the generic rewrite rules for an
// OpNeq8 value v; it reports whether v was rewritten.
func rewriteValuegeneric_OpNeq8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Neq8 x x)
	// cond:
	// result: (ConstBool [0])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 0
		return true
	}
	// match: (Neq8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x))
	// cond:
	// result: (Neq8 (Const8 <t> [int64(int8(c-d))]) x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpAdd8 {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst8 {
			break
		}
		if v_1_0.Type != t {
			break
		}
		d := v_1_0.AuxInt
		x := v_1.Args[1]
		v.reset(OpNeq8)
		v0 := b.NewValue0(v.Line, OpConst8, t)
		v0.AuxInt = int64(int8(c - d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Neq8 x (Const8 <t> [c]))
	// cond: x.Op != OpConst8
	// result: (Neq8 (Const8 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst8) {
			break
		}
		v.reset(OpNeq8)
		v0 := b.NewValue0(v.Line, OpConst8, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Neq8 (Const8 [c]) (Const8 [d]))
	// cond:
	// result: (ConstBool [b2i(c != d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c != d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpNeqB applies the generic rewrite rules for an
// OpNeqB value v; it reports whether v was rewritten.
func rewriteValuegeneric_OpNeqB(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (NeqB (ConstBool [c]) (ConstBool [d]))
	// cond:
	// result: (ConstBool [b2i(c != d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConstBool {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConstBool {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c != d)
		return true
	}
	// match: (NeqB (ConstBool [0]) x)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConstBool {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (NeqB (ConstBool [1]) x)
	// cond:
	// result: (Not x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConstBool {
			break
		}
		if v_0.AuxInt != 1 {
			break
		}
		x := v.Args[1]
		v.reset(OpNot)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpNeqInter lowers an interface comparison to a pointer
// comparison of the itabs; it always rewrites and returns true.
func rewriteValuegeneric_OpNeqInter(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (NeqInter x y)
	// cond:
	// result: (NeqPtr (ITab x) (ITab y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpNeqPtr)
		v0 := b.NewValue0(v.Line, OpITab, config.fe.TypeBytePtr())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpITab, config.fe.TypeBytePtr())
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}

// rewriteValuegeneric_OpNeqPtr applies the generic rewrite rules for an
// OpNeqPtr value v; it reports whether v was rewritten.
func rewriteValuegeneric_OpNeqPtr(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (NeqPtr p (ConstNil))
	// cond:
	// result: (IsNonNil p)
	for {
		p := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConstNil {
			break
		}
		v.reset(OpIsNonNil)
		v.AddArg(p)
		return true
	}
	// match: (NeqPtr (ConstNil) p)
	// cond:
	// result: (IsNonNil p)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConstNil {
			break
		}
		p := v.Args[1]
		v.reset(OpIsNonNil)
		v.AddArg(p)
		return true
	}
	return false
}

// rewriteValuegeneric_OpNeqSlice lowers a slice comparison to a pointer
// comparison of the slice data pointers; it always rewrites and returns true.
func rewriteValuegeneric_OpNeqSlice(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (NeqSlice x y)
	// cond:
	// result: (NeqPtr (SlicePtr x) (SlicePtr y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpNeqPtr)
		v0 := b.NewValue0(v.Line, OpSlicePtr, config.fe.TypeBytePtr())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpSlicePtr, config.fe.TypeBytePtr())
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}

// rewriteValuegeneric_OpNilCheck applies the generic rewrite rules for an
// OpNilCheck value v; it reports whether v was rewritten. The checks removed
// here are for the g register and for pointers freshly returned by
// runtime.newobject, which the rules treat as provably non-nil.
func rewriteValuegeneric_OpNilCheck(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (NilCheck (GetG mem) mem)
	// cond:
	// result: mem
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpGetG {
			break
		}
		mem := v_0.Args[0]
		if mem != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = mem.Type
		v.AddArg(mem)
		return true
	}
	// match: (NilCheck (Load (OffPtr [c] (SP)) mem) mem)
	// cond: mem.Op == OpStaticCall && isSameSym(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize() + config.RegSize && warnRule(config.Debug_checknil() && int(v.Line) > 1, v, "removed nil check")
	// result: (Invalid)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpLoad {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpOffPtr {
			break
		}
		c := v_0_0.AuxInt
		v_0_0_0 := v_0_0.Args[0]
		if v_0_0_0.Op != OpSP {
			break
		}
		mem := v_0.Args[1]
		if mem != v.Args[1] {
			break
		}
		if !(mem.Op == OpStaticCall && isSameSym(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize()+config.RegSize && warnRule(config.Debug_checknil() && int(v.Line) > 1, v, "removed nil check")) {
			break
		}
		v.reset(OpInvalid)
		return true
	}
	// match: (NilCheck (OffPtr (Load (OffPtr [c] (SP)) mem)) mem)
	// cond: mem.Op == OpStaticCall && isSameSym(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize() + config.RegSize && warnRule(config.Debug_checknil() && int(v.Line) > 1, v, "removed nil check")
	// result: (Invalid)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpOffPtr {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpLoad {
			break
		}
		v_0_0_0 := v_0_0.Args[0]
		if v_0_0_0.Op != OpOffPtr {
			break
		}
		c := v_0_0_0.AuxInt
		v_0_0_0_0 := v_0_0_0.Args[0]
		if v_0_0_0_0.Op != OpSP {
			break
		}
		mem := v_0_0.Args[1]
		if mem != v.Args[1] {
			break
		}
		if !(mem.Op == OpStaticCall && isSameSym(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize()+config.RegSize && warnRule(config.Debug_checknil() && int(v.Line) > 1, v, "removed nil check")) {
			break
		}
		v.reset(OpInvalid)
		return true
	}
	return false
}

// rewriteValuegeneric_OpNot applies the generic rewrite rules for an OpNot
// value v; it reports whether v was rewritten. Each case pushes the negation
// into the comparison operand (e.g. !(x == y) => x != y).
func rewriteValuegeneric_OpNot(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Not (Eq64 x y))
	// cond:
	// result: (Neq64 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpEq64 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpNeq64)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Not (Eq32 x y))
	// cond:
	// result: (Neq32 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpEq32 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpNeq32)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Not (Eq16 x y))
	// cond:
	// result: (Neq16 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpEq16 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpNeq16)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Not (Eq8 x y))
	// cond:
	// result: (Neq8 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpEq8 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpNeq8)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Not (EqB x y))
	// cond:
	// result: (NeqB x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpEqB {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpNeqB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Not (Neq64 x y))
	// cond:
	// result: (Eq64 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpNeq64 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpEq64)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Not (Neq32 x y))
	// cond:
	// result: (Eq32 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpNeq32 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpEq32)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Not (Neq16 x y))
	// cond:
	// result: (Eq16 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpNeq16 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpEq16)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Not (Neq8 x y))
	// cond:
	// result: (Eq8 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpNeq8 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpEq8)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Not (NeqB x y))
	// cond:
	// result: (EqB x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpNeqB {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpEqB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Not (Greater64 x y))
	// cond:
	// result: (Leq64 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpGreater64 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpLeq64)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Not (Greater32 x y))
	// cond:
	// result: (Leq32 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpGreater32 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpLeq32)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Not (Greater16 x y))
	// cond:
	// result: (Leq16 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpGreater16 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpLeq16)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Not (Greater8 x y))
	// cond:
	// result: (Leq8 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpGreater8 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpLeq8)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Not (Greater64U x y))
	// cond:
	// result: (Leq64U x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpGreater64U {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpLeq64U)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Not (Greater32U x y))
	// cond:
	// result: (Leq32U x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpGreater32U {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpLeq32U)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Not (Greater16U x y))
	// cond:
	// result: (Leq16U x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpGreater16U {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpLeq16U)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Not (Greater8U x y))
	// cond:
	// result: (Leq8U x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpGreater8U {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpLeq8U)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Not (Geq64 x y))
	// cond:
	// result: (Less64 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpGeq64 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpLess64)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Not (Geq32 x y))
	// cond:
	// result: (Less32 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpGeq32 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpLess32)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Not (Geq16 x y))
	// cond:
	// result: (Less16 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpGeq16 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpLess16)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Not (Geq8 x y))
	// cond:
	// result: (Less8 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpGeq8 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpLess8)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Not (Geq64U x y))
	// cond:
	// result: (Less64U x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpGeq64U {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpLess64U)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Not (Geq32U x y))
	// cond:
	// result: (Less32U x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpGeq32U {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpLess32U)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Not (Geq16U x y))
	// cond:
	// result: (Less16U x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpGeq16U {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpLess16U)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Not (Geq8U x y))
	// cond:
	// result: (Less8U x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpGeq8U {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpLess8U)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Not (Less64 x y))
	// cond:
	// result: (Geq64 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpLess64 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpGeq64)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Not (Less32 x y))
	// cond:
	// result: (Geq32 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpLess32 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpGeq32)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Not (Less16 x y))
	// cond:
	// result: (Geq16 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpLess16 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpGeq16)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Not (Less8 x y))
	// cond:
	// result: (Geq8 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpLess8 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpGeq8)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Not (Less64U x y))
	// cond:
	// result: (Geq64U x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpLess64U {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpGeq64U)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Not (Less32U x y))
	// cond:
	// result: (Geq32U x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpLess32U {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpGeq32U)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Not (Less16U x y))
	// cond:
	// result: (Geq16U x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpLess16U {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpGeq16U)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Not (Less8U x y))
	// cond:
	// result: (Geq8U x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpLess8U {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpGeq8U)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Not (Leq64 x y))
	// cond:
	// result: (Greater64 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpLeq64 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpGreater64)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Not (Leq32 x y))
	// cond:
	// result: (Greater32 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpLeq32 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpGreater32)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Not (Leq16 x y))
	// cond:
	// result: (Greater16 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpLeq16 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpGreater16)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Not (Leq8 x y))
	// cond:
	// result: (Greater8 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpLeq8 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpGreater8)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Not (Leq64U x y))
	// cond:
	// result: (Greater64U x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpLeq64U {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpGreater64U)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Not (Leq32U x y))
	// cond:
	// result: (Greater32U x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpLeq32U {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpGreater32U)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Not (Leq16U x y))
	// cond:
	// result: (Greater16U x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpLeq16U {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpGreater16U)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Not (Leq8U x y))
	// cond:
	// result: (Greater8U x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpLeq8U {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpGreater8U)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}

// rewriteValuegeneric_OpOffPtr applies the generic rewrite rules for an
// OpOffPtr value v; it reports whether v was rewritten. Nested offsets are
// collapsed and a zero offset of identical type is elided.
func rewriteValuegeneric_OpOffPtr(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (OffPtr (OffPtr p [b]) [a])
	// cond:
	// result: (OffPtr p [a+b])
	for {
		a := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpOffPtr {
			break
		}
		b := v_0.AuxInt
		p := v_0.Args[0]
		v.reset(OpOffPtr)
		v.AuxInt = a + b
		v.AddArg(p)
		return true
	}
	// match: (OffPtr p [0])
	// cond: v.Type.Compare(p.Type) == CMPeq
	// result: p
	for {
		if v.AuxInt != 0 {
			break
		}
		p := v.Args[0]
		if !(v.Type.Compare(p.Type) == CMPeq) {
			break
		}
		v.reset(OpCopy)
		v.Type = p.Type
		v.AddArg(p)
		return true
	}
	return false
}

// rewriteValuegeneric_OpOr16 applies the generic rewrite rules for an
// OpOr16 value v; it reports whether v was rewritten.
func rewriteValuegeneric_OpOr16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Or16 x (Const16 <t> [c]))
	// cond: x.Op != OpConst16
	// result: (Or16 (Const16 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst16) {
			break
		}
		v.reset(OpOr16)
		v0 := b.NewValue0(v.Line, OpConst16, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Or16 x x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Or16 (Const16 [0]) x)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Or16 (Const16 [-1]) _)
	// cond:
	// result: (Const16 [-1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		if v_0.AuxInt != -1 {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = -1
		return true
	}
	// match: (Or16 x (Or16 x y))
	// cond:
	// result: (Or16 x y)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpOr16 {
			break
		}
		if x != v_1.Args[0] {
			break
		}
		y := v_1.Args[1]
		v.reset(OpOr16)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Or16 x (Or16 y x))
	// cond:
	// result: (Or16 x y)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpOr16 {
			break
		}
		y := v_1.Args[0]
		if x != v_1.Args[1] {
			break
		}
		v.reset(OpOr16)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Or16 (Or16 x y) x)
	// cond:
	// result: (Or16 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpOr16 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if x != v.Args[1] {
			break
		}
		v.reset(OpOr16)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Or16 (Or16 x y) y)
	// cond:
	// result: (Or16 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpOr16 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if y != v.Args[1] {
			break
		}
		v.reset(OpOr16)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}

// rewriteValuegeneric_OpOr32 applies the generic rewrite rules for an
// OpOr32 value v; it reports whether v was rewritten.
func rewriteValuegeneric_OpOr32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Or32 x (Const32 <t> [c]))
	// cond: x.Op != OpConst32
	// result: (Or32 (Const32 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst32) {
			break
		}
		v.reset(OpOr32)
		v0 := b.NewValue0(v.Line, OpConst32, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Or32 x x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Or32 (Const32 [0]) x)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Or32 (Const32 [-1]) _)
	// cond:
	// result: (Const32 [-1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		if v_0.AuxInt != -1 {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = -1
		return true
	}
	// match: (Or32 x (Or32 x y))
	// cond:
	// result: (Or32 x y)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpOr32 {
			break
		}
		if x != v_1.Args[0] {
			break
		}
		y := v_1.Args[1]
		v.reset(OpOr32)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Or32 x (Or32 y x))
	// cond:
	// result: (Or32 x y)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpOr32 {
			break
		}
		y := v_1.Args[0]
		if x != v_1.Args[1] {
			break
		}
		v.reset(OpOr32)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Or32 (Or32 x y) x)
	// cond:
	// result: (Or32 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpOr32 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if x != v.Args[1] {
			break
		}
		v.reset(OpOr32)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (Or32 (Or32 x y) y)
	// cond:
	// result: (Or32 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpOr32 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if y != v.Args[1] {
			break
		}
		v.reset(OpOr32)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}

// rewriteValuegeneric_OpOr64 applies the generic rewrite rules for an
// OpOr64 value v; it reports whether v was rewritten.
func rewriteValuegeneric_OpOr64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Or64 x (Const64 <t> [c]))
	// cond: x.Op != OpConst64
	// result: (Or64 (Const64 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst64) {
			break
		}
		v.reset(OpOr64)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Or64 x x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Or64 (Const64 [0]) x)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Or64 (Const64 [-1]) _)
	// cond:
	// result: (Const64 [-1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != -1 {
7593 break 7594 } 7595 v.reset(OpConst64) 7596 v.AuxInt = -1 7597 return true 7598 } 7599 // match: (Or64 x (Or64 x y)) 7600 // cond: 7601 // result: (Or64 x y) 7602 for { 7603 x := v.Args[0] 7604 v_1 := v.Args[1] 7605 if v_1.Op != OpOr64 { 7606 break 7607 } 7608 if x != v_1.Args[0] { 7609 break 7610 } 7611 y := v_1.Args[1] 7612 v.reset(OpOr64) 7613 v.AddArg(x) 7614 v.AddArg(y) 7615 return true 7616 } 7617 // match: (Or64 x (Or64 y x)) 7618 // cond: 7619 // result: (Or64 x y) 7620 for { 7621 x := v.Args[0] 7622 v_1 := v.Args[1] 7623 if v_1.Op != OpOr64 { 7624 break 7625 } 7626 y := v_1.Args[0] 7627 if x != v_1.Args[1] { 7628 break 7629 } 7630 v.reset(OpOr64) 7631 v.AddArg(x) 7632 v.AddArg(y) 7633 return true 7634 } 7635 // match: (Or64 (Or64 x y) x) 7636 // cond: 7637 // result: (Or64 x y) 7638 for { 7639 v_0 := v.Args[0] 7640 if v_0.Op != OpOr64 { 7641 break 7642 } 7643 x := v_0.Args[0] 7644 y := v_0.Args[1] 7645 if x != v.Args[1] { 7646 break 7647 } 7648 v.reset(OpOr64) 7649 v.AddArg(x) 7650 v.AddArg(y) 7651 return true 7652 } 7653 // match: (Or64 (Or64 x y) y) 7654 // cond: 7655 // result: (Or64 x y) 7656 for { 7657 v_0 := v.Args[0] 7658 if v_0.Op != OpOr64 { 7659 break 7660 } 7661 x := v_0.Args[0] 7662 y := v_0.Args[1] 7663 if y != v.Args[1] { 7664 break 7665 } 7666 v.reset(OpOr64) 7667 v.AddArg(x) 7668 v.AddArg(y) 7669 return true 7670 } 7671 return false 7672 } 7673 func rewriteValuegeneric_OpOr8(v *Value, config *Config) bool { 7674 b := v.Block 7675 _ = b 7676 // match: (Or8 x (Const8 <t> [c])) 7677 // cond: x.Op != OpConst8 7678 // result: (Or8 (Const8 <t> [c]) x) 7679 for { 7680 x := v.Args[0] 7681 v_1 := v.Args[1] 7682 if v_1.Op != OpConst8 { 7683 break 7684 } 7685 t := v_1.Type 7686 c := v_1.AuxInt 7687 if !(x.Op != OpConst8) { 7688 break 7689 } 7690 v.reset(OpOr8) 7691 v0 := b.NewValue0(v.Line, OpConst8, t) 7692 v0.AuxInt = c 7693 v.AddArg(v0) 7694 v.AddArg(x) 7695 return true 7696 } 7697 // match: (Or8 x x) 7698 // cond: 7699 // result: x 7700 for { 
7701 x := v.Args[0] 7702 if x != v.Args[1] { 7703 break 7704 } 7705 v.reset(OpCopy) 7706 v.Type = x.Type 7707 v.AddArg(x) 7708 return true 7709 } 7710 // match: (Or8 (Const8 [0]) x) 7711 // cond: 7712 // result: x 7713 for { 7714 v_0 := v.Args[0] 7715 if v_0.Op != OpConst8 { 7716 break 7717 } 7718 if v_0.AuxInt != 0 { 7719 break 7720 } 7721 x := v.Args[1] 7722 v.reset(OpCopy) 7723 v.Type = x.Type 7724 v.AddArg(x) 7725 return true 7726 } 7727 // match: (Or8 (Const8 [-1]) _) 7728 // cond: 7729 // result: (Const8 [-1]) 7730 for { 7731 v_0 := v.Args[0] 7732 if v_0.Op != OpConst8 { 7733 break 7734 } 7735 if v_0.AuxInt != -1 { 7736 break 7737 } 7738 v.reset(OpConst8) 7739 v.AuxInt = -1 7740 return true 7741 } 7742 // match: (Or8 x (Or8 x y)) 7743 // cond: 7744 // result: (Or8 x y) 7745 for { 7746 x := v.Args[0] 7747 v_1 := v.Args[1] 7748 if v_1.Op != OpOr8 { 7749 break 7750 } 7751 if x != v_1.Args[0] { 7752 break 7753 } 7754 y := v_1.Args[1] 7755 v.reset(OpOr8) 7756 v.AddArg(x) 7757 v.AddArg(y) 7758 return true 7759 } 7760 // match: (Or8 x (Or8 y x)) 7761 // cond: 7762 // result: (Or8 x y) 7763 for { 7764 x := v.Args[0] 7765 v_1 := v.Args[1] 7766 if v_1.Op != OpOr8 { 7767 break 7768 } 7769 y := v_1.Args[0] 7770 if x != v_1.Args[1] { 7771 break 7772 } 7773 v.reset(OpOr8) 7774 v.AddArg(x) 7775 v.AddArg(y) 7776 return true 7777 } 7778 // match: (Or8 (Or8 x y) x) 7779 // cond: 7780 // result: (Or8 x y) 7781 for { 7782 v_0 := v.Args[0] 7783 if v_0.Op != OpOr8 { 7784 break 7785 } 7786 x := v_0.Args[0] 7787 y := v_0.Args[1] 7788 if x != v.Args[1] { 7789 break 7790 } 7791 v.reset(OpOr8) 7792 v.AddArg(x) 7793 v.AddArg(y) 7794 return true 7795 } 7796 // match: (Or8 (Or8 x y) y) 7797 // cond: 7798 // result: (Or8 x y) 7799 for { 7800 v_0 := v.Args[0] 7801 if v_0.Op != OpOr8 { 7802 break 7803 } 7804 x := v_0.Args[0] 7805 y := v_0.Args[1] 7806 if y != v.Args[1] { 7807 break 7808 } 7809 v.reset(OpOr8) 7810 v.AddArg(x) 7811 v.AddArg(y) 7812 return true 7813 } 7814 return false 7815 } 
// rewriteValuegeneric_OpPhi folds a Phi whose arguments are all the same
// integer constant into that constant.
// Autogenerated from gen/generic.rules — do not edit by hand; change the rules instead.
// Reports whether v was rewritten.
func rewriteValuegeneric_OpPhi(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Phi (Const8 [c]) (Const8 [c]))
	// cond:
	// result: (Const8 [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		if v_1.AuxInt != c {
			break
		}
		// A Phi may have more than two args; only fold when these two are all of them.
		if len(v.Args) != 2 {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = c
		return true
	}
	// match: (Phi (Const16 [c]) (Const16 [c]))
	// cond:
	// result: (Const16 [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		if v_1.AuxInt != c {
			break
		}
		if len(v.Args) != 2 {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = c
		return true
	}
	// match: (Phi (Const32 [c]) (Const32 [c]))
	// cond:
	// result: (Const32 [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		if v_1.AuxInt != c {
			break
		}
		if len(v.Args) != 2 {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = c
		return true
	}
	// match: (Phi (Const64 [c]) (Const64 [c]))
	// cond:
	// result: (Const64 [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		if v_1.AuxInt != c {
			break
		}
		if len(v.Args) != 2 {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = c
		return true
	}
	return false
}

// rewriteValuegeneric_OpPtrIndex lowers a PtrIndex to pointer arithmetic:
// ptr + idx*elemsize, using 32- or 64-bit multiply depending on config.PtrSize.
// Autogenerated from gen/generic.rules — do not edit by hand; change the rules instead.
// Reports whether v was rewritten.
func rewriteValuegeneric_OpPtrIndex(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (PtrIndex <t> ptr idx)
	// cond: config.PtrSize == 4
	// result: (AddPtr ptr (Mul32 <config.fe.TypeInt()> idx (Const32 <config.fe.TypeInt()> [t.ElemType().Size()])))
	for {
		t := v.Type
		ptr := v.Args[0]
		idx := v.Args[1]
		if !(config.PtrSize == 4) {
			break
		}
		v.reset(OpAddPtr)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Line, OpMul32, config.fe.TypeInt())
		v0.AddArg(idx)
		v1 := b.NewValue0(v.Line, OpConst32, config.fe.TypeInt())
		v1.AuxInt = t.ElemType().Size()
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (PtrIndex <t> ptr idx)
	// cond: config.PtrSize == 8
	// result: (AddPtr ptr (Mul64 <config.fe.TypeInt()> idx (Const64 <config.fe.TypeInt()> [t.ElemType().Size()])))
	for {
		t := v.Type
		ptr := v.Args[0]
		idx := v.Args[1]
		if !(config.PtrSize == 8) {
			break
		}
		v.reset(OpAddPtr)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Line, OpMul64, config.fe.TypeInt())
		v0.AddArg(idx)
		v1 := b.NewValue0(v.Line, OpConst64, config.fe.TypeInt())
		v1.AuxInt = t.ElemType().Size()
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh16Ux16 normalizes a constant 16-bit shift amount
// to 64 bits and folds shifts of a zero base.
// Autogenerated from gen/generic.rules — do not edit by hand; change the rules instead.
// Reports whether v was rewritten.
func rewriteValuegeneric_OpRsh16Ux16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh16Ux16 <t> x (Const16 [c]))
	// cond:
	// result: (Rsh16Ux64 x (Const64 <t> [int64(uint16(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh16Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint16(c))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh16Ux16 (Const16 [0]) _)
	// cond:
	// result: (Const16 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh16Ux32 normalizes a constant 32-bit shift amount
// to 64 bits and folds shifts of a zero base.
// Autogenerated from gen/generic.rules — do not edit by hand; change the rules instead.
// Reports whether v was rewritten.
func rewriteValuegeneric_OpRsh16Ux32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh16Ux32 <t> x (Const32 [c]))
	// cond:
	// result: (Rsh16Ux64 x (Const64 <t> [int64(uint32(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh16Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint32(c))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh16Ux32 (Const16 [0]) _)
	// cond:
	// result: (Const16 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh16Ux64 constant-folds unsigned 16-bit right shifts,
// removes trivial shifts, zeroes over-wide shifts, and merges shift chains.
// Autogenerated from gen/generic.rules — do not edit by hand; change the rules instead.
// Reports whether v was rewritten.
func rewriteValuegeneric_OpRsh16Ux64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh16Ux64 (Const16 [c]) (Const64 [d]))
	// cond:
	// result: (Const16 [int64(int16(uint16(c) >> uint64(d)))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst16)
		v.AuxInt = int64(int16(uint16(c) >> uint64(d)))
		return true
	}
	// match: (Rsh16Ux64 x (Const64 [0]))
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Rsh16Ux64 (Const16 [0]) _)
	// cond:
	// result: (Const16 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = 0
		return true
	}
	// Shifting a 16-bit value right by >= 16 bits always yields 0.
	// match: (Rsh16Ux64 _ (Const64 [c]))
	// cond: uint64(c) >= 16
	// result: (Const16 [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 16) {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = 0
		return true
	}
	// Merge two constant shifts; uaddOvf guards against c+d overflowing.
	// match: (Rsh16Ux64 <t> (Rsh16Ux64 x (Const64 [c])) (Const64 [d]))
	// cond: !uaddOvf(c,d)
	// result: (Rsh16Ux64 x (Const64 <t> [c+d]))
	for {
		t := v.Type
		v_0 := v.Args[0]
		if v_0.Op != OpRsh16Ux64 {
			break
		}
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		if !(!uaddOvf(c, d)) {
			break
		}
		v.reset(OpRsh16Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = c + d
		v.AddArg(v0)
		return true
	}
	// Collapse a right-shift/left-shift/right-shift chain into one shift.
	// match: (Rsh16Ux64 (Lsh16x64 (Rsh16Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
	// result: (Rsh16Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpLsh16x64 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpRsh16Ux64 {
			break
		}
		x := v_0_0.Args[0]
		v_0_0_1 := v_0_0.Args[1]
		if v_0_0_1.Op != OpConst64 {
			break
		}
		c1 := v_0_0_1.AuxInt
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c2 := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c3 := v_1.AuxInt
		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
			break
		}
		v.reset(OpRsh16Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v0.AuxInt = c1 - c2 + c3
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh16Ux8 normalizes a constant 8-bit shift amount
// to 64 bits and folds shifts of a zero base.
// Autogenerated from gen/generic.rules — do not edit by hand; change the rules instead.
// Reports whether v was rewritten.
func rewriteValuegeneric_OpRsh16Ux8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh16Ux8 <t> x (Const8 [c]))
	// cond:
	// result: (Rsh16Ux64 x (Const64 <t> [int64(uint8(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh16Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint8(c))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh16Ux8 (Const16 [0]) _)
	// cond:
	// result: (Const16 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh16x16 normalizes a constant 16-bit shift amount
// to 64 bits and folds shifts of a zero base (signed shift).
// Autogenerated from gen/generic.rules — do not edit by hand; change the rules instead.
// Reports whether v was rewritten.
func rewriteValuegeneric_OpRsh16x16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh16x16 <t> x (Const16 [c]))
	// cond:
	// result: (Rsh16x64 x (Const64 <t> [int64(uint16(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh16x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint16(c))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh16x16 (Const16 [0]) _)
	// cond:
	// result: (Const16 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh16x32 normalizes a constant 32-bit shift amount
// to 64 bits and folds shifts of a zero base (signed shift).
// Autogenerated from gen/generic.rules — do not edit by hand; change the rules instead.
// Reports whether v was rewritten.
func rewriteValuegeneric_OpRsh16x32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh16x32 <t> x (Const32 [c]))
	// cond:
	// result: (Rsh16x64 x (Const64 <t> [int64(uint32(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh16x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint32(c))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh16x32 (Const16 [0]) _)
	// cond:
	// result: (Const16 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh16x64 constant-folds signed 16-bit right shifts,
// removes trivial shifts, and merges constant shift chains.
// (No ">= width" rule here: signed shifts saturate to the sign bit, not zero.)
// Autogenerated from gen/generic.rules — do not edit by hand; change the rules instead.
// Reports whether v was rewritten.
func rewriteValuegeneric_OpRsh16x64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh16x64 (Const16 [c]) (Const64 [d]))
	// cond:
	// result: (Const16 [int64(int16(c) >> uint64(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst16)
		v.AuxInt = int64(int16(c) >> uint64(d))
		return true
	}
	// match: (Rsh16x64 x (Const64 [0]))
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Rsh16x64 (Const16 [0]) _)
	// cond:
	// result: (Const16 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = 0
		return true
	}
	// Merge two constant shifts; uaddOvf guards against c+d overflowing.
	// match: (Rsh16x64 <t> (Rsh16x64 x (Const64 [c])) (Const64 [d]))
	// cond: !uaddOvf(c,d)
	// result: (Rsh16x64 x (Const64 <t> [c+d]))
	for {
		t := v.Type
		v_0 := v.Args[0]
		if v_0.Op != OpRsh16x64 {
			break
		}
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		if !(!uaddOvf(c, d)) {
			break
		}
		v.reset(OpRsh16x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = c + d
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh16x8 normalizes a constant 8-bit shift amount
// to 64 bits and folds shifts of a zero base (signed shift).
// Autogenerated from gen/generic.rules — do not edit by hand; change the rules instead.
// Reports whether v was rewritten.
func rewriteValuegeneric_OpRsh16x8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh16x8 <t> x (Const8 [c]))
	// cond:
	// result: (Rsh16x64 x (Const64 <t> [int64(uint8(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh16x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint8(c))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh16x8 (Const16 [0]) _)
	// cond:
	// result: (Const16 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh32Ux16 normalizes a constant 16-bit shift amount
// to 64 bits and folds shifts of a zero base.
// Autogenerated from gen/generic.rules — do not edit by hand; change the rules instead.
// Reports whether v was rewritten.
func rewriteValuegeneric_OpRsh32Ux16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh32Ux16 <t> x (Const16 [c]))
	// cond:
	// result: (Rsh32Ux64 x (Const64 <t> [int64(uint16(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh32Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint16(c))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh32Ux16 (Const32 [0]) _)
	// cond:
	// result: (Const32 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh32Ux32 normalizes a constant 32-bit shift amount
// to 64 bits and folds shifts of a zero base.
// Autogenerated from gen/generic.rules — do not edit by hand; change the rules instead.
// Reports whether v was rewritten.
func rewriteValuegeneric_OpRsh32Ux32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh32Ux32 <t> x (Const32 [c]))
	// cond:
	// result: (Rsh32Ux64 x (Const64 <t> [int64(uint32(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh32Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint32(c))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh32Ux32 (Const32 [0]) _)
	// cond:
	// result: (Const32 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh32Ux64 constant-folds unsigned 32-bit right shifts,
// removes trivial shifts, zeroes over-wide shifts, and merges shift chains.
// Autogenerated from gen/generic.rules — do not edit by hand; change the rules instead.
// Reports whether v was rewritten.
func rewriteValuegeneric_OpRsh32Ux64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh32Ux64 (Const32 [c]) (Const64 [d]))
	// cond:
	// result: (Const32 [int64(int32(uint32(c) >> uint64(d)))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst32)
		v.AuxInt = int64(int32(uint32(c) >> uint64(d)))
		return true
	}
	// match: (Rsh32Ux64 x (Const64 [0]))
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Rsh32Ux64 (Const32 [0]) _)
	// cond:
	// result: (Const32 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = 0
		return true
	}
	// Shifting a 32-bit value right by >= 32 bits always yields 0.
	// match: (Rsh32Ux64 _ (Const64 [c]))
	// cond: uint64(c) >= 32
	// result: (Const32 [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 32) {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = 0
		return true
	}
	// Merge two constant shifts; uaddOvf guards against c+d overflowing.
	// match: (Rsh32Ux64 <t> (Rsh32Ux64 x (Const64 [c])) (Const64 [d]))
	// cond: !uaddOvf(c,d)
	// result: (Rsh32Ux64 x (Const64 <t> [c+d]))
	for {
		t := v.Type
		v_0 := v.Args[0]
		if v_0.Op != OpRsh32Ux64 {
			break
		}
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		if !(!uaddOvf(c, d)) {
			break
		}
		v.reset(OpRsh32Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = c + d
		v.AddArg(v0)
		return true
	}
	// Collapse a right-shift/left-shift/right-shift chain into one shift.
	// match: (Rsh32Ux64 (Lsh32x64 (Rsh32Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
	// result: (Rsh32Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpLsh32x64 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpRsh32Ux64 {
			break
		}
		x := v_0_0.Args[0]
		v_0_0_1 := v_0_0.Args[1]
		if v_0_0_1.Op != OpConst64 {
			break
		}
		c1 := v_0_0_1.AuxInt
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c2 := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c3 := v_1.AuxInt
		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
			break
		}
		v.reset(OpRsh32Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v0.AuxInt = c1 - c2 + c3
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh32Ux8 normalizes a constant 8-bit shift amount
// to 64 bits and folds shifts of a zero base.
// Autogenerated from gen/generic.rules — do not edit by hand; change the rules instead.
// Reports whether v was rewritten.
func rewriteValuegeneric_OpRsh32Ux8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh32Ux8 <t> x (Const8 [c]))
	// cond:
	// result: (Rsh32Ux64 x (Const64 <t> [int64(uint8(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh32Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint8(c))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh32Ux8 (Const32 [0]) _)
	// cond:
	// result: (Const32 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh32x16 normalizes a constant 16-bit shift amount
// to 64 bits and folds shifts of a zero base (signed shift).
// Autogenerated from gen/generic.rules — do not edit by hand; change the rules instead.
// Reports whether v was rewritten.
func rewriteValuegeneric_OpRsh32x16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh32x16 <t> x (Const16 [c]))
	// cond:
	// result: (Rsh32x64 x (Const64 <t> [int64(uint16(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh32x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint16(c))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh32x16 (Const32 [0]) _)
	// cond:
	// result: (Const32 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh32x32 normalizes a constant 32-bit shift amount
// to 64 bits and folds shifts of a zero base (signed shift).
// Autogenerated from gen/generic.rules — do not edit by hand; change the rules instead.
// Reports whether v was rewritten.
func rewriteValuegeneric_OpRsh32x32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh32x32 <t> x (Const32 [c]))
	// cond:
	// result: (Rsh32x64 x (Const64 <t> [int64(uint32(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh32x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint32(c))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh32x32 (Const32 [0]) _)
	// cond:
	// result: (Const32 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh32x64 constant-folds signed 32-bit right shifts,
// removes trivial shifts, and merges constant shift chains.
// (No ">= width" rule here: signed shifts saturate to the sign bit, not zero.)
// Autogenerated from gen/generic.rules — do not edit by hand; change the rules instead.
// Reports whether v was rewritten.
func rewriteValuegeneric_OpRsh32x64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh32x64 (Const32 [c]) (Const64 [d]))
	// cond:
	// result: (Const32 [int64(int32(c) >> uint64(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst32)
		v.AuxInt = int64(int32(c) >> uint64(d))
		return true
	}
	// match: (Rsh32x64 x (Const64 [0]))
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Rsh32x64 (Const32 [0]) _)
	// cond:
	// result: (Const32 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = 0
		return true
	}
	// Merge two constant shifts; uaddOvf guards against c+d overflowing.
	// match: (Rsh32x64 <t> (Rsh32x64 x (Const64 [c])) (Const64 [d]))
	// cond: !uaddOvf(c,d)
	// result: (Rsh32x64 x (Const64 <t> [c+d]))
	for {
		t := v.Type
		v_0 := v.Args[0]
		if v_0.Op != OpRsh32x64 {
			break
		}
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		if !(!uaddOvf(c, d)) {
			break
		}
		v.reset(OpRsh32x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = c + d
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh32x8 normalizes a constant 8-bit shift amount
// to 64 bits and folds shifts of a zero base (signed shift).
// Autogenerated from gen/generic.rules — do not edit by hand; change the rules instead.
// Reports whether v was rewritten.
func rewriteValuegeneric_OpRsh32x8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh32x8 <t> x (Const8 [c]))
	// cond:
	// result: (Rsh32x64 x (Const64 <t> [int64(uint8(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh32x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint8(c))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh32x8 (Const32 [0]) _)
	// cond:
	// result: (Const32 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh64Ux16 normalizes a constant 16-bit shift amount
// to 64 bits and folds shifts of a zero base.
// Autogenerated from gen/generic.rules — do not edit by hand; change the rules instead.
// Reports whether v was rewritten.
func rewriteValuegeneric_OpRsh64Ux16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh64Ux16 <t> x (Const16 [c]))
	// cond:
	// result: (Rsh64Ux64 x (Const64 <t> [int64(uint16(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh64Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint16(c))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh64Ux16 (Const64 [0]) _)
	// cond:
	// result: (Const64 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh64Ux32 normalizes a constant 32-bit shift amount
// to 64 bits and folds shifts of a zero base.
// Autogenerated from gen/generic.rules — do not edit by hand; change the rules instead.
// Reports whether v was rewritten.
func rewriteValuegeneric_OpRsh64Ux32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh64Ux32 <t> x (Const32 [c]))
	// cond:
	// result: (Rsh64Ux64 x (Const64 <t> [int64(uint32(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh64Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint32(c))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh64Ux32 (Const64 [0]) _)
	// cond:
	// result: (Const64 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh64Ux64 constant-folds unsigned 64-bit right shifts,
// removes trivial shifts, zeroes over-wide shifts, and merges shift chains.
// Autogenerated from gen/generic.rules — do not edit by hand; change the rules instead.
// Reports whether v was rewritten.
func rewriteValuegeneric_OpRsh64Ux64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh64Ux64 (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (Const64 [int64(uint64(c) >> uint64(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst64)
		v.AuxInt = int64(uint64(c) >> uint64(d))
		return true
	}
	// match: (Rsh64Ux64 x (Const64 [0]))
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Rsh64Ux64 (Const64 [0]) _)
	// cond:
	// result: (Const64 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	// Shifting a 64-bit value right by >= 64 bits always yields 0.
	// match: (Rsh64Ux64 _ (Const64 [c]))
	// cond: uint64(c) >= 64
	// result: (Const64 [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	// Merge two constant shifts; uaddOvf guards against c+d overflowing.
	// match: (Rsh64Ux64 <t> (Rsh64Ux64 x (Const64 [c])) (Const64 [d]))
	// cond: !uaddOvf(c,d)
	// result: (Rsh64Ux64 x (Const64 <t> [c+d]))
	for {
		t := v.Type
		v_0 := v.Args[0]
		if v_0.Op != OpRsh64Ux64 {
			break
		}
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		if !(!uaddOvf(c, d)) {
			break
		}
		v.reset(OpRsh64Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = c + d
		v.AddArg(v0)
		return true
	}
	// Collapse a right-shift/left-shift/right-shift chain into one shift.
	// match: (Rsh64Ux64 (Lsh64x64 (Rsh64Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
	// result: (Rsh64Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpLsh64x64 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpRsh64Ux64 {
			break
		}
		x := v_0_0.Args[0]
		v_0_0_1 := v_0_0.Args[1]
		if v_0_0_1.Op != OpConst64 {
			break
		}
		c1 := v_0_0_1.AuxInt
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c2 := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c3 := v_1.AuxInt
		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
			break
		}
		v.reset(OpRsh64Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v0.AuxInt = c1 - c2 + c3
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh64Ux8 — definition continues past this chunk.
func rewriteValuegeneric_OpRsh64Ux8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh64Ux8 <t> x (Const8 [c]))
	// cond:
	// result: (Rsh64Ux64 x
(Const64 <t> [int64(uint8(c))])) 9083 for { 9084 t := v.Type 9085 x := v.Args[0] 9086 v_1 := v.Args[1] 9087 if v_1.Op != OpConst8 { 9088 break 9089 } 9090 c := v_1.AuxInt 9091 v.reset(OpRsh64Ux64) 9092 v.AddArg(x) 9093 v0 := b.NewValue0(v.Line, OpConst64, t) 9094 v0.AuxInt = int64(uint8(c)) 9095 v.AddArg(v0) 9096 return true 9097 } 9098 // match: (Rsh64Ux8 (Const64 [0]) _) 9099 // cond: 9100 // result: (Const64 [0]) 9101 for { 9102 v_0 := v.Args[0] 9103 if v_0.Op != OpConst64 { 9104 break 9105 } 9106 if v_0.AuxInt != 0 { 9107 break 9108 } 9109 v.reset(OpConst64) 9110 v.AuxInt = 0 9111 return true 9112 } 9113 return false 9114 } 9115 func rewriteValuegeneric_OpRsh64x16(v *Value, config *Config) bool { 9116 b := v.Block 9117 _ = b 9118 // match: (Rsh64x16 <t> x (Const16 [c])) 9119 // cond: 9120 // result: (Rsh64x64 x (Const64 <t> [int64(uint16(c))])) 9121 for { 9122 t := v.Type 9123 x := v.Args[0] 9124 v_1 := v.Args[1] 9125 if v_1.Op != OpConst16 { 9126 break 9127 } 9128 c := v_1.AuxInt 9129 v.reset(OpRsh64x64) 9130 v.AddArg(x) 9131 v0 := b.NewValue0(v.Line, OpConst64, t) 9132 v0.AuxInt = int64(uint16(c)) 9133 v.AddArg(v0) 9134 return true 9135 } 9136 // match: (Rsh64x16 (Const64 [0]) _) 9137 // cond: 9138 // result: (Const64 [0]) 9139 for { 9140 v_0 := v.Args[0] 9141 if v_0.Op != OpConst64 { 9142 break 9143 } 9144 if v_0.AuxInt != 0 { 9145 break 9146 } 9147 v.reset(OpConst64) 9148 v.AuxInt = 0 9149 return true 9150 } 9151 return false 9152 } 9153 func rewriteValuegeneric_OpRsh64x32(v *Value, config *Config) bool { 9154 b := v.Block 9155 _ = b 9156 // match: (Rsh64x32 <t> x (Const32 [c])) 9157 // cond: 9158 // result: (Rsh64x64 x (Const64 <t> [int64(uint32(c))])) 9159 for { 9160 t := v.Type 9161 x := v.Args[0] 9162 v_1 := v.Args[1] 9163 if v_1.Op != OpConst32 { 9164 break 9165 } 9166 c := v_1.AuxInt 9167 v.reset(OpRsh64x64) 9168 v.AddArg(x) 9169 v0 := b.NewValue0(v.Line, OpConst64, t) 9170 v0.AuxInt = int64(uint32(c)) 9171 v.AddArg(v0) 9172 return true 9173 } 9174 // 
match: (Rsh64x32 (Const64 [0]) _) 9175 // cond: 9176 // result: (Const64 [0]) 9177 for { 9178 v_0 := v.Args[0] 9179 if v_0.Op != OpConst64 { 9180 break 9181 } 9182 if v_0.AuxInt != 0 { 9183 break 9184 } 9185 v.reset(OpConst64) 9186 v.AuxInt = 0 9187 return true 9188 } 9189 return false 9190 } 9191 func rewriteValuegeneric_OpRsh64x64(v *Value, config *Config) bool { 9192 b := v.Block 9193 _ = b 9194 // match: (Rsh64x64 (Const64 [c]) (Const64 [d])) 9195 // cond: 9196 // result: (Const64 [c >> uint64(d)]) 9197 for { 9198 v_0 := v.Args[0] 9199 if v_0.Op != OpConst64 { 9200 break 9201 } 9202 c := v_0.AuxInt 9203 v_1 := v.Args[1] 9204 if v_1.Op != OpConst64 { 9205 break 9206 } 9207 d := v_1.AuxInt 9208 v.reset(OpConst64) 9209 v.AuxInt = c >> uint64(d) 9210 return true 9211 } 9212 // match: (Rsh64x64 x (Const64 [0])) 9213 // cond: 9214 // result: x 9215 for { 9216 x := v.Args[0] 9217 v_1 := v.Args[1] 9218 if v_1.Op != OpConst64 { 9219 break 9220 } 9221 if v_1.AuxInt != 0 { 9222 break 9223 } 9224 v.reset(OpCopy) 9225 v.Type = x.Type 9226 v.AddArg(x) 9227 return true 9228 } 9229 // match: (Rsh64x64 (Const64 [0]) _) 9230 // cond: 9231 // result: (Const64 [0]) 9232 for { 9233 v_0 := v.Args[0] 9234 if v_0.Op != OpConst64 { 9235 break 9236 } 9237 if v_0.AuxInt != 0 { 9238 break 9239 } 9240 v.reset(OpConst64) 9241 v.AuxInt = 0 9242 return true 9243 } 9244 // match: (Rsh64x64 <t> (Rsh64x64 x (Const64 [c])) (Const64 [d])) 9245 // cond: !uaddOvf(c,d) 9246 // result: (Rsh64x64 x (Const64 <t> [c+d])) 9247 for { 9248 t := v.Type 9249 v_0 := v.Args[0] 9250 if v_0.Op != OpRsh64x64 { 9251 break 9252 } 9253 x := v_0.Args[0] 9254 v_0_1 := v_0.Args[1] 9255 if v_0_1.Op != OpConst64 { 9256 break 9257 } 9258 c := v_0_1.AuxInt 9259 v_1 := v.Args[1] 9260 if v_1.Op != OpConst64 { 9261 break 9262 } 9263 d := v_1.AuxInt 9264 if !(!uaddOvf(c, d)) { 9265 break 9266 } 9267 v.reset(OpRsh64x64) 9268 v.AddArg(x) 9269 v0 := b.NewValue0(v.Line, OpConst64, t) 9270 v0.AuxInt = c + d 9271 v.AddArg(v0) 9272 
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh64x8 widens a constant 8-bit shift count to a
// 64-bit count (Rsh64x64) and folds shifts of a zero operand.
func rewriteValuegeneric_OpRsh64x8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh64x8 <t> x (Const8 [c]))
	// cond:
	// result: (Rsh64x64 x (Const64 <t> [int64(uint8(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh64x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint8(c))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh64x8 (Const64 [0]) _)
	// cond:
	// result: (Const64 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh8Ux16 widens a constant 16-bit shift count to a
// 64-bit count (Rsh8Ux64) and folds shifts of a zero operand.
func rewriteValuegeneric_OpRsh8Ux16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8Ux16 <t> x (Const16 [c]))
	// cond:
	// result: (Rsh8Ux64 x (Const64 <t> [int64(uint16(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh8Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint16(c))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh8Ux16 (Const8 [0]) _)
	// cond:
	// result: (Const8 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh8Ux32 widens a constant 32-bit shift count to a
// 64-bit count (Rsh8Ux64) and folds shifts of a zero operand.
func rewriteValuegeneric_OpRsh8Ux32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8Ux32 <t> x (Const32 [c]))
	// cond:
	// result: (Rsh8Ux64 x (Const64 <t> [int64(uint32(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh8Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint32(c))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh8Ux32 (Const8 [0]) _)
	// cond:
	// result: (Const8 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh8Ux64 applies the generic rewrite rules for
// Rsh8Ux64 (unsigned 8-bit shift right): constant folding, shift-by-zero,
// shifts of zero, over-shift to zero, merging stacked shifts, and the
// combined Lsh-of-Rsh masking pattern.
func rewriteValuegeneric_OpRsh8Ux64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8Ux64 (Const8 [c]) (Const64 [d]))
	// cond:
	// result: (Const8 [int64(int8(uint8(c) >> uint64(d)))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst8)
		v.AuxInt = int64(int8(uint8(c) >> uint64(d)))
		return true
	}
	// match: (Rsh8Ux64 x (Const64 [0]))
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Rsh8Ux64 (Const8 [0]) _)
	// cond:
	// result: (Const8 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = 0
		return true
	}
	// match: (Rsh8Ux64 _ (Const64 [c]))
	// cond: uint64(c) >= 8
	// result: (Const8 [0])
	// An unsigned shift by the full width or more always produces zero.
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 8) {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = 0
		return true
	}
	// match: (Rsh8Ux64 <t> (Rsh8Ux64 x (Const64 [c])) (Const64 [d]))
	// cond: !uaddOvf(c,d)
	// result: (Rsh8Ux64 x (Const64 <t> [c+d]))
	for {
		t := v.Type
		v_0 := v.Args[0]
		if v_0.Op != OpRsh8Ux64 {
			break
		}
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		if !(!uaddOvf(c, d)) {
			break
		}
		v.reset(OpRsh8Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = c + d
		v.AddArg(v0)
		return true
	}
	// match: (Rsh8Ux64 (Lsh8x64 (Rsh8Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
	// result: (Rsh8Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpLsh8x64 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpRsh8Ux64 {
			break
		}
		x := v_0_0.Args[0]
		v_0_0_1 := v_0_0.Args[1]
		if v_0_0_1.Op != OpConst64 {
			break
		}
		c1 := v_0_0_1.AuxInt
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c2 := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c3 := v_1.AuxInt
		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
			break
		}
		v.reset(OpRsh8Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v0.AuxInt = c1 - c2 + c3
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh8Ux8 widens a constant 8-bit shift count to a
// 64-bit count (Rsh8Ux64) and folds shifts of a zero operand.
func rewriteValuegeneric_OpRsh8Ux8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8Ux8 <t> x (Const8 [c]))
	// cond:
	// result: (Rsh8Ux64 x (Const64 <t> [int64(uint8(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh8Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint8(c))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh8Ux8 (Const8 [0]) _)
	// cond:
	// result: (Const8 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh8x16 widens a constant 16-bit shift count to a
// 64-bit count (Rsh8x64) and folds shifts of a zero operand.
func rewriteValuegeneric_OpRsh8x16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8x16 <t> x (Const16 [c]))
	// cond:
	// result: (Rsh8x64 x (Const64 <t> [int64(uint16(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh8x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint16(c))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh8x16 (Const8 [0]) _)
	// cond:
	// result: (Const8 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh8x32 widens a constant 32-bit shift count to a
// 64-bit count (Rsh8x64) and folds shifts of a zero operand.
func rewriteValuegeneric_OpRsh8x32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8x32 <t> x (Const32 [c]))
	// cond:
	// result: (Rsh8x64 x (Const64 <t> [int64(uint32(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh8x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint32(c))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh8x32 (Const8 [0]) _)
	// cond:
	// result: (Const8 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh8x64 applies the generic rewrite rules for
// Rsh8x64 (signed 8-bit shift right): constant folding, shift-by-zero,
// shifts of zero, and merging stacked shifts.
func rewriteValuegeneric_OpRsh8x64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8x64 (Const8 [c]) (Const64 [d]))
	// cond:
	// result: (Const8 [int64(int8(c) >> uint64(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst8)
		v.AuxInt = int64(int8(c) >> uint64(d))
		return true
	}
	// match: (Rsh8x64 x (Const64 [0]))
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Rsh8x64 (Const8 [0]) _)
	// cond:
	// result: (Const8 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = 0
		return true
	}
	// match: (Rsh8x64 <t> (Rsh8x64 x (Const64 [c])) (Const64 [d]))
	// cond: !uaddOvf(c,d)
	// result: (Rsh8x64 x (Const64 <t> [c+d]))
	for {
		t := v.Type
		v_0 := v.Args[0]
		if v_0.Op != OpRsh8x64 {
			break
		}
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		if !(!uaddOvf(c, d)) {
			break
		}
		v.reset(OpRsh8x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = c + d
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh8x8 widens a constant 8-bit shift count to a
// 64-bit count (Rsh8x64) and folds shifts of a zero operand.
func rewriteValuegeneric_OpRsh8x8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8x8 <t> x (Const8 [c]))
	// cond:
	// result: (Rsh8x64 x (Const64 <t> [int64(uint8(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh8x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint8(c))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh8x8 (Const8 [0]) _)
	// cond:
	// result: (Const8 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpSignExt16to32 removes a sign extension that is
// redundant because an arithmetic right shift by >= 16 already filled the
// upper bits with copies of the sign bit.
func rewriteValuegeneric_OpSignExt16to32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (SignExt16to32 (Trunc32to16 x:(Rsh32x64 _ (Const64 [s]))))
	// cond: s >= 16
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpTrunc32to16 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpRsh32x64 {
			break
		}
		x_1 := x.Args[1]
		if x_1.Op != OpConst64 {
			break
		}
		s := x_1.AuxInt
		if !(s >= 16) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpSignExt16to64 removes a sign extension that is
// redundant after an arithmetic right shift by >= 48.
func rewriteValuegeneric_OpSignExt16to64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (SignExt16to64 (Trunc64to16 x:(Rsh64x64 _ (Const64 [s]))))
	// cond: s >= 48
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpTrunc64to16 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpRsh64x64 {
			break
		}
		x_1 := x.Args[1]
		if x_1.Op != OpConst64 {
			break
		}
		s := x_1.AuxInt
		if !(s >= 48) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpSignExt32to64 removes a sign extension that is
// redundant after an arithmetic right shift by >= 32.
func rewriteValuegeneric_OpSignExt32to64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (SignExt32to64 (Trunc64to32 x:(Rsh64x64 _ (Const64 [s]))))
	// cond: s >= 32
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpTrunc64to32 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpRsh64x64 {
			break
		}
		x_1 := x.Args[1]
		if x_1.Op != OpConst64 {
			break
		}
		s := x_1.AuxInt
		if !(s >= 32) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpSignExt8to16 removes a sign extension that is
// redundant after an arithmetic right shift by >= 8.
func rewriteValuegeneric_OpSignExt8to16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (SignExt8to16 (Trunc16to8 x:(Rsh16x64 _ (Const64 [s]))))
	// cond: s >= 8
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpTrunc16to8 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpRsh16x64 {
			break
		}
		x_1 := x.Args[1]
		if x_1.Op != OpConst64 {
			break
		}
		s := x_1.AuxInt
		if !(s >= 8) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpSignExt8to32 removes a sign extension that is
// redundant after an arithmetic right shift by >= 24.
func rewriteValuegeneric_OpSignExt8to32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (SignExt8to32 (Trunc32to8 x:(Rsh32x64 _ (Const64 [s]))))
	// cond: s >= 24
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpTrunc32to8 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpRsh32x64 {
			break
		}
		x_1 := x.Args[1]
		if x_1.Op != OpConst64 {
			break
		}
		s := x_1.AuxInt
		if !(s >= 24) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpSignExt8to64 removes a sign extension that is
// redundant after an arithmetic right shift by >= 56.
func rewriteValuegeneric_OpSignExt8to64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (SignExt8to64 (Trunc64to8 x:(Rsh64x64 _ (Const64 [s]))))
	// cond: s >= 56
	// result: x
	for {
		v_0 := v.Args[0]
if v_0.Op != OpTrunc64to8 { 9925 break 9926 } 9927 x := v_0.Args[0] 9928 if x.Op != OpRsh64x64 { 9929 break 9930 } 9931 x_1 := x.Args[1] 9932 if x_1.Op != OpConst64 { 9933 break 9934 } 9935 s := x_1.AuxInt 9936 if !(s >= 56) { 9937 break 9938 } 9939 v.reset(OpCopy) 9940 v.Type = x.Type 9941 v.AddArg(x) 9942 return true 9943 } 9944 return false 9945 } 9946 func rewriteValuegeneric_OpSliceCap(v *Value, config *Config) bool { 9947 b := v.Block 9948 _ = b 9949 // match: (SliceCap (SliceMake _ _ (Const64 <t> [c]))) 9950 // cond: 9951 // result: (Const64 <t> [c]) 9952 for { 9953 v_0 := v.Args[0] 9954 if v_0.Op != OpSliceMake { 9955 break 9956 } 9957 v_0_2 := v_0.Args[2] 9958 if v_0_2.Op != OpConst64 { 9959 break 9960 } 9961 t := v_0_2.Type 9962 c := v_0_2.AuxInt 9963 v.reset(OpConst64) 9964 v.Type = t 9965 v.AuxInt = c 9966 return true 9967 } 9968 // match: (SliceCap (SliceMake _ _ (Const32 <t> [c]))) 9969 // cond: 9970 // result: (Const32 <t> [c]) 9971 for { 9972 v_0 := v.Args[0] 9973 if v_0.Op != OpSliceMake { 9974 break 9975 } 9976 v_0_2 := v_0.Args[2] 9977 if v_0_2.Op != OpConst32 { 9978 break 9979 } 9980 t := v_0_2.Type 9981 c := v_0_2.AuxInt 9982 v.reset(OpConst32) 9983 v.Type = t 9984 v.AuxInt = c 9985 return true 9986 } 9987 // match: (SliceCap (SliceMake _ _ (SliceCap x))) 9988 // cond: 9989 // result: (SliceCap x) 9990 for { 9991 v_0 := v.Args[0] 9992 if v_0.Op != OpSliceMake { 9993 break 9994 } 9995 v_0_2 := v_0.Args[2] 9996 if v_0_2.Op != OpSliceCap { 9997 break 9998 } 9999 x := v_0_2.Args[0] 10000 v.reset(OpSliceCap) 10001 v.AddArg(x) 10002 return true 10003 } 10004 // match: (SliceCap (SliceMake _ _ (SliceLen x))) 10005 // cond: 10006 // result: (SliceLen x) 10007 for { 10008 v_0 := v.Args[0] 10009 if v_0.Op != OpSliceMake { 10010 break 10011 } 10012 v_0_2 := v_0.Args[2] 10013 if v_0_2.Op != OpSliceLen { 10014 break 10015 } 10016 x := v_0_2.Args[0] 10017 v.reset(OpSliceLen) 10018 v.AddArg(x) 10019 return true 10020 } 10021 return false 10022 } 10023 func 
rewriteValuegeneric_OpSliceLen(v *Value, config *Config) bool { 10024 b := v.Block 10025 _ = b 10026 // match: (SliceLen (SliceMake _ (Const64 <t> [c]) _)) 10027 // cond: 10028 // result: (Const64 <t> [c]) 10029 for { 10030 v_0 := v.Args[0] 10031 if v_0.Op != OpSliceMake { 10032 break 10033 } 10034 v_0_1 := v_0.Args[1] 10035 if v_0_1.Op != OpConst64 { 10036 break 10037 } 10038 t := v_0_1.Type 10039 c := v_0_1.AuxInt 10040 v.reset(OpConst64) 10041 v.Type = t 10042 v.AuxInt = c 10043 return true 10044 } 10045 // match: (SliceLen (SliceMake _ (Const32 <t> [c]) _)) 10046 // cond: 10047 // result: (Const32 <t> [c]) 10048 for { 10049 v_0 := v.Args[0] 10050 if v_0.Op != OpSliceMake { 10051 break 10052 } 10053 v_0_1 := v_0.Args[1] 10054 if v_0_1.Op != OpConst32 { 10055 break 10056 } 10057 t := v_0_1.Type 10058 c := v_0_1.AuxInt 10059 v.reset(OpConst32) 10060 v.Type = t 10061 v.AuxInt = c 10062 return true 10063 } 10064 // match: (SliceLen (SliceMake _ (SliceLen x) _)) 10065 // cond: 10066 // result: (SliceLen x) 10067 for { 10068 v_0 := v.Args[0] 10069 if v_0.Op != OpSliceMake { 10070 break 10071 } 10072 v_0_1 := v_0.Args[1] 10073 if v_0_1.Op != OpSliceLen { 10074 break 10075 } 10076 x := v_0_1.Args[0] 10077 v.reset(OpSliceLen) 10078 v.AddArg(x) 10079 return true 10080 } 10081 return false 10082 } 10083 func rewriteValuegeneric_OpSlicePtr(v *Value, config *Config) bool { 10084 b := v.Block 10085 _ = b 10086 // match: (SlicePtr (SliceMake (SlicePtr x) _ _)) 10087 // cond: 10088 // result: (SlicePtr x) 10089 for { 10090 v_0 := v.Args[0] 10091 if v_0.Op != OpSliceMake { 10092 break 10093 } 10094 v_0_0 := v_0.Args[0] 10095 if v_0_0.Op != OpSlicePtr { 10096 break 10097 } 10098 x := v_0_0.Args[0] 10099 v.reset(OpSlicePtr) 10100 v.AddArg(x) 10101 return true 10102 } 10103 return false 10104 } 10105 func rewriteValuegeneric_OpSlicemask(v *Value, config *Config) bool { 10106 b := v.Block 10107 _ = b 10108 // match: (Slicemask (Const32 [x])) 10109 // cond: x > 0 10110 // result: 
(Const32 [-1]) 10111 for { 10112 v_0 := v.Args[0] 10113 if v_0.Op != OpConst32 { 10114 break 10115 } 10116 x := v_0.AuxInt 10117 if !(x > 0) { 10118 break 10119 } 10120 v.reset(OpConst32) 10121 v.AuxInt = -1 10122 return true 10123 } 10124 // match: (Slicemask (Const32 [0])) 10125 // cond: 10126 // result: (Const32 [0]) 10127 for { 10128 v_0 := v.Args[0] 10129 if v_0.Op != OpConst32 { 10130 break 10131 } 10132 if v_0.AuxInt != 0 { 10133 break 10134 } 10135 v.reset(OpConst32) 10136 v.AuxInt = 0 10137 return true 10138 } 10139 // match: (Slicemask (Const64 [x])) 10140 // cond: x > 0 10141 // result: (Const64 [-1]) 10142 for { 10143 v_0 := v.Args[0] 10144 if v_0.Op != OpConst64 { 10145 break 10146 } 10147 x := v_0.AuxInt 10148 if !(x > 0) { 10149 break 10150 } 10151 v.reset(OpConst64) 10152 v.AuxInt = -1 10153 return true 10154 } 10155 // match: (Slicemask (Const64 [0])) 10156 // cond: 10157 // result: (Const64 [0]) 10158 for { 10159 v_0 := v.Args[0] 10160 if v_0.Op != OpConst64 { 10161 break 10162 } 10163 if v_0.AuxInt != 0 { 10164 break 10165 } 10166 v.reset(OpConst64) 10167 v.AuxInt = 0 10168 return true 10169 } 10170 return false 10171 } 10172 func rewriteValuegeneric_OpSqrt(v *Value, config *Config) bool { 10173 b := v.Block 10174 _ = b 10175 // match: (Sqrt (Const64F [c])) 10176 // cond: 10177 // result: (Const64F [f2i(math.Sqrt(i2f(c)))]) 10178 for { 10179 v_0 := v.Args[0] 10180 if v_0.Op != OpConst64F { 10181 break 10182 } 10183 c := v_0.AuxInt 10184 v.reset(OpConst64F) 10185 v.AuxInt = f2i(math.Sqrt(i2f(c))) 10186 return true 10187 } 10188 return false 10189 } 10190 func rewriteValuegeneric_OpStore(v *Value, config *Config) bool { 10191 b := v.Block 10192 _ = b 10193 // match: (Store _ (StructMake0) mem) 10194 // cond: 10195 // result: mem 10196 for { 10197 v_1 := v.Args[1] 10198 if v_1.Op != OpStructMake0 { 10199 break 10200 } 10201 mem := v.Args[2] 10202 v.reset(OpCopy) 10203 v.Type = mem.Type 10204 v.AddArg(mem) 10205 return true 10206 } 10207 // match: 
(Store dst (StructMake1 <t> f0) mem) 10208 // cond: 10209 // result: (Store [t.FieldType(0).Size()] dst f0 mem) 10210 for { 10211 dst := v.Args[0] 10212 v_1 := v.Args[1] 10213 if v_1.Op != OpStructMake1 { 10214 break 10215 } 10216 t := v_1.Type 10217 f0 := v_1.Args[0] 10218 mem := v.Args[2] 10219 v.reset(OpStore) 10220 v.AuxInt = t.FieldType(0).Size() 10221 v.AddArg(dst) 10222 v.AddArg(f0) 10223 v.AddArg(mem) 10224 return true 10225 } 10226 // match: (Store dst (StructMake2 <t> f0 f1) mem) 10227 // cond: 10228 // result: (Store [t.FieldType(1).Size()] (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst) f1 (Store [t.FieldType(0).Size()] dst f0 mem)) 10229 for { 10230 dst := v.Args[0] 10231 v_1 := v.Args[1] 10232 if v_1.Op != OpStructMake2 { 10233 break 10234 } 10235 t := v_1.Type 10236 f0 := v_1.Args[0] 10237 f1 := v_1.Args[1] 10238 mem := v.Args[2] 10239 v.reset(OpStore) 10240 v.AuxInt = t.FieldType(1).Size() 10241 v0 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(1).PtrTo()) 10242 v0.AuxInt = t.FieldOff(1) 10243 v0.AddArg(dst) 10244 v.AddArg(v0) 10245 v.AddArg(f1) 10246 v1 := b.NewValue0(v.Line, OpStore, TypeMem) 10247 v1.AuxInt = t.FieldType(0).Size() 10248 v1.AddArg(dst) 10249 v1.AddArg(f0) 10250 v1.AddArg(mem) 10251 v.AddArg(v1) 10252 return true 10253 } 10254 // match: (Store dst (StructMake3 <t> f0 f1 f2) mem) 10255 // cond: 10256 // result: (Store [t.FieldType(2).Size()] (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] dst) f2 (Store [t.FieldType(1).Size()] (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst) f1 (Store [t.FieldType(0).Size()] dst f0 mem))) 10257 for { 10258 dst := v.Args[0] 10259 v_1 := v.Args[1] 10260 if v_1.Op != OpStructMake3 { 10261 break 10262 } 10263 t := v_1.Type 10264 f0 := v_1.Args[0] 10265 f1 := v_1.Args[1] 10266 f2 := v_1.Args[2] 10267 mem := v.Args[2] 10268 v.reset(OpStore) 10269 v.AuxInt = t.FieldType(2).Size() 10270 v0 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(2).PtrTo()) 10271 v0.AuxInt = t.FieldOff(2) 10272 v0.AddArg(dst) 
10273 v.AddArg(v0) 10274 v.AddArg(f2) 10275 v1 := b.NewValue0(v.Line, OpStore, TypeMem) 10276 v1.AuxInt = t.FieldType(1).Size() 10277 v2 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(1).PtrTo()) 10278 v2.AuxInt = t.FieldOff(1) 10279 v2.AddArg(dst) 10280 v1.AddArg(v2) 10281 v1.AddArg(f1) 10282 v3 := b.NewValue0(v.Line, OpStore, TypeMem) 10283 v3.AuxInt = t.FieldType(0).Size() 10284 v3.AddArg(dst) 10285 v3.AddArg(f0) 10286 v3.AddArg(mem) 10287 v1.AddArg(v3) 10288 v.AddArg(v1) 10289 return true 10290 } 10291 // match: (Store dst (StructMake4 <t> f0 f1 f2 f3) mem) 10292 // cond: 10293 // result: (Store [t.FieldType(3).Size()] (OffPtr <t.FieldType(3).PtrTo()> [t.FieldOff(3)] dst) f3 (Store [t.FieldType(2).Size()] (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] dst) f2 (Store [t.FieldType(1).Size()] (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst) f1 (Store [t.FieldType(0).Size()] dst f0 mem)))) 10294 for { 10295 dst := v.Args[0] 10296 v_1 := v.Args[1] 10297 if v_1.Op != OpStructMake4 { 10298 break 10299 } 10300 t := v_1.Type 10301 f0 := v_1.Args[0] 10302 f1 := v_1.Args[1] 10303 f2 := v_1.Args[2] 10304 f3 := v_1.Args[3] 10305 mem := v.Args[2] 10306 v.reset(OpStore) 10307 v.AuxInt = t.FieldType(3).Size() 10308 v0 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(3).PtrTo()) 10309 v0.AuxInt = t.FieldOff(3) 10310 v0.AddArg(dst) 10311 v.AddArg(v0) 10312 v.AddArg(f3) 10313 v1 := b.NewValue0(v.Line, OpStore, TypeMem) 10314 v1.AuxInt = t.FieldType(2).Size() 10315 v2 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(2).PtrTo()) 10316 v2.AuxInt = t.FieldOff(2) 10317 v2.AddArg(dst) 10318 v1.AddArg(v2) 10319 v1.AddArg(f2) 10320 v3 := b.NewValue0(v.Line, OpStore, TypeMem) 10321 v3.AuxInt = t.FieldType(1).Size() 10322 v4 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(1).PtrTo()) 10323 v4.AuxInt = t.FieldOff(1) 10324 v4.AddArg(dst) 10325 v3.AddArg(v4) 10326 v3.AddArg(f1) 10327 v5 := b.NewValue0(v.Line, OpStore, TypeMem) 10328 v5.AuxInt = t.FieldType(0).Size() 10329 v5.AddArg(dst) 10330 
v5.AddArg(f0) 10331 v5.AddArg(mem) 10332 v3.AddArg(v5) 10333 v1.AddArg(v3) 10334 v.AddArg(v1) 10335 return true 10336 } 10337 // match: (Store [size] dst (Load <t> src mem) mem) 10338 // cond: !config.fe.CanSSA(t) 10339 // result: (Move [MakeSizeAndAlign(size, t.Alignment()).Int64()] dst src mem) 10340 for { 10341 size := v.AuxInt 10342 dst := v.Args[0] 10343 v_1 := v.Args[1] 10344 if v_1.Op != OpLoad { 10345 break 10346 } 10347 t := v_1.Type 10348 src := v_1.Args[0] 10349 mem := v_1.Args[1] 10350 if mem != v.Args[2] { 10351 break 10352 } 10353 if !(!config.fe.CanSSA(t)) { 10354 break 10355 } 10356 v.reset(OpMove) 10357 v.AuxInt = MakeSizeAndAlign(size, t.Alignment()).Int64() 10358 v.AddArg(dst) 10359 v.AddArg(src) 10360 v.AddArg(mem) 10361 return true 10362 } 10363 // match: (Store [size] dst (Load <t> src mem) (VarDef {x} mem)) 10364 // cond: !config.fe.CanSSA(t) 10365 // result: (Move [MakeSizeAndAlign(size, t.Alignment()).Int64()] dst src (VarDef {x} mem)) 10366 for { 10367 size := v.AuxInt 10368 dst := v.Args[0] 10369 v_1 := v.Args[1] 10370 if v_1.Op != OpLoad { 10371 break 10372 } 10373 t := v_1.Type 10374 src := v_1.Args[0] 10375 mem := v_1.Args[1] 10376 v_2 := v.Args[2] 10377 if v_2.Op != OpVarDef { 10378 break 10379 } 10380 x := v_2.Aux 10381 if mem != v_2.Args[0] { 10382 break 10383 } 10384 if !(!config.fe.CanSSA(t)) { 10385 break 10386 } 10387 v.reset(OpMove) 10388 v.AuxInt = MakeSizeAndAlign(size, t.Alignment()).Int64() 10389 v.AddArg(dst) 10390 v.AddArg(src) 10391 v0 := b.NewValue0(v.Line, OpVarDef, TypeMem) 10392 v0.Aux = x 10393 v0.AddArg(mem) 10394 v.AddArg(v0) 10395 return true 10396 } 10397 // match: (Store _ (ArrayMake0) mem) 10398 // cond: 10399 // result: mem 10400 for { 10401 v_1 := v.Args[1] 10402 if v_1.Op != OpArrayMake0 { 10403 break 10404 } 10405 mem := v.Args[2] 10406 v.reset(OpCopy) 10407 v.Type = mem.Type 10408 v.AddArg(mem) 10409 return true 10410 } 10411 // match: (Store [size] dst (ArrayMake1 e) mem) 10412 // cond: 10413 // result: 
// (Store [size] dst e mem)   -- continuation of the (Store [size] dst (ArrayMake1 e) mem) rule's result.
	for {
		size := v.AuxInt
		dst := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpArrayMake1 {
			break
		}
		e := v_1.Args[0]
		mem := v.Args[2]
		// Storing a one-element array is the same as storing its single element.
		v.reset(OpStore)
		v.AuxInt = size
		v.AddArg(dst)
		v.AddArg(e)
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValuegeneric_OpStringLen applies the generic rewrite rules for
// OpStringLen values. It mutates v in place and reports whether a rule fired.
func rewriteValuegeneric_OpStringLen(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (StringLen (StringMake _ (Const64 <t> [c])))
	// cond:
	// result: (Const64 <t> [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpStringMake {
			break
		}
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		t := v_0_1.Type
		c := v_0_1.AuxInt
		// The length component of a StringMake is known: fold to the constant.
		v.reset(OpConst64)
		v.Type = t
		v.AuxInt = c
		return true
	}
	return false
}

// rewriteValuegeneric_OpStringPtr applies the generic rewrite rules for
// OpStringPtr values. It mutates v in place and reports whether a rule fired.
func rewriteValuegeneric_OpStringPtr(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (StringPtr (StringMake (Const64 <t> [c]) _))
	// cond:
	// result: (Const64 <t> [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpStringMake {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst64 {
			break
		}
		t := v_0_0.Type
		c := v_0_0.AuxInt
		// The pointer component of a StringMake is known: fold to the constant.
		v.reset(OpConst64)
		v.Type = t
		v.AuxInt = c
		return true
	}
	return false
}

// rewriteValuegeneric_OpStructSelect applies the generic rewrite rules for
// OpStructSelect values (field selection out of StructMakeN). It mutates v in
// place and reports whether a rule fired.
func rewriteValuegeneric_OpStructSelect(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (StructSelect (StructMake1 x))
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpStructMake1 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (StructSelect [0] (StructMake2 x _))
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
v_0 := v.Args[0] 10505 if v_0.Op != OpStructMake2 { 10506 break 10507 } 10508 x := v_0.Args[0] 10509 v.reset(OpCopy) 10510 v.Type = x.Type 10511 v.AddArg(x) 10512 return true 10513 } 10514 // match: (StructSelect [1] (StructMake2 _ x)) 10515 // cond: 10516 // result: x 10517 for { 10518 if v.AuxInt != 1 { 10519 break 10520 } 10521 v_0 := v.Args[0] 10522 if v_0.Op != OpStructMake2 { 10523 break 10524 } 10525 x := v_0.Args[1] 10526 v.reset(OpCopy) 10527 v.Type = x.Type 10528 v.AddArg(x) 10529 return true 10530 } 10531 // match: (StructSelect [0] (StructMake3 x _ _)) 10532 // cond: 10533 // result: x 10534 for { 10535 if v.AuxInt != 0 { 10536 break 10537 } 10538 v_0 := v.Args[0] 10539 if v_0.Op != OpStructMake3 { 10540 break 10541 } 10542 x := v_0.Args[0] 10543 v.reset(OpCopy) 10544 v.Type = x.Type 10545 v.AddArg(x) 10546 return true 10547 } 10548 // match: (StructSelect [1] (StructMake3 _ x _)) 10549 // cond: 10550 // result: x 10551 for { 10552 if v.AuxInt != 1 { 10553 break 10554 } 10555 v_0 := v.Args[0] 10556 if v_0.Op != OpStructMake3 { 10557 break 10558 } 10559 x := v_0.Args[1] 10560 v.reset(OpCopy) 10561 v.Type = x.Type 10562 v.AddArg(x) 10563 return true 10564 } 10565 // match: (StructSelect [2] (StructMake3 _ _ x)) 10566 // cond: 10567 // result: x 10568 for { 10569 if v.AuxInt != 2 { 10570 break 10571 } 10572 v_0 := v.Args[0] 10573 if v_0.Op != OpStructMake3 { 10574 break 10575 } 10576 x := v_0.Args[2] 10577 v.reset(OpCopy) 10578 v.Type = x.Type 10579 v.AddArg(x) 10580 return true 10581 } 10582 // match: (StructSelect [0] (StructMake4 x _ _ _)) 10583 // cond: 10584 // result: x 10585 for { 10586 if v.AuxInt != 0 { 10587 break 10588 } 10589 v_0 := v.Args[0] 10590 if v_0.Op != OpStructMake4 { 10591 break 10592 } 10593 x := v_0.Args[0] 10594 v.reset(OpCopy) 10595 v.Type = x.Type 10596 v.AddArg(x) 10597 return true 10598 } 10599 // match: (StructSelect [1] (StructMake4 _ x _ _)) 10600 // cond: 10601 // result: x 10602 for { 10603 if v.AuxInt != 1 { 10604 break 
10605 } 10606 v_0 := v.Args[0] 10607 if v_0.Op != OpStructMake4 { 10608 break 10609 } 10610 x := v_0.Args[1] 10611 v.reset(OpCopy) 10612 v.Type = x.Type 10613 v.AddArg(x) 10614 return true 10615 } 10616 // match: (StructSelect [2] (StructMake4 _ _ x _)) 10617 // cond: 10618 // result: x 10619 for { 10620 if v.AuxInt != 2 { 10621 break 10622 } 10623 v_0 := v.Args[0] 10624 if v_0.Op != OpStructMake4 { 10625 break 10626 } 10627 x := v_0.Args[2] 10628 v.reset(OpCopy) 10629 v.Type = x.Type 10630 v.AddArg(x) 10631 return true 10632 } 10633 // match: (StructSelect [3] (StructMake4 _ _ _ x)) 10634 // cond: 10635 // result: x 10636 for { 10637 if v.AuxInt != 3 { 10638 break 10639 } 10640 v_0 := v.Args[0] 10641 if v_0.Op != OpStructMake4 { 10642 break 10643 } 10644 x := v_0.Args[3] 10645 v.reset(OpCopy) 10646 v.Type = x.Type 10647 v.AddArg(x) 10648 return true 10649 } 10650 // match: (StructSelect [i] x:(Load <t> ptr mem)) 10651 // cond: !config.fe.CanSSA(t) 10652 // result: @x.Block (Load <v.Type> (OffPtr <v.Type.PtrTo()> [t.FieldOff(int(i))] ptr) mem) 10653 for { 10654 i := v.AuxInt 10655 x := v.Args[0] 10656 if x.Op != OpLoad { 10657 break 10658 } 10659 t := x.Type 10660 ptr := x.Args[0] 10661 mem := x.Args[1] 10662 if !(!config.fe.CanSSA(t)) { 10663 break 10664 } 10665 b = x.Block 10666 v0 := b.NewValue0(v.Line, OpLoad, v.Type) 10667 v.reset(OpCopy) 10668 v.AddArg(v0) 10669 v1 := b.NewValue0(v.Line, OpOffPtr, v.Type.PtrTo()) 10670 v1.AuxInt = t.FieldOff(int(i)) 10671 v1.AddArg(ptr) 10672 v0.AddArg(v1) 10673 v0.AddArg(mem) 10674 return true 10675 } 10676 return false 10677 } 10678 func rewriteValuegeneric_OpSub16(v *Value, config *Config) bool { 10679 b := v.Block 10680 _ = b 10681 // match: (Sub16 (Const16 [c]) (Const16 [d])) 10682 // cond: 10683 // result: (Const16 [int64(int16(c-d))]) 10684 for { 10685 v_0 := v.Args[0] 10686 if v_0.Op != OpConst16 { 10687 break 10688 } 10689 c := v_0.AuxInt 10690 v_1 := v.Args[1] 10691 if v_1.Op != OpConst16 { 10692 break 10693 } 10694 
d := v_1.AuxInt 10695 v.reset(OpConst16) 10696 v.AuxInt = int64(int16(c - d)) 10697 return true 10698 } 10699 // match: (Sub16 x (Const16 <t> [c])) 10700 // cond: x.Op != OpConst16 10701 // result: (Add16 (Const16 <t> [int64(int16(-c))]) x) 10702 for { 10703 x := v.Args[0] 10704 v_1 := v.Args[1] 10705 if v_1.Op != OpConst16 { 10706 break 10707 } 10708 t := v_1.Type 10709 c := v_1.AuxInt 10710 if !(x.Op != OpConst16) { 10711 break 10712 } 10713 v.reset(OpAdd16) 10714 v0 := b.NewValue0(v.Line, OpConst16, t) 10715 v0.AuxInt = int64(int16(-c)) 10716 v.AddArg(v0) 10717 v.AddArg(x) 10718 return true 10719 } 10720 // match: (Sub16 x x) 10721 // cond: 10722 // result: (Const16 [0]) 10723 for { 10724 x := v.Args[0] 10725 if x != v.Args[1] { 10726 break 10727 } 10728 v.reset(OpConst16) 10729 v.AuxInt = 0 10730 return true 10731 } 10732 // match: (Sub16 (Add16 x y) x) 10733 // cond: 10734 // result: y 10735 for { 10736 v_0 := v.Args[0] 10737 if v_0.Op != OpAdd16 { 10738 break 10739 } 10740 x := v_0.Args[0] 10741 y := v_0.Args[1] 10742 if x != v.Args[1] { 10743 break 10744 } 10745 v.reset(OpCopy) 10746 v.Type = y.Type 10747 v.AddArg(y) 10748 return true 10749 } 10750 // match: (Sub16 (Add16 x y) y) 10751 // cond: 10752 // result: x 10753 for { 10754 v_0 := v.Args[0] 10755 if v_0.Op != OpAdd16 { 10756 break 10757 } 10758 x := v_0.Args[0] 10759 y := v_0.Args[1] 10760 if y != v.Args[1] { 10761 break 10762 } 10763 v.reset(OpCopy) 10764 v.Type = x.Type 10765 v.AddArg(x) 10766 return true 10767 } 10768 return false 10769 } 10770 func rewriteValuegeneric_OpSub32(v *Value, config *Config) bool { 10771 b := v.Block 10772 _ = b 10773 // match: (Sub32 (Const32 [c]) (Const32 [d])) 10774 // cond: 10775 // result: (Const32 [int64(int32(c-d))]) 10776 for { 10777 v_0 := v.Args[0] 10778 if v_0.Op != OpConst32 { 10779 break 10780 } 10781 c := v_0.AuxInt 10782 v_1 := v.Args[1] 10783 if v_1.Op != OpConst32 { 10784 break 10785 } 10786 d := v_1.AuxInt 10787 v.reset(OpConst32) 10788 v.AuxInt = 
int64(int32(c - d)) // continuation: v.AuxInt = int64(int32(c - d)) for the (Sub32 (Const32 [c]) (Const32 [d])) fold.
		return true
	}
	// match: (Sub32 x (Const32 <t> [c]))
	// cond: x.Op != OpConst32
	// result: (Add32 (Const32 <t> [int64(int32(-c))]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst32) {
			break
		}
		// Canonicalize x-c into (-c)+x so later rules see constants first.
		v.reset(OpAdd32)
		v0 := b.NewValue0(v.Line, OpConst32, t)
		v0.AuxInt = int64(int32(-c))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Sub32 x x)
	// cond:
	// result: (Const32 [0])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = 0
		return true
	}
	// match: (Sub32 (Add32 x y) x)
	// cond:
	// result: y
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAdd32 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (Sub32 (Add32 x y) y)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAdd32 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if y != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpSub32F applies the generic rewrite rules for OpSub32F
// values. It mutates v in place and reports whether a rule fired.
func rewriteValuegeneric_OpSub32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sub32F (Const32F [c]) (Const32F [d]))
	// cond:
	// result: (Const32F [f2i(float64(i2f32(c) - i2f32(d)))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32F {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32F {
			break
		}
		d := v_1.AuxInt
		// Constant-fold via the float32 bit-pattern helpers (i2f32/f2i).
		v.reset(OpConst32F)
		v.AuxInt = f2i(float64(i2f32(c) - i2f32(d)))
		return true
	}
	// match: (Sub32F x (Const32F [0]))
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32F {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpSub64 applies the generic rewrite rules for OpSub64
// values. It mutates v in place and reports whether a rule fired.
func rewriteValuegeneric_OpSub64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sub64 (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (Const64 [c-d])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst64)
		v.AuxInt = c - d
		return true
	}
	// match: (Sub64 x (Const64 <t> [c]))
	// cond: x.Op != OpConst64
	// result: (Add64 (Const64 <t> [-c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst64) {
			break
		}
		// Canonicalize x-c into (-c)+x so later rules see constants first.
		v.reset(OpAdd64)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = -c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Sub64 x x)
	// cond:
	// result: (Const64 [0])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	// match: (Sub64 (Add64 x y) x)
	// cond:
	// result: y
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAdd64 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (Sub64 (Add64 x y) y)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if
v_0.Op != OpAdd64 { 10980 break 10981 } 10982 x := v_0.Args[0] 10983 y := v_0.Args[1] 10984 if y != v.Args[1] { 10985 break 10986 } 10987 v.reset(OpCopy) 10988 v.Type = x.Type 10989 v.AddArg(x) 10990 return true 10991 } 10992 return false 10993 } 10994 func rewriteValuegeneric_OpSub64F(v *Value, config *Config) bool { 10995 b := v.Block 10996 _ = b 10997 // match: (Sub64F (Const64F [c]) (Const64F [d])) 10998 // cond: 10999 // result: (Const64F [f2i(i2f(c) - i2f(d))]) 11000 for { 11001 v_0 := v.Args[0] 11002 if v_0.Op != OpConst64F { 11003 break 11004 } 11005 c := v_0.AuxInt 11006 v_1 := v.Args[1] 11007 if v_1.Op != OpConst64F { 11008 break 11009 } 11010 d := v_1.AuxInt 11011 v.reset(OpConst64F) 11012 v.AuxInt = f2i(i2f(c) - i2f(d)) 11013 return true 11014 } 11015 // match: (Sub64F x (Const64F [0])) 11016 // cond: 11017 // result: x 11018 for { 11019 x := v.Args[0] 11020 v_1 := v.Args[1] 11021 if v_1.Op != OpConst64F { 11022 break 11023 } 11024 if v_1.AuxInt != 0 { 11025 break 11026 } 11027 v.reset(OpCopy) 11028 v.Type = x.Type 11029 v.AddArg(x) 11030 return true 11031 } 11032 return false 11033 } 11034 func rewriteValuegeneric_OpSub8(v *Value, config *Config) bool { 11035 b := v.Block 11036 _ = b 11037 // match: (Sub8 (Const8 [c]) (Const8 [d])) 11038 // cond: 11039 // result: (Const8 [int64(int8(c-d))]) 11040 for { 11041 v_0 := v.Args[0] 11042 if v_0.Op != OpConst8 { 11043 break 11044 } 11045 c := v_0.AuxInt 11046 v_1 := v.Args[1] 11047 if v_1.Op != OpConst8 { 11048 break 11049 } 11050 d := v_1.AuxInt 11051 v.reset(OpConst8) 11052 v.AuxInt = int64(int8(c - d)) 11053 return true 11054 } 11055 // match: (Sub8 x (Const8 <t> [c])) 11056 // cond: x.Op != OpConst8 11057 // result: (Add8 (Const8 <t> [int64(int8(-c))]) x) 11058 for { 11059 x := v.Args[0] 11060 v_1 := v.Args[1] 11061 if v_1.Op != OpConst8 { 11062 break 11063 } 11064 t := v_1.Type 11065 c := v_1.AuxInt 11066 if !(x.Op != OpConst8) { 11067 break 11068 } 11069 v.reset(OpAdd8) 11070 v0 := b.NewValue0(v.Line, 
OpConst8, t) 11071 v0.AuxInt = int64(int8(-c)) 11072 v.AddArg(v0) 11073 v.AddArg(x) 11074 return true 11075 } 11076 // match: (Sub8 x x) 11077 // cond: 11078 // result: (Const8 [0]) 11079 for { 11080 x := v.Args[0] 11081 if x != v.Args[1] { 11082 break 11083 } 11084 v.reset(OpConst8) 11085 v.AuxInt = 0 11086 return true 11087 } 11088 // match: (Sub8 (Add8 x y) x) 11089 // cond: 11090 // result: y 11091 for { 11092 v_0 := v.Args[0] 11093 if v_0.Op != OpAdd8 { 11094 break 11095 } 11096 x := v_0.Args[0] 11097 y := v_0.Args[1] 11098 if x != v.Args[1] { 11099 break 11100 } 11101 v.reset(OpCopy) 11102 v.Type = y.Type 11103 v.AddArg(y) 11104 return true 11105 } 11106 // match: (Sub8 (Add8 x y) y) 11107 // cond: 11108 // result: x 11109 for { 11110 v_0 := v.Args[0] 11111 if v_0.Op != OpAdd8 { 11112 break 11113 } 11114 x := v_0.Args[0] 11115 y := v_0.Args[1] 11116 if y != v.Args[1] { 11117 break 11118 } 11119 v.reset(OpCopy) 11120 v.Type = x.Type 11121 v.AddArg(x) 11122 return true 11123 } 11124 return false 11125 } 11126 func rewriteValuegeneric_OpTrunc16to8(v *Value, config *Config) bool { 11127 b := v.Block 11128 _ = b 11129 // match: (Trunc16to8 (Const16 [c])) 11130 // cond: 11131 // result: (Const8 [int64(int8(c))]) 11132 for { 11133 v_0 := v.Args[0] 11134 if v_0.Op != OpConst16 { 11135 break 11136 } 11137 c := v_0.AuxInt 11138 v.reset(OpConst8) 11139 v.AuxInt = int64(int8(c)) 11140 return true 11141 } 11142 // match: (Trunc16to8 (ZeroExt8to16 x)) 11143 // cond: 11144 // result: x 11145 for { 11146 v_0 := v.Args[0] 11147 if v_0.Op != OpZeroExt8to16 { 11148 break 11149 } 11150 x := v_0.Args[0] 11151 v.reset(OpCopy) 11152 v.Type = x.Type 11153 v.AddArg(x) 11154 return true 11155 } 11156 // match: (Trunc16to8 (SignExt8to16 x)) 11157 // cond: 11158 // result: x 11159 for { 11160 v_0 := v.Args[0] 11161 if v_0.Op != OpSignExt8to16 { 11162 break 11163 } 11164 x := v_0.Args[0] 11165 v.reset(OpCopy) 11166 v.Type = x.Type 11167 v.AddArg(x) 11168 return true 11169 } 11170 // 
match: (Trunc16to8 (And16 (Const16 [y]) x)) 11171 // cond: y&0xFF == 0xFF 11172 // result: (Trunc16to8 x) 11173 for { 11174 v_0 := v.Args[0] 11175 if v_0.Op != OpAnd16 { 11176 break 11177 } 11178 v_0_0 := v_0.Args[0] 11179 if v_0_0.Op != OpConst16 { 11180 break 11181 } 11182 y := v_0_0.AuxInt 11183 x := v_0.Args[1] 11184 if !(y&0xFF == 0xFF) { 11185 break 11186 } 11187 v.reset(OpTrunc16to8) 11188 v.AddArg(x) 11189 return true 11190 } 11191 return false 11192 } 11193 func rewriteValuegeneric_OpTrunc32to16(v *Value, config *Config) bool { 11194 b := v.Block 11195 _ = b 11196 // match: (Trunc32to16 (Const32 [c])) 11197 // cond: 11198 // result: (Const16 [int64(int16(c))]) 11199 for { 11200 v_0 := v.Args[0] 11201 if v_0.Op != OpConst32 { 11202 break 11203 } 11204 c := v_0.AuxInt 11205 v.reset(OpConst16) 11206 v.AuxInt = int64(int16(c)) 11207 return true 11208 } 11209 // match: (Trunc32to16 (ZeroExt8to32 x)) 11210 // cond: 11211 // result: (ZeroExt8to16 x) 11212 for { 11213 v_0 := v.Args[0] 11214 if v_0.Op != OpZeroExt8to32 { 11215 break 11216 } 11217 x := v_0.Args[0] 11218 v.reset(OpZeroExt8to16) 11219 v.AddArg(x) 11220 return true 11221 } 11222 // match: (Trunc32to16 (ZeroExt16to32 x)) 11223 // cond: 11224 // result: x 11225 for { 11226 v_0 := v.Args[0] 11227 if v_0.Op != OpZeroExt16to32 { 11228 break 11229 } 11230 x := v_0.Args[0] 11231 v.reset(OpCopy) 11232 v.Type = x.Type 11233 v.AddArg(x) 11234 return true 11235 } 11236 // match: (Trunc32to16 (SignExt8to32 x)) 11237 // cond: 11238 // result: (SignExt8to16 x) 11239 for { 11240 v_0 := v.Args[0] 11241 if v_0.Op != OpSignExt8to32 { 11242 break 11243 } 11244 x := v_0.Args[0] 11245 v.reset(OpSignExt8to16) 11246 v.AddArg(x) 11247 return true 11248 } 11249 // match: (Trunc32to16 (SignExt16to32 x)) 11250 // cond: 11251 // result: x 11252 for { 11253 v_0 := v.Args[0] 11254 if v_0.Op != OpSignExt16to32 { 11255 break 11256 } 11257 x := v_0.Args[0] 11258 v.reset(OpCopy) 11259 v.Type = x.Type 11260 v.AddArg(x) 11261 return true 
11262 } 11263 // match: (Trunc32to16 (And32 (Const32 [y]) x)) 11264 // cond: y&0xFFFF == 0xFFFF 11265 // result: (Trunc32to16 x) 11266 for { 11267 v_0 := v.Args[0] 11268 if v_0.Op != OpAnd32 { 11269 break 11270 } 11271 v_0_0 := v_0.Args[0] 11272 if v_0_0.Op != OpConst32 { 11273 break 11274 } 11275 y := v_0_0.AuxInt 11276 x := v_0.Args[1] 11277 if !(y&0xFFFF == 0xFFFF) { 11278 break 11279 } 11280 v.reset(OpTrunc32to16) 11281 v.AddArg(x) 11282 return true 11283 } 11284 return false 11285 } 11286 func rewriteValuegeneric_OpTrunc32to8(v *Value, config *Config) bool { 11287 b := v.Block 11288 _ = b 11289 // match: (Trunc32to8 (Const32 [c])) 11290 // cond: 11291 // result: (Const8 [int64(int8(c))]) 11292 for { 11293 v_0 := v.Args[0] 11294 if v_0.Op != OpConst32 { 11295 break 11296 } 11297 c := v_0.AuxInt 11298 v.reset(OpConst8) 11299 v.AuxInt = int64(int8(c)) 11300 return true 11301 } 11302 // match: (Trunc32to8 (ZeroExt8to32 x)) 11303 // cond: 11304 // result: x 11305 for { 11306 v_0 := v.Args[0] 11307 if v_0.Op != OpZeroExt8to32 { 11308 break 11309 } 11310 x := v_0.Args[0] 11311 v.reset(OpCopy) 11312 v.Type = x.Type 11313 v.AddArg(x) 11314 return true 11315 } 11316 // match: (Trunc32to8 (SignExt8to32 x)) 11317 // cond: 11318 // result: x 11319 for { 11320 v_0 := v.Args[0] 11321 if v_0.Op != OpSignExt8to32 { 11322 break 11323 } 11324 x := v_0.Args[0] 11325 v.reset(OpCopy) 11326 v.Type = x.Type 11327 v.AddArg(x) 11328 return true 11329 } 11330 // match: (Trunc32to8 (And32 (Const32 [y]) x)) 11331 // cond: y&0xFF == 0xFF 11332 // result: (Trunc32to8 x) 11333 for { 11334 v_0 := v.Args[0] 11335 if v_0.Op != OpAnd32 { 11336 break 11337 } 11338 v_0_0 := v_0.Args[0] 11339 if v_0_0.Op != OpConst32 { 11340 break 11341 } 11342 y := v_0_0.AuxInt 11343 x := v_0.Args[1] 11344 if !(y&0xFF == 0xFF) { 11345 break 11346 } 11347 v.reset(OpTrunc32to8) 11348 v.AddArg(x) 11349 return true 11350 } 11351 return false 11352 } 11353 func rewriteValuegeneric_OpTrunc64to16(v *Value, config 
*Config) bool { 11354 b := v.Block 11355 _ = b 11356 // match: (Trunc64to16 (Const64 [c])) 11357 // cond: 11358 // result: (Const16 [int64(int16(c))]) 11359 for { 11360 v_0 := v.Args[0] 11361 if v_0.Op != OpConst64 { 11362 break 11363 } 11364 c := v_0.AuxInt 11365 v.reset(OpConst16) 11366 v.AuxInt = int64(int16(c)) 11367 return true 11368 } 11369 // match: (Trunc64to16 (ZeroExt8to64 x)) 11370 // cond: 11371 // result: (ZeroExt8to16 x) 11372 for { 11373 v_0 := v.Args[0] 11374 if v_0.Op != OpZeroExt8to64 { 11375 break 11376 } 11377 x := v_0.Args[0] 11378 v.reset(OpZeroExt8to16) 11379 v.AddArg(x) 11380 return true 11381 } 11382 // match: (Trunc64to16 (ZeroExt16to64 x)) 11383 // cond: 11384 // result: x 11385 for { 11386 v_0 := v.Args[0] 11387 if v_0.Op != OpZeroExt16to64 { 11388 break 11389 } 11390 x := v_0.Args[0] 11391 v.reset(OpCopy) 11392 v.Type = x.Type 11393 v.AddArg(x) 11394 return true 11395 } 11396 // match: (Trunc64to16 (SignExt8to64 x)) 11397 // cond: 11398 // result: (SignExt8to16 x) 11399 for { 11400 v_0 := v.Args[0] 11401 if v_0.Op != OpSignExt8to64 { 11402 break 11403 } 11404 x := v_0.Args[0] 11405 v.reset(OpSignExt8to16) 11406 v.AddArg(x) 11407 return true 11408 } 11409 // match: (Trunc64to16 (SignExt16to64 x)) 11410 // cond: 11411 // result: x 11412 for { 11413 v_0 := v.Args[0] 11414 if v_0.Op != OpSignExt16to64 { 11415 break 11416 } 11417 x := v_0.Args[0] 11418 v.reset(OpCopy) 11419 v.Type = x.Type 11420 v.AddArg(x) 11421 return true 11422 } 11423 // match: (Trunc64to16 (And64 (Const64 [y]) x)) 11424 // cond: y&0xFFFF == 0xFFFF 11425 // result: (Trunc64to16 x) 11426 for { 11427 v_0 := v.Args[0] 11428 if v_0.Op != OpAnd64 { 11429 break 11430 } 11431 v_0_0 := v_0.Args[0] 11432 if v_0_0.Op != OpConst64 { 11433 break 11434 } 11435 y := v_0_0.AuxInt 11436 x := v_0.Args[1] 11437 if !(y&0xFFFF == 0xFFFF) { 11438 break 11439 } 11440 v.reset(OpTrunc64to16) 11441 v.AddArg(x) 11442 return true 11443 } 11444 return false 11445 } 11446 func 
rewriteValuegeneric_OpTrunc64to32(v *Value, config *Config) bool { 11447 b := v.Block 11448 _ = b 11449 // match: (Trunc64to32 (Const64 [c])) 11450 // cond: 11451 // result: (Const32 [int64(int32(c))]) 11452 for { 11453 v_0 := v.Args[0] 11454 if v_0.Op != OpConst64 { 11455 break 11456 } 11457 c := v_0.AuxInt 11458 v.reset(OpConst32) 11459 v.AuxInt = int64(int32(c)) 11460 return true 11461 } 11462 // match: (Trunc64to32 (ZeroExt8to64 x)) 11463 // cond: 11464 // result: (ZeroExt8to32 x) 11465 for { 11466 v_0 := v.Args[0] 11467 if v_0.Op != OpZeroExt8to64 { 11468 break 11469 } 11470 x := v_0.Args[0] 11471 v.reset(OpZeroExt8to32) 11472 v.AddArg(x) 11473 return true 11474 } 11475 // match: (Trunc64to32 (ZeroExt16to64 x)) 11476 // cond: 11477 // result: (ZeroExt16to32 x) 11478 for { 11479 v_0 := v.Args[0] 11480 if v_0.Op != OpZeroExt16to64 { 11481 break 11482 } 11483 x := v_0.Args[0] 11484 v.reset(OpZeroExt16to32) 11485 v.AddArg(x) 11486 return true 11487 } 11488 // match: (Trunc64to32 (ZeroExt32to64 x)) 11489 // cond: 11490 // result: x 11491 for { 11492 v_0 := v.Args[0] 11493 if v_0.Op != OpZeroExt32to64 { 11494 break 11495 } 11496 x := v_0.Args[0] 11497 v.reset(OpCopy) 11498 v.Type = x.Type 11499 v.AddArg(x) 11500 return true 11501 } 11502 // match: (Trunc64to32 (SignExt8to64 x)) 11503 // cond: 11504 // result: (SignExt8to32 x) 11505 for { 11506 v_0 := v.Args[0] 11507 if v_0.Op != OpSignExt8to64 { 11508 break 11509 } 11510 x := v_0.Args[0] 11511 v.reset(OpSignExt8to32) 11512 v.AddArg(x) 11513 return true 11514 } 11515 // match: (Trunc64to32 (SignExt16to64 x)) 11516 // cond: 11517 // result: (SignExt16to32 x) 11518 for { 11519 v_0 := v.Args[0] 11520 if v_0.Op != OpSignExt16to64 { 11521 break 11522 } 11523 x := v_0.Args[0] 11524 v.reset(OpSignExt16to32) 11525 v.AddArg(x) 11526 return true 11527 } 11528 // match: (Trunc64to32 (SignExt32to64 x)) 11529 // cond: 11530 // result: x 11531 for { 11532 v_0 := v.Args[0] 11533 if v_0.Op != OpSignExt32to64 { 11534 break 11535 } 
11536 x := v_0.Args[0] 11537 v.reset(OpCopy) 11538 v.Type = x.Type 11539 v.AddArg(x) 11540 return true 11541 } 11542 // match: (Trunc64to32 (And64 (Const64 [y]) x)) 11543 // cond: y&0xFFFFFFFF == 0xFFFFFFFF 11544 // result: (Trunc64to32 x) 11545 for { 11546 v_0 := v.Args[0] 11547 if v_0.Op != OpAnd64 { 11548 break 11549 } 11550 v_0_0 := v_0.Args[0] 11551 if v_0_0.Op != OpConst64 { 11552 break 11553 } 11554 y := v_0_0.AuxInt 11555 x := v_0.Args[1] 11556 if !(y&0xFFFFFFFF == 0xFFFFFFFF) { 11557 break 11558 } 11559 v.reset(OpTrunc64to32) 11560 v.AddArg(x) 11561 return true 11562 } 11563 return false 11564 } 11565 func rewriteValuegeneric_OpTrunc64to8(v *Value, config *Config) bool { 11566 b := v.Block 11567 _ = b 11568 // match: (Trunc64to8 (Const64 [c])) 11569 // cond: 11570 // result: (Const8 [int64(int8(c))]) 11571 for { 11572 v_0 := v.Args[0] 11573 if v_0.Op != OpConst64 { 11574 break 11575 } 11576 c := v_0.AuxInt 11577 v.reset(OpConst8) 11578 v.AuxInt = int64(int8(c)) 11579 return true 11580 } 11581 // match: (Trunc64to8 (ZeroExt8to64 x)) 11582 // cond: 11583 // result: x 11584 for { 11585 v_0 := v.Args[0] 11586 if v_0.Op != OpZeroExt8to64 { 11587 break 11588 } 11589 x := v_0.Args[0] 11590 v.reset(OpCopy) 11591 v.Type = x.Type 11592 v.AddArg(x) 11593 return true 11594 } 11595 // match: (Trunc64to8 (SignExt8to64 x)) 11596 // cond: 11597 // result: x 11598 for { 11599 v_0 := v.Args[0] 11600 if v_0.Op != OpSignExt8to64 { 11601 break 11602 } 11603 x := v_0.Args[0] 11604 v.reset(OpCopy) 11605 v.Type = x.Type 11606 v.AddArg(x) 11607 return true 11608 } 11609 // match: (Trunc64to8 (And64 (Const64 [y]) x)) 11610 // cond: y&0xFF == 0xFF 11611 // result: (Trunc64to8 x) 11612 for { 11613 v_0 := v.Args[0] 11614 if v_0.Op != OpAnd64 { 11615 break 11616 } 11617 v_0_0 := v_0.Args[0] 11618 if v_0_0.Op != OpConst64 { 11619 break 11620 } 11621 y := v_0_0.AuxInt 11622 x := v_0.Args[1] 11623 if !(y&0xFF == 0xFF) { 11624 break 11625 } 11626 v.reset(OpTrunc64to8) 11627 v.AddArg(x) 
11628 return true 11629 } 11630 return false 11631 } 11632 func rewriteValuegeneric_OpXor16(v *Value, config *Config) bool { 11633 b := v.Block 11634 _ = b 11635 // match: (Xor16 x (Const16 <t> [c])) 11636 // cond: x.Op != OpConst16 11637 // result: (Xor16 (Const16 <t> [c]) x) 11638 for { 11639 x := v.Args[0] 11640 v_1 := v.Args[1] 11641 if v_1.Op != OpConst16 { 11642 break 11643 } 11644 t := v_1.Type 11645 c := v_1.AuxInt 11646 if !(x.Op != OpConst16) { 11647 break 11648 } 11649 v.reset(OpXor16) 11650 v0 := b.NewValue0(v.Line, OpConst16, t) 11651 v0.AuxInt = c 11652 v.AddArg(v0) 11653 v.AddArg(x) 11654 return true 11655 } 11656 // match: (Xor16 x x) 11657 // cond: 11658 // result: (Const16 [0]) 11659 for { 11660 x := v.Args[0] 11661 if x != v.Args[1] { 11662 break 11663 } 11664 v.reset(OpConst16) 11665 v.AuxInt = 0 11666 return true 11667 } 11668 // match: (Xor16 (Const16 [0]) x) 11669 // cond: 11670 // result: x 11671 for { 11672 v_0 := v.Args[0] 11673 if v_0.Op != OpConst16 { 11674 break 11675 } 11676 if v_0.AuxInt != 0 { 11677 break 11678 } 11679 x := v.Args[1] 11680 v.reset(OpCopy) 11681 v.Type = x.Type 11682 v.AddArg(x) 11683 return true 11684 } 11685 // match: (Xor16 x (Xor16 x y)) 11686 // cond: 11687 // result: y 11688 for { 11689 x := v.Args[0] 11690 v_1 := v.Args[1] 11691 if v_1.Op != OpXor16 { 11692 break 11693 } 11694 if x != v_1.Args[0] { 11695 break 11696 } 11697 y := v_1.Args[1] 11698 v.reset(OpCopy) 11699 v.Type = y.Type 11700 v.AddArg(y) 11701 return true 11702 } 11703 // match: (Xor16 x (Xor16 y x)) 11704 // cond: 11705 // result: y 11706 for { 11707 x := v.Args[0] 11708 v_1 := v.Args[1] 11709 if v_1.Op != OpXor16 { 11710 break 11711 } 11712 y := v_1.Args[0] 11713 if x != v_1.Args[1] { 11714 break 11715 } 11716 v.reset(OpCopy) 11717 v.Type = y.Type 11718 v.AddArg(y) 11719 return true 11720 } 11721 // match: (Xor16 (Xor16 x y) x) 11722 // cond: 11723 // result: y 11724 for { 11725 v_0 := v.Args[0] 11726 if v_0.Op != OpXor16 { 11727 break 11728 } 
11729 x := v_0.Args[0] 11730 y := v_0.Args[1] 11731 if x != v.Args[1] { 11732 break 11733 } 11734 v.reset(OpCopy) 11735 v.Type = y.Type 11736 v.AddArg(y) 11737 return true 11738 } 11739 // match: (Xor16 (Xor16 x y) y) 11740 // cond: 11741 // result: x 11742 for { 11743 v_0 := v.Args[0] 11744 if v_0.Op != OpXor16 { 11745 break 11746 } 11747 x := v_0.Args[0] 11748 y := v_0.Args[1] 11749 if y != v.Args[1] { 11750 break 11751 } 11752 v.reset(OpCopy) 11753 v.Type = x.Type 11754 v.AddArg(x) 11755 return true 11756 } 11757 return false 11758 } 11759 func rewriteValuegeneric_OpXor32(v *Value, config *Config) bool { 11760 b := v.Block 11761 _ = b 11762 // match: (Xor32 x (Const32 <t> [c])) 11763 // cond: x.Op != OpConst32 11764 // result: (Xor32 (Const32 <t> [c]) x) 11765 for { 11766 x := v.Args[0] 11767 v_1 := v.Args[1] 11768 if v_1.Op != OpConst32 { 11769 break 11770 } 11771 t := v_1.Type 11772 c := v_1.AuxInt 11773 if !(x.Op != OpConst32) { 11774 break 11775 } 11776 v.reset(OpXor32) 11777 v0 := b.NewValue0(v.Line, OpConst32, t) 11778 v0.AuxInt = c 11779 v.AddArg(v0) 11780 v.AddArg(x) 11781 return true 11782 } 11783 // match: (Xor32 x x) 11784 // cond: 11785 // result: (Const32 [0]) 11786 for { 11787 x := v.Args[0] 11788 if x != v.Args[1] { 11789 break 11790 } 11791 v.reset(OpConst32) 11792 v.AuxInt = 0 11793 return true 11794 } 11795 // match: (Xor32 (Const32 [0]) x) 11796 // cond: 11797 // result: x 11798 for { 11799 v_0 := v.Args[0] 11800 if v_0.Op != OpConst32 { 11801 break 11802 } 11803 if v_0.AuxInt != 0 { 11804 break 11805 } 11806 x := v.Args[1] 11807 v.reset(OpCopy) 11808 v.Type = x.Type 11809 v.AddArg(x) 11810 return true 11811 } 11812 // match: (Xor32 x (Xor32 x y)) 11813 // cond: 11814 // result: y 11815 for { 11816 x := v.Args[0] 11817 v_1 := v.Args[1] 11818 if v_1.Op != OpXor32 { 11819 break 11820 } 11821 if x != v_1.Args[0] { 11822 break 11823 } 11824 y := v_1.Args[1] 11825 v.reset(OpCopy) 11826 v.Type = y.Type 11827 v.AddArg(y) 11828 return true 11829 } 
11830 // match: (Xor32 x (Xor32 y x)) 11831 // cond: 11832 // result: y 11833 for { 11834 x := v.Args[0] 11835 v_1 := v.Args[1] 11836 if v_1.Op != OpXor32 { 11837 break 11838 } 11839 y := v_1.Args[0] 11840 if x != v_1.Args[1] { 11841 break 11842 } 11843 v.reset(OpCopy) 11844 v.Type = y.Type 11845 v.AddArg(y) 11846 return true 11847 } 11848 // match: (Xor32 (Xor32 x y) x) 11849 // cond: 11850 // result: y 11851 for { 11852 v_0 := v.Args[0] 11853 if v_0.Op != OpXor32 { 11854 break 11855 } 11856 x := v_0.Args[0] 11857 y := v_0.Args[1] 11858 if x != v.Args[1] { 11859 break 11860 } 11861 v.reset(OpCopy) 11862 v.Type = y.Type 11863 v.AddArg(y) 11864 return true 11865 } 11866 // match: (Xor32 (Xor32 x y) y) 11867 // cond: 11868 // result: x 11869 for { 11870 v_0 := v.Args[0] 11871 if v_0.Op != OpXor32 { 11872 break 11873 } 11874 x := v_0.Args[0] 11875 y := v_0.Args[1] 11876 if y != v.Args[1] { 11877 break 11878 } 11879 v.reset(OpCopy) 11880 v.Type = x.Type 11881 v.AddArg(x) 11882 return true 11883 } 11884 return false 11885 } 11886 func rewriteValuegeneric_OpXor64(v *Value, config *Config) bool { 11887 b := v.Block 11888 _ = b 11889 // match: (Xor64 x (Const64 <t> [c])) 11890 // cond: x.Op != OpConst64 11891 // result: (Xor64 (Const64 <t> [c]) x) 11892 for { 11893 x := v.Args[0] 11894 v_1 := v.Args[1] 11895 if v_1.Op != OpConst64 { 11896 break 11897 } 11898 t := v_1.Type 11899 c := v_1.AuxInt 11900 if !(x.Op != OpConst64) { 11901 break 11902 } 11903 v.reset(OpXor64) 11904 v0 := b.NewValue0(v.Line, OpConst64, t) 11905 v0.AuxInt = c 11906 v.AddArg(v0) 11907 v.AddArg(x) 11908 return true 11909 } 11910 // match: (Xor64 x x) 11911 // cond: 11912 // result: (Const64 [0]) 11913 for { 11914 x := v.Args[0] 11915 if x != v.Args[1] { 11916 break 11917 } 11918 v.reset(OpConst64) 11919 v.AuxInt = 0 11920 return true 11921 } 11922 // match: (Xor64 (Const64 [0]) x) 11923 // cond: 11924 // result: x 11925 for { 11926 v_0 := v.Args[0] 11927 if v_0.Op != OpConst64 { 11928 break 11929 } 
if v_0.AuxInt != 0 { // continuation of the (Xor64 (Const64 [0]) x) => x rule.
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Xor64 x (Xor64 x y))
	// cond:
	// result: y
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpXor64 {
			break
		}
		if x != v_1.Args[0] {
			break
		}
		y := v_1.Args[1]
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (Xor64 x (Xor64 y x))
	// cond:
	// result: y
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpXor64 {
			break
		}
		y := v_1.Args[0]
		if x != v_1.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (Xor64 (Xor64 x y) x)
	// cond:
	// result: y
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpXor64 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (Xor64 (Xor64 x y) y)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpXor64 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if y != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpXor8 applies the generic rewrite rules for OpXor8
// values. It mutates v in place and reports whether a rule fired.
func rewriteValuegeneric_OpXor8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Xor8 x (Const8 <t> [c]))
	// cond: x.Op != OpConst8
	// result: (Xor8 (Const8 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst8) {
			break
		}
		// Canonicalize: constant operand first.
		v.reset(OpXor8)
		v0 := b.NewValue0(v.Line, OpConst8, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Xor8 x x)
	// cond:
	// result: (Const8 [0])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = 0
		return true
	}
	// match: (Xor8 (Const8 [0]) x)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Xor8 x (Xor8 x y))
	// cond:
	// result: y
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpXor8 {
			break
		}
		if x != v_1.Args[0] {
			break
		}
		y := v_1.Args[1]
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (Xor8 x (Xor8 y x))
	// cond:
	// result: y
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpXor8 {
			break
		}
		y := v_1.Args[0]
		if x != v_1.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (Xor8 (Xor8 x y) x)
	// cond:
	// result: y
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpXor8 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (Xor8 (Xor8 x y) y)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpXor8 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if y != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

func
rewriteValuegeneric_OpZero(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Zero (Load (OffPtr [c] (SP)) mem) mem)
	// cond: mem.Op == OpStaticCall && isSameSym(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize() + config.PtrSize
	// result: mem
	// Zeroing memory that was just returned by runtime.newobject is
	// redundant (newobject returns zeroed memory), so the Zero is
	// replaced by its memory argument.
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpLoad {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpOffPtr {
			break
		}
		c := v_0_0.AuxInt
		v_0_0_0 := v_0_0.Args[0]
		if v_0_0_0.Op != OpSP {
			break
		}
		mem := v_0.Args[1]
		if mem != v.Args[1] {
			break
		}
		if !(mem.Op == OpStaticCall && isSameSym(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize()+config.PtrSize) {
			break
		}
		v.reset(OpCopy)
		v.Type = mem.Type
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValuegeneric_OpZeroExt16to32 drops a zero-extension of a
// truncated unsigned right shift when the shift amount already
// guarantees the upper bits are zero (s >= 16 here).
func rewriteValuegeneric_OpZeroExt16to32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ZeroExt16to32 (Trunc32to16 x:(Rsh32Ux64 _ (Const64 [s]))))
	// cond: s >= 16
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpTrunc32to16 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpRsh32Ux64 {
			break
		}
		x_1 := x.Args[1]
		if x_1.Op != OpConst64 {
			break
		}
		s := x_1.AuxInt
		if !(s >= 16) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpZeroExt16to64: same elision as above for the
// 16->64 extension; the shift must clear the top 48 bits.
func rewriteValuegeneric_OpZeroExt16to64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ZeroExt16to64 (Trunc64to16 x:(Rsh64Ux64 _ (Const64 [s]))))
	// cond: s >= 48
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpTrunc64to16 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpRsh64Ux64 {
			break
		}
		x_1 := x.Args[1]
		if x_1.Op != OpConst64 {
			break
		}
		s := x_1.AuxInt
		if !(s >= 48) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpZeroExt32to64: 32->64 variant; shift must
// clear the top 32 bits.
func rewriteValuegeneric_OpZeroExt32to64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ZeroExt32to64 (Trunc64to32 x:(Rsh64Ux64 _ (Const64 [s]))))
	// cond: s >= 32
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpTrunc64to32 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpRsh64Ux64 {
			break
		}
		x_1 := x.Args[1]
		if x_1.Op != OpConst64 {
			break
		}
		s := x_1.AuxInt
		if !(s >= 32) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpZeroExt8to16: 8->16 variant; shift must clear
// the top 8 bits.
func rewriteValuegeneric_OpZeroExt8to16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ZeroExt8to16 (Trunc16to8 x:(Rsh16Ux64 _ (Const64 [s]))))
	// cond: s >= 8
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpTrunc16to8 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpRsh16Ux64 {
			break
		}
		x_1 := x.Args[1]
		if x_1.Op != OpConst64 {
			break
		}
		s := x_1.AuxInt
		if !(s >= 8) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpZeroExt8to32: 8->32 variant; shift must clear
// the top 24 bits. (The condition constant continues in the next chunk.)
func rewriteValuegeneric_OpZeroExt8to32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ZeroExt8to32 (Trunc32to8 x:(Rsh32Ux64 _ (Const64 [s]))))
	// cond: s >= 24
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpTrunc32to8 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpRsh32Ux64 {
			break
		}
		x_1 := x.Args[1]
		if x_1.Op != OpConst64 {
			break
		}
		s := x_1.AuxInt
		if !(s >=
24) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpZeroExt8to64 drops a zero-extension of a
// truncated unsigned right shift when the shift amount already
// guarantees the upper 56 bits are zero.
func rewriteValuegeneric_OpZeroExt8to64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ZeroExt8to64 (Trunc64to8 x:(Rsh64Ux64 _ (Const64 [s]))))
	// cond: s >= 56
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpTrunc64to8 {
			break
		}
		x := v_0.Args[0]
		if x.Op != OpRsh64Ux64 {
			break
		}
		x_1 := x.Args[1]
		if x_1.Op != OpConst64 {
			break
		}
		s := x_1.AuxInt
		if !(s >= 56) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteBlockgeneric applies the generic rewrite rules for control-flow
// blocks. For an If block it folds a negated condition into a successor
// swap and resolves constant-bool conditions to a First block. Returns
// true if b was rewritten.
func rewriteBlockgeneric(b *Block, config *Config) bool {
	switch b.Kind {
	case BlockIf:
		// match: (If (Not cond) yes no)
		// cond:
		// result: (If cond no yes)
		for {
			v := b.Control
			if v.Op != OpNot {
				break
			}
			cond := v.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockIf
			b.SetControl(cond)
			b.swapSuccessors()
			_ = no
			_ = yes
			return true
		}
		// match: (If (ConstBool [c]) yes no)
		// cond: c == 1
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpConstBool {
				break
			}
			c := v.AuxInt
			yes := b.Succs[0]
			no := b.Succs[1]
			if !(c == 1) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			_ = yes
			_ = no
			return true
		}
		// match: (If (ConstBool [c]) yes no)
		// cond: c == 0
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpConstBool {
				break
			}
			c := v.AuxInt
			yes := b.Succs[0]
			no := b.Succs[1]
			if !(c == 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			_ = no
			_ = yes
			return true
		}
	}
	return false
}