github.com/sanprasirt/go@v0.0.0-20170607001320-a027466e4b6d/src/cmd/compile/internal/ssa/rewritegeneric.go

// Code generated from gen/generic.rules; DO NOT EDIT.
// generated with: cd gen; go run *.go

package ssa

import "math"
import "cmd/internal/obj"
import "cmd/internal/objabi"
import "cmd/compile/internal/types"

var _ = math.MinInt8   // in case not otherwise used
var _ = obj.ANOP       // in case not otherwise used
var _ = objabi.GOROOT  // in case not otherwise used
var _ = types.TypeMem  // in case not otherwise used
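// rewriteValuegeneric dispatches on v.Op to the per-opcode rewrite helpers
// below. Each helper is generated from gen/generic.rules and documents every
// rule it implements as a // match / // cond / // result comment triple. The
// helpers for one opcode are chained with ||, so rewriting stops at the first
// rule that fires, and the boolean result reports whether v was changed; the
// _0/_10/_20 suffixes are simply successive chunks of that opcode's rule list.
// For example, the first Add16 rule below folds
//	(Add16 (Const16 [c]) (Const16 [d]))
// into
//	(Const16 [int64(int16(c+d))]).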
func rewriteValuegeneric(v *Value) bool {
	switch v.Op {
	case OpAdd16:
		return rewriteValuegeneric_OpAdd16_0(v) || rewriteValuegeneric_OpAdd16_10(v) || rewriteValuegeneric_OpAdd16_20(v)
	case OpAdd32:
		return rewriteValuegeneric_OpAdd32_0(v) || rewriteValuegeneric_OpAdd32_10(v) || rewriteValuegeneric_OpAdd32_20(v)
	case OpAdd32F:
		return rewriteValuegeneric_OpAdd32F_0(v)
	case OpAdd64:
		return rewriteValuegeneric_OpAdd64_0(v) || rewriteValuegeneric_OpAdd64_10(v) || rewriteValuegeneric_OpAdd64_20(v)
	case OpAdd64F:
		return rewriteValuegeneric_OpAdd64F_0(v)
	case OpAdd8:
		return rewriteValuegeneric_OpAdd8_0(v) || rewriteValuegeneric_OpAdd8_10(v) || rewriteValuegeneric_OpAdd8_20(v)
	case OpAddPtr:
		return rewriteValuegeneric_OpAddPtr_0(v)
	case OpAnd16:
		return rewriteValuegeneric_OpAnd16_0(v) || rewriteValuegeneric_OpAnd16_10(v)
	case OpAnd32:
		return rewriteValuegeneric_OpAnd32_0(v) || rewriteValuegeneric_OpAnd32_10(v)
	case OpAnd64:
		return rewriteValuegeneric_OpAnd64_0(v) || rewriteValuegeneric_OpAnd64_10(v) || rewriteValuegeneric_OpAnd64_20(v)
	case OpAnd8:
		return rewriteValuegeneric_OpAnd8_0(v) || rewriteValuegeneric_OpAnd8_10(v)
	case OpArg:
		return rewriteValuegeneric_OpArg_0(v) || rewriteValuegeneric_OpArg_10(v)
	case OpArraySelect:
		return rewriteValuegeneric_OpArraySelect_0(v)
	case OpCom16:
		return rewriteValuegeneric_OpCom16_0(v)
	case OpCom32:
		return rewriteValuegeneric_OpCom32_0(v)
	case OpCom64:
		return rewriteValuegeneric_OpCom64_0(v)
	case OpCom8:
		return rewriteValuegeneric_OpCom8_0(v)
	case OpConstInterface:
		return rewriteValuegeneric_OpConstInterface_0(v)
	case OpConstSlice:
		return rewriteValuegeneric_OpConstSlice_0(v)
	case OpConstString:
		return rewriteValuegeneric_OpConstString_0(v)
	case OpConvert:
		return rewriteValuegeneric_OpConvert_0(v)
	case OpCvt32Fto64F:
		return rewriteValuegeneric_OpCvt32Fto64F_0(v)
	case OpCvt64Fto32F:
		return rewriteValuegeneric_OpCvt64Fto32F_0(v)
	case OpDiv16:
		return rewriteValuegeneric_OpDiv16_0(v)
	case OpDiv16u:
		return rewriteValuegeneric_OpDiv16u_0(v)
	case OpDiv32:
		return rewriteValuegeneric_OpDiv32_0(v)
	case OpDiv32F:
		return rewriteValuegeneric_OpDiv32F_0(v)
	case OpDiv32u:
		return rewriteValuegeneric_OpDiv32u_0(v)
	case OpDiv64:
		return rewriteValuegeneric_OpDiv64_0(v)
	case OpDiv64F:
		return rewriteValuegeneric_OpDiv64F_0(v)
	case OpDiv64u:
		return rewriteValuegeneric_OpDiv64u_0(v)
	case OpDiv8:
		return rewriteValuegeneric_OpDiv8_0(v)
	case OpDiv8u:
		return rewriteValuegeneric_OpDiv8u_0(v)
	case OpEq16:
		return rewriteValuegeneric_OpEq16_0(v)
	case OpEq32:
		return rewriteValuegeneric_OpEq32_0(v)
	case OpEq64:
		return rewriteValuegeneric_OpEq64_0(v)
	case OpEq8:
		return rewriteValuegeneric_OpEq8_0(v)
	case OpEqB:
		return rewriteValuegeneric_OpEqB_0(v)
	case OpEqInter:
		return rewriteValuegeneric_OpEqInter_0(v)
	case OpEqPtr:
		return rewriteValuegeneric_OpEqPtr_0(v)
	case OpEqSlice:
		return rewriteValuegeneric_OpEqSlice_0(v)
	case OpGeq16:
		return rewriteValuegeneric_OpGeq16_0(v)
	case OpGeq16U:
		return rewriteValuegeneric_OpGeq16U_0(v)
	case OpGeq32:
		return rewriteValuegeneric_OpGeq32_0(v)
	case OpGeq32U:
		return rewriteValuegeneric_OpGeq32U_0(v)
	case OpGeq64:
		return rewriteValuegeneric_OpGeq64_0(v)
	case OpGeq64U:
		return rewriteValuegeneric_OpGeq64U_0(v)
	case OpGeq8:
		return rewriteValuegeneric_OpGeq8_0(v)
	case OpGeq8U:
		return rewriteValuegeneric_OpGeq8U_0(v)
	case OpGreater16:
		return rewriteValuegeneric_OpGreater16_0(v)
	case OpGreater16U:
		return rewriteValuegeneric_OpGreater16U_0(v)
	case OpGreater32:
		return rewriteValuegeneric_OpGreater32_0(v)
	case OpGreater32U:
		return rewriteValuegeneric_OpGreater32U_0(v)
	case OpGreater64:
		return rewriteValuegeneric_OpGreater64_0(v)
	case OpGreater64U:
		return rewriteValuegeneric_OpGreater64U_0(v)
	case OpGreater8:
		return rewriteValuegeneric_OpGreater8_0(v)
	case OpGreater8U:
		return rewriteValuegeneric_OpGreater8U_0(v)
	case OpIMake:
		return rewriteValuegeneric_OpIMake_0(v)
	case OpInterCall:
		return rewriteValuegeneric_OpInterCall_0(v)
	case OpIsInBounds:
		return rewriteValuegeneric_OpIsInBounds_0(v) || rewriteValuegeneric_OpIsInBounds_10(v) || rewriteValuegeneric_OpIsInBounds_20(v)
	case OpIsNonNil:
		return rewriteValuegeneric_OpIsNonNil_0(v)
	case OpIsSliceInBounds:
		return rewriteValuegeneric_OpIsSliceInBounds_0(v)
	case OpLeq16:
		return rewriteValuegeneric_OpLeq16_0(v)
	case OpLeq16U:
		return rewriteValuegeneric_OpLeq16U_0(v)
	case OpLeq32:
		return rewriteValuegeneric_OpLeq32_0(v)
	case OpLeq32U:
		return rewriteValuegeneric_OpLeq32U_0(v)
	case OpLeq64:
		return rewriteValuegeneric_OpLeq64_0(v)
	case OpLeq64U:
		return rewriteValuegeneric_OpLeq64U_0(v)
	case OpLeq8:
		return rewriteValuegeneric_OpLeq8_0(v)
	case OpLeq8U:
		return rewriteValuegeneric_OpLeq8U_0(v)
	case OpLess16:
		return rewriteValuegeneric_OpLess16_0(v)
	case OpLess16U:
		return rewriteValuegeneric_OpLess16U_0(v)
	case OpLess32:
		return rewriteValuegeneric_OpLess32_0(v)
	case OpLess32U:
		return rewriteValuegeneric_OpLess32U_0(v)
	case OpLess64:
		return rewriteValuegeneric_OpLess64_0(v)
	case OpLess64U:
		return rewriteValuegeneric_OpLess64U_0(v)
	case OpLess8:
		return rewriteValuegeneric_OpLess8_0(v)
	case OpLess8U:
		return rewriteValuegeneric_OpLess8U_0(v)
	case OpLoad:
		return rewriteValuegeneric_OpLoad_0(v)
	case OpLsh16x16:
		return rewriteValuegeneric_OpLsh16x16_0(v)
	case OpLsh16x32:
		return rewriteValuegeneric_OpLsh16x32_0(v)
	case OpLsh16x64:
		return rewriteValuegeneric_OpLsh16x64_0(v)
	case OpLsh16x8:
		return rewriteValuegeneric_OpLsh16x8_0(v)
	case OpLsh32x16:
		return rewriteValuegeneric_OpLsh32x16_0(v)
	case OpLsh32x32:
		return rewriteValuegeneric_OpLsh32x32_0(v)
	case OpLsh32x64:
		return rewriteValuegeneric_OpLsh32x64_0(v)
	case OpLsh32x8:
		return rewriteValuegeneric_OpLsh32x8_0(v)
	case OpLsh64x16:
		return rewriteValuegeneric_OpLsh64x16_0(v)
	case OpLsh64x32:
		return rewriteValuegeneric_OpLsh64x32_0(v)
	case OpLsh64x64:
		return rewriteValuegeneric_OpLsh64x64_0(v)
	case OpLsh64x8:
		return rewriteValuegeneric_OpLsh64x8_0(v)
	case OpLsh8x16:
		return rewriteValuegeneric_OpLsh8x16_0(v)
	case OpLsh8x32:
		return rewriteValuegeneric_OpLsh8x32_0(v)
	case OpLsh8x64:
		return rewriteValuegeneric_OpLsh8x64_0(v)
	case OpLsh8x8:
		return rewriteValuegeneric_OpLsh8x8_0(v)
	case OpMod16:
		return rewriteValuegeneric_OpMod16_0(v)
	case OpMod16u:
		return rewriteValuegeneric_OpMod16u_0(v)
	case OpMod32:
		return rewriteValuegeneric_OpMod32_0(v)
	case OpMod32u:
		return rewriteValuegeneric_OpMod32u_0(v)
	case OpMod64:
		return rewriteValuegeneric_OpMod64_0(v)
	case OpMod64u:
		return rewriteValuegeneric_OpMod64u_0(v)
	case OpMod8:
		return rewriteValuegeneric_OpMod8_0(v)
	case OpMod8u:
		return rewriteValuegeneric_OpMod8u_0(v)
	case OpMul16:
		return rewriteValuegeneric_OpMul16_0(v) || rewriteValuegeneric_OpMul16_10(v)
	case OpMul32:
		return rewriteValuegeneric_OpMul32_0(v) || rewriteValuegeneric_OpMul32_10(v)
	case OpMul32F:
		return rewriteValuegeneric_OpMul32F_0(v)
	case OpMul64:
		return rewriteValuegeneric_OpMul64_0(v) || rewriteValuegeneric_OpMul64_10(v)
	case OpMul64F:
		return rewriteValuegeneric_OpMul64F_0(v)
	case OpMul8:
		return rewriteValuegeneric_OpMul8_0(v) || rewriteValuegeneric_OpMul8_10(v)
	case OpNeg16:
		return rewriteValuegeneric_OpNeg16_0(v)
	case OpNeg32:
		return rewriteValuegeneric_OpNeg32_0(v)
	case OpNeg32F:
		return rewriteValuegeneric_OpNeg32F_0(v)
	case OpNeg64:
		return rewriteValuegeneric_OpNeg64_0(v)
	case OpNeg64F:
		return rewriteValuegeneric_OpNeg64F_0(v)
	case OpNeg8:
		return rewriteValuegeneric_OpNeg8_0(v)
	case OpNeq16:
		return rewriteValuegeneric_OpNeq16_0(v)
	case OpNeq32:
		return rewriteValuegeneric_OpNeq32_0(v)
	case OpNeq64:
		return rewriteValuegeneric_OpNeq64_0(v)
	case OpNeq8:
		return rewriteValuegeneric_OpNeq8_0(v)
	case OpNeqB:
		return rewriteValuegeneric_OpNeqB_0(v)
	case OpNeqInter:
		return rewriteValuegeneric_OpNeqInter_0(v)
	case OpNeqPtr:
		return rewriteValuegeneric_OpNeqPtr_0(v)
	case OpNeqSlice:
		return rewriteValuegeneric_OpNeqSlice_0(v)
	case OpNilCheck:
		return rewriteValuegeneric_OpNilCheck_0(v)
	case OpNot:
		return rewriteValuegeneric_OpNot_0(v) || rewriteValuegeneric_OpNot_10(v) || rewriteValuegeneric_OpNot_20(v) || rewriteValuegeneric_OpNot_30(v) || rewriteValuegeneric_OpNot_40(v)
	case OpOffPtr:
		return rewriteValuegeneric_OpOffPtr_0(v)
	case OpOr16:
		return rewriteValuegeneric_OpOr16_0(v) || rewriteValuegeneric_OpOr16_10(v) || rewriteValuegeneric_OpOr16_20(v)
	case OpOr32:
		return rewriteValuegeneric_OpOr32_0(v) || rewriteValuegeneric_OpOr32_10(v) || rewriteValuegeneric_OpOr32_20(v)
	case OpOr64:
		return rewriteValuegeneric_OpOr64_0(v) || rewriteValuegeneric_OpOr64_10(v) || rewriteValuegeneric_OpOr64_20(v)
	case OpOr8:
		return rewriteValuegeneric_OpOr8_0(v) || rewriteValuegeneric_OpOr8_10(v) || rewriteValuegeneric_OpOr8_20(v)
	case OpPhi:
		return rewriteValuegeneric_OpPhi_0(v)
	case OpPtrIndex:
		return rewriteValuegeneric_OpPtrIndex_0(v)
	case OpRound32F:
		return rewriteValuegeneric_OpRound32F_0(v)
	case OpRound64F:
		return rewriteValuegeneric_OpRound64F_0(v)
	case OpRsh16Ux16:
		return rewriteValuegeneric_OpRsh16Ux16_0(v)
	case OpRsh16Ux32:
		return rewriteValuegeneric_OpRsh16Ux32_0(v)
	case OpRsh16Ux64:
		return rewriteValuegeneric_OpRsh16Ux64_0(v)
	case OpRsh16Ux8:
		return rewriteValuegeneric_OpRsh16Ux8_0(v)
	case OpRsh16x16:
		return rewriteValuegeneric_OpRsh16x16_0(v)
	case OpRsh16x32:
		return rewriteValuegeneric_OpRsh16x32_0(v)
	case OpRsh16x64:
		return rewriteValuegeneric_OpRsh16x64_0(v)
	case OpRsh16x8:
		return rewriteValuegeneric_OpRsh16x8_0(v)
	case OpRsh32Ux16:
		return rewriteValuegeneric_OpRsh32Ux16_0(v)
	case OpRsh32Ux32:
		return rewriteValuegeneric_OpRsh32Ux32_0(v)
	case OpRsh32Ux64:
		return rewriteValuegeneric_OpRsh32Ux64_0(v)
	case OpRsh32Ux8:
		return rewriteValuegeneric_OpRsh32Ux8_0(v)
	case OpRsh32x16:
		return rewriteValuegeneric_OpRsh32x16_0(v)
	case OpRsh32x32:
		return rewriteValuegeneric_OpRsh32x32_0(v)
	case OpRsh32x64:
		return rewriteValuegeneric_OpRsh32x64_0(v)
	case OpRsh32x8:
		return rewriteValuegeneric_OpRsh32x8_0(v)
	case OpRsh64Ux16:
		return rewriteValuegeneric_OpRsh64Ux16_0(v)
	case OpRsh64Ux32:
		return rewriteValuegeneric_OpRsh64Ux32_0(v)
	case OpRsh64Ux64:
		return rewriteValuegeneric_OpRsh64Ux64_0(v)
	case OpRsh64Ux8:
		return rewriteValuegeneric_OpRsh64Ux8_0(v)
	case OpRsh64x16:
		return rewriteValuegeneric_OpRsh64x16_0(v)
	case OpRsh64x32:
		return rewriteValuegeneric_OpRsh64x32_0(v)
	case OpRsh64x64:
		return rewriteValuegeneric_OpRsh64x64_0(v)
	case OpRsh64x8:
		return rewriteValuegeneric_OpRsh64x8_0(v)
	case OpRsh8Ux16:
		return rewriteValuegeneric_OpRsh8Ux16_0(v)
	case OpRsh8Ux32:
		return rewriteValuegeneric_OpRsh8Ux32_0(v)
	case OpRsh8Ux64:
		return rewriteValuegeneric_OpRsh8Ux64_0(v)
	case OpRsh8Ux8:
		return rewriteValuegeneric_OpRsh8Ux8_0(v)
	case OpRsh8x16:
		return rewriteValuegeneric_OpRsh8x16_0(v)
	case OpRsh8x32:
		return rewriteValuegeneric_OpRsh8x32_0(v)
	case OpRsh8x64:
		return rewriteValuegeneric_OpRsh8x64_0(v)
	case OpRsh8x8:
		return rewriteValuegeneric_OpRsh8x8_0(v)
	case OpSignExt16to32:
		return rewriteValuegeneric_OpSignExt16to32_0(v)
	case OpSignExt16to64:
		return rewriteValuegeneric_OpSignExt16to64_0(v)
	case OpSignExt32to64:
		return rewriteValuegeneric_OpSignExt32to64_0(v)
	case OpSignExt8to16:
		return rewriteValuegeneric_OpSignExt8to16_0(v)
	case OpSignExt8to32:
		return rewriteValuegeneric_OpSignExt8to32_0(v)
	case OpSignExt8to64:
		return rewriteValuegeneric_OpSignExt8to64_0(v)
	case OpSliceCap:
		return rewriteValuegeneric_OpSliceCap_0(v)
	case OpSliceLen:
		return rewriteValuegeneric_OpSliceLen_0(v)
	case OpSlicePtr:
		return rewriteValuegeneric_OpSlicePtr_0(v)
	case OpSlicemask:
		return rewriteValuegeneric_OpSlicemask_0(v)
	case OpSqrt:
		return rewriteValuegeneric_OpSqrt_0(v)
	case OpStore:
		return rewriteValuegeneric_OpStore_0(v) || rewriteValuegeneric_OpStore_10(v)
	case OpStringLen:
		return rewriteValuegeneric_OpStringLen_0(v)
	case OpStringPtr:
		return rewriteValuegeneric_OpStringPtr_0(v)
	case OpStructSelect:
		return rewriteValuegeneric_OpStructSelect_0(v) || rewriteValuegeneric_OpStructSelect_10(v)
	case OpSub16:
		return rewriteValuegeneric_OpSub16_0(v) || rewriteValuegeneric_OpSub16_10(v)
	case OpSub32:
		return rewriteValuegeneric_OpSub32_0(v) || rewriteValuegeneric_OpSub32_10(v)
	case OpSub32F:
		return rewriteValuegeneric_OpSub32F_0(v)
	case OpSub64:
		return rewriteValuegeneric_OpSub64_0(v) || rewriteValuegeneric_OpSub64_10(v)
	case OpSub64F:
		return rewriteValuegeneric_OpSub64F_0(v)
	case OpSub8:
		return rewriteValuegeneric_OpSub8_0(v) || rewriteValuegeneric_OpSub8_10(v)
	case OpTrunc16to8:
		return rewriteValuegeneric_OpTrunc16to8_0(v)
	case OpTrunc32to16:
		return rewriteValuegeneric_OpTrunc32to16_0(v)
	case OpTrunc32to8:
		return rewriteValuegeneric_OpTrunc32to8_0(v)
	case OpTrunc64to16:
		return rewriteValuegeneric_OpTrunc64to16_0(v)
	case OpTrunc64to32:
		return rewriteValuegeneric_OpTrunc64to32_0(v)
	case OpTrunc64to8:
		return rewriteValuegeneric_OpTrunc64to8_0(v)
	case OpXor16:
		return rewriteValuegeneric_OpXor16_0(v) || rewriteValuegeneric_OpXor16_10(v)
	case OpXor32:
		return rewriteValuegeneric_OpXor32_0(v) || rewriteValuegeneric_OpXor32_10(v)
	case OpXor64:
		return rewriteValuegeneric_OpXor64_0(v) || rewriteValuegeneric_OpXor64_10(v)
	case OpXor8:
		return rewriteValuegeneric_OpXor8_0(v) || rewriteValuegeneric_OpXor8_10(v)
	case OpZero:
		return rewriteValuegeneric_OpZero_0(v)
	case OpZeroExt16to32:
		return rewriteValuegeneric_OpZeroExt16to32_0(v)
	case OpZeroExt16to64:
		return rewriteValuegeneric_OpZeroExt16to64_0(v)
	case OpZeroExt32to64:
		return rewriteValuegeneric_OpZeroExt32to64_0(v)
	case OpZeroExt8to16:
		return rewriteValuegeneric_OpZeroExt8to16_0(v)
	case OpZeroExt8to32:
		return rewriteValuegeneric_OpZeroExt8to32_0(v)
	case OpZeroExt8to64:
		return rewriteValuegeneric_OpZeroExt8to64_0(v)
	}
	return false
}
func rewriteValuegeneric_OpAdd16_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Add16 (Const16 [c]) (Const16 [d]))
	// cond:
	// result: (Const16 [int64(int16(c+d))])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst16)
		v.AuxInt = int64(int16(c + d))
		return true
	}
	// match: (Add16 (Const16 [d]) (Const16 [c]))
	// cond:
	// result: (Const16 [int64(int16(c+d))])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		d := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpConst16)
		v.AuxInt = int64(int16(c + d))
		return true
	}
	// match: (Add16 (Const16 [0]) x)
	// cond:
	// result: x
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Add16 x (Const16 [0]))
	// cond:
	// result: x
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Add16 (Const16 [1]) (Com16 x))
	// cond:
	// result: (Neg16 x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		if v_0.AuxInt != 1 {
			break
		}
		v_1 := v.Args[1]
		if v_1.Op != OpCom16 {
			break
		}
		x := v_1.Args[0]
		v.reset(OpNeg16)
		v.AddArg(x)
		return true
	}
	// match: (Add16 (Com16 x) (Const16 [1]))
	// cond:
	// result: (Neg16 x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpCom16 {
			break
		}
		x := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		if v_1.AuxInt != 1 {
			break
		}
		v.reset(OpNeg16)
		v.AddArg(x)
		return true
	}
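	// The remaining Add16 rules reassociate add/sub chains so that constant
	// operands end up adjacent: for instance (Add16 (Add16 i:(Const16 <t>) z) x)
	// becomes (Add16 i (Add16 <t> z x)). Once two constants sit next to each
	// other, the combining rules in rewriteValuegeneric_OpAdd16_10 and _20, such
	// as (Add16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x)) ->
	// (Add16 (Const16 <t> [int64(int16(c+d))]) x), can fold them. The
	// z.Op != OpConst16 && x.Op != OpConst16 conditions appear to be what keeps
	// these rules from re-firing on operands that are already constants.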
	// match: (Add16 (Add16 i:(Const16 <t>) z) x)
	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
	// result: (Add16 i (Add16 <t> z x))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAdd16 {
			break
		}
		_ = v_0.Args[1]
		i := v_0.Args[0]
		if i.Op != OpConst16 {
			break
		}
		t := i.Type
		z := v_0.Args[1]
		x := v.Args[1]
		if !(z.Op != OpConst16 && x.Op != OpConst16) {
			break
		}
		v.reset(OpAdd16)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpAdd16, t)
		v0.AddArg(z)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Add16 (Add16 z i:(Const16 <t>)) x)
	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
	// result: (Add16 i (Add16 <t> z x))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAdd16 {
			break
		}
		_ = v_0.Args[1]
		z := v_0.Args[0]
		i := v_0.Args[1]
		if i.Op != OpConst16 {
			break
		}
		t := i.Type
		x := v.Args[1]
		if !(z.Op != OpConst16 && x.Op != OpConst16) {
			break
		}
		v.reset(OpAdd16)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpAdd16, t)
		v0.AddArg(z)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Add16 x (Add16 i:(Const16 <t>) z))
	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
	// result: (Add16 i (Add16 <t> z x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpAdd16 {
			break
		}
		_ = v_1.Args[1]
		i := v_1.Args[0]
		if i.Op != OpConst16 {
			break
		}
		t := i.Type
		z := v_1.Args[1]
		if !(z.Op != OpConst16 && x.Op != OpConst16) {
			break
		}
		v.reset(OpAdd16)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpAdd16, t)
		v0.AddArg(z)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Add16 x (Add16 z i:(Const16 <t>)))
	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
	// result: (Add16 i (Add16 <t> z x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpAdd16 {
			break
		}
		_ = v_1.Args[1]
		z := v_1.Args[0]
		i := v_1.Args[1]
		if i.Op != OpConst16 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst16 && x.Op != OpConst16) {
			break
		}
		v.reset(OpAdd16)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpAdd16, t)
		v0.AddArg(z)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}
func rewriteValuegeneric_OpAdd16_10(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Add16 (Sub16 i:(Const16 <t>) z) x)
	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
	// result: (Add16 i (Sub16 <t> x z))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpSub16 {
			break
		}
		_ = v_0.Args[1]
		i := v_0.Args[0]
		if i.Op != OpConst16 {
			break
		}
		t := i.Type
		z := v_0.Args[1]
		x := v.Args[1]
		if !(z.Op != OpConst16 && x.Op != OpConst16) {
			break
		}
		v.reset(OpAdd16)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpSub16, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add16 x (Sub16 i:(Const16 <t>) z))
	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
	// result: (Add16 i (Sub16 <t> x z))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpSub16 {
			break
		}
		_ = v_1.Args[1]
		i := v_1.Args[0]
		if i.Op != OpConst16 {
			break
		}
		t := i.Type
		z := v_1.Args[1]
		if !(z.Op != OpConst16 && x.Op != OpConst16) {
			break
		}
		v.reset(OpAdd16)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpSub16, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add16 x (Sub16 i:(Const16 <t>) z))
	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
	// result: (Add16 i (Sub16 <t> x z))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpSub16 {
			break
		}
		_ = v_1.Args[1]
		i := v_1.Args[0]
		if i.Op != OpConst16 {
			break
		}
		t := i.Type
		z := v_1.Args[1]
		if !(z.Op != OpConst16 && x.Op != OpConst16) {
			break
		}
		v.reset(OpAdd16)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpSub16, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add16 (Sub16 i:(Const16 <t>) z) x)
	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
	// result: (Add16 i (Sub16 <t> x z))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpSub16 {
			break
		}
		_ = v_0.Args[1]
		i := v_0.Args[0]
		if i.Op != OpConst16 {
			break
		}
		t := i.Type
		z := v_0.Args[1]
		x := v.Args[1]
		if !(z.Op != OpConst16 && x.Op != OpConst16) {
			break
		}
		v.reset(OpAdd16)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpSub16, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add16 (Sub16 z i:(Const16 <t>)) x)
	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
	// result: (Sub16 (Add16 <t> x z) i)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpSub16 {
			break
		}
		_ = v_0.Args[1]
		z := v_0.Args[0]
		i := v_0.Args[1]
		if i.Op != OpConst16 {
			break
		}
		t := i.Type
		x := v.Args[1]
		if !(z.Op != OpConst16 && x.Op != OpConst16) {
			break
		}
		v.reset(OpSub16)
		v0 := b.NewValue0(v.Pos, OpAdd16, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		v.AddArg(i)
		return true
	}
	// match: (Add16 x (Sub16 z i:(Const16 <t>)))
	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
	// result: (Sub16 (Add16 <t> x z) i)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpSub16 {
			break
		}
		_ = v_1.Args[1]
		z := v_1.Args[0]
		i := v_1.Args[1]
		if i.Op != OpConst16 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst16 && x.Op != OpConst16) {
			break
		}
		v.reset(OpSub16)
		v0 := b.NewValue0(v.Pos, OpAdd16, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		v.AddArg(i)
		return true
	}
	// match: (Add16 x (Sub16 z i:(Const16 <t>)))
	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
	// result: (Sub16 (Add16 <t> x z) i)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpSub16 {
			break
		}
		_ = v_1.Args[1]
		z := v_1.Args[0]
		i := v_1.Args[1]
		if i.Op != OpConst16 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst16 && x.Op != OpConst16) {
			break
		}
		v.reset(OpSub16)
		v0 := b.NewValue0(v.Pos, OpAdd16, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		v.AddArg(i)
		return true
	}
	// match: (Add16 (Sub16 z i:(Const16 <t>)) x)
	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
	// result: (Sub16 (Add16 <t> x z) i)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpSub16 {
			break
		}
		_ = v_0.Args[1]
		z := v_0.Args[0]
		i := v_0.Args[1]
		if i.Op != OpConst16 {
			break
		}
		t := i.Type
		x := v.Args[1]
		if !(z.Op != OpConst16 && x.Op != OpConst16) {
			break
		}
		v.reset(OpSub16)
		v0 := b.NewValue0(v.Pos, OpAdd16, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		v.AddArg(i)
		return true
	}
	// match: (Add16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x))
	// cond:
	// result: (Add16 (Const16 <t> [int64(int16(c+d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpAdd16 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst16 {
			break
		}
		if v_1_0.Type != t {
			break
		}
		d := v_1_0.AuxInt
		x := v_1.Args[1]
		v.reset(OpAdd16)
		v0 := b.NewValue0(v.Pos, OpConst16, t)
		v0.AuxInt = int64(int16(c + d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Add16 (Const16 <t> [c]) (Add16 x (Const16 <t> [d])))
	// cond:
	// result: (Add16 (Const16 <t> [int64(int16(c+d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpAdd16 {
			break
		}
		_ = v_1.Args[1]
		x := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpConst16 {
			break
		}
		if v_1_1.Type != t {
			break
		}
		d := v_1_1.AuxInt
		v.reset(OpAdd16)
		v0 := b.NewValue0(v.Pos, OpConst16, t)
		v0.AuxInt = int64(int16(c + d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValuegeneric_OpAdd16_20(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Add16 (Add16 (Const16 <t> [d]) x) (Const16 <t> [c]))
	// cond:
	// result: (Add16 (Const16 <t> [int64(int16(c+d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAdd16 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst16 {
			break
		}
		t := v_0_0.Type
		d := v_0_0.AuxInt
		x := v_0.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		if v_1.Type != t {
			break
		}
		c := v_1.AuxInt
		v.reset(OpAdd16)
		v0 := b.NewValue0(v.Pos, OpConst16, t)
		v0.AuxInt = int64(int16(c + d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Add16 (Add16 x (Const16 <t> [d])) (Const16 <t> [c]))
	// cond:
	// result: (Add16 (Const16 <t> [int64(int16(c+d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAdd16 {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst16 {
			break
		}
		t := v_0_1.Type
		d := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		if v_1.Type != t {
			break
		}
		c := v_1.AuxInt
		v.reset(OpAdd16)
		v0 := b.NewValue0(v.Pos, OpConst16, t)
		v0.AuxInt = int64(int16(c + d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Add16 (Const16 <t> [c]) (Sub16 (Const16 <t> [d]) x))
	// cond:
	// result: (Sub16 (Const16 <t> [int64(int16(c+d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpSub16 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst16 {
			break
		}
		if v_1_0.Type != t {
			break
		}
		d := v_1_0.AuxInt
		x := v_1.Args[1]
		v.reset(OpSub16)
		v0 := b.NewValue0(v.Pos, OpConst16, t)
		v0.AuxInt = int64(int16(c + d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Add16 (Sub16 (Const16 <t> [d]) x) (Const16 <t> [c]))
	// cond:
	// result: (Sub16 (Const16 <t> [int64(int16(c+d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpSub16 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst16 {
			break
		}
		t := v_0_0.Type
		d := v_0_0.AuxInt
		x := v_0.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		if v_1.Type != t {
			break
		}
		c := v_1.AuxInt
		v.reset(OpSub16)
		v0 := b.NewValue0(v.Pos, OpConst16, t)
		v0.AuxInt = int64(int16(c + d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Add16 (Const16 <t> [c]) (Sub16 x (Const16 <t> [d])))
	// cond:
	// result: (Add16 (Const16 <t> [int64(int16(c-d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpSub16 {
			break
		}
		_ = v_1.Args[1]
		x := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpConst16 {
			break
		}
		if v_1_1.Type != t {
			break
		}
		d := v_1_1.AuxInt
		v.reset(OpAdd16)
		v0 := b.NewValue0(v.Pos, OpConst16, t)
		v0.AuxInt = int64(int16(c - d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Add16 (Sub16 x (Const16 <t> [d])) (Const16 <t> [c]))
	// cond:
	// result: (Add16 (Const16 <t> [int64(int16(c-d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpSub16 {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst16 {
			break
		}
		t := v_0_1.Type
		d := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		if v_1.Type != t {
			break
		}
		c := v_1.AuxInt
		v.reset(OpAdd16)
		v0 := b.NewValue0(v.Pos, OpConst16, t)
		v0.AuxInt = int64(int16(c - d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValuegeneric_OpAdd32_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Add32 (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (Const32 [int64(int32(c+d))])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst32)
		v.AuxInt = int64(int32(c + d))
		return true
	}
	// match: (Add32 (Const32 [d]) (Const32 [c]))
	// cond:
	// result: (Const32 [int64(int32(c+d))])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		d := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpConst32)
		v.AuxInt = int64(int32(c + d))
		return true
	}
	// match: (Add32 (Const32 [0]) x)
	// cond:
	// result: x
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Add32 x (Const32 [0]))
	// cond:
	// result: x
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Add32 (Const32 [1]) (Com32 x))
	// cond:
	// result: (Neg32 x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		if v_0.AuxInt != 1 {
			break
		}
		v_1 := v.Args[1]
		if v_1.Op != OpCom32 {
			break
		}
		x := v_1.Args[0]
		v.reset(OpNeg32)
		v.AddArg(x)
		return true
	}
	// match: (Add32 (Com32 x) (Const32 [1]))
	// cond:
	// result: (Neg32 x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpCom32 {
			break
		}
		x := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		if v_1.AuxInt != 1 {
			break
		}
		v.reset(OpNeg32)
		v.AddArg(x)
		return true
	}
	// match: (Add32 (Add32 i:(Const32 <t>) z) x)
	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
	// result: (Add32 i (Add32 <t> z x))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAdd32 {
			break
		}
		_ = v_0.Args[1]
		i := v_0.Args[0]
		if i.Op != OpConst32 {
			break
		}
		t := i.Type
		z := v_0.Args[1]
		x := v.Args[1]
		if !(z.Op != OpConst32 && x.Op != OpConst32) {
			break
		}
		v.reset(OpAdd32)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpAdd32, t)
		v0.AddArg(z)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Add32 (Add32 z i:(Const32 <t>)) x)
	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
	// result: (Add32 i (Add32 <t> z x))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAdd32 {
			break
		}
		_ = v_0.Args[1]
		z := v_0.Args[0]
		i := v_0.Args[1]
		if i.Op != OpConst32 {
			break
		}
		t := i.Type
		x := v.Args[1]
		if !(z.Op != OpConst32 && x.Op != OpConst32) {
			break
		}
		v.reset(OpAdd32)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpAdd32, t)
		v0.AddArg(z)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Add32 x (Add32 i:(Const32 <t>) z))
	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
	// result: (Add32 i (Add32 <t> z x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpAdd32 {
			break
		}
		_ = v_1.Args[1]
		i := v_1.Args[0]
		if i.Op != OpConst32 {
			break
		}
		t := i.Type
		z := v_1.Args[1]
		if !(z.Op != OpConst32 && x.Op != OpConst32) {
			break
		}
		v.reset(OpAdd32)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpAdd32, t)
		v0.AddArg(z)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Add32 x (Add32 z i:(Const32 <t>)))
	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
	// result: (Add32 i (Add32 <t> z x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpAdd32 {
			break
		}
		_ = v_1.Args[1]
		z := v_1.Args[0]
		i := v_1.Args[1]
		if i.Op != OpConst32 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst32 && x.Op != OpConst32) {
			break
		}
		v.reset(OpAdd32)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpAdd32, t)
		v0.AddArg(z)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}
func rewriteValuegeneric_OpAdd32_10(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Add32 (Sub32 i:(Const32 <t>) z) x)
	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
	// result: (Add32 i (Sub32 <t> x z))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpSub32 {
			break
		}
		_ = v_0.Args[1]
		i := v_0.Args[0]
		if i.Op != OpConst32 {
			break
		}
		t := i.Type
		z := v_0.Args[1]
		x := v.Args[1]
		if !(z.Op != OpConst32 && x.Op != OpConst32) {
			break
		}
		v.reset(OpAdd32)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpSub32, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add32 x (Sub32 i:(Const32 <t>) z))
	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
	// result: (Add32 i (Sub32 <t> x z))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpSub32 {
			break
		}
		_ = v_1.Args[1]
		i := v_1.Args[0]
		if i.Op != OpConst32 {
			break
		}
		t := i.Type
		z := v_1.Args[1]
		if !(z.Op != OpConst32 && x.Op != OpConst32) {
			break
		}
		v.reset(OpAdd32)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpSub32, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add32 x (Sub32 i:(Const32 <t>) z))
	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
	// result: (Add32 i (Sub32 <t> x z))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpSub32 {
			break
		}
		_ = v_1.Args[1]
		i := v_1.Args[0]
		if i.Op != OpConst32 {
			break
		}
		t := i.Type
		z := v_1.Args[1]
		if !(z.Op != OpConst32 && x.Op != OpConst32) {
			break
		}
		v.reset(OpAdd32)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpSub32, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add32 (Sub32 i:(Const32 <t>) z) x)
	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
	// result: (Add32 i (Sub32 <t> x z))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpSub32 {
			break
		}
		_ = v_0.Args[1]
		i := v_0.Args[0]
		if i.Op != OpConst32 {
			break
		}
		t := i.Type
		z := v_0.Args[1]
		x := v.Args[1]
		if !(z.Op != OpConst32 && x.Op != OpConst32) {
			break
		}
		v.reset(OpAdd32)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpSub32, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add32 (Sub32 z i:(Const32 <t>)) x)
	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
	// result: (Sub32 (Add32 <t> x z) i)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpSub32 {
			break
		}
		_ = v_0.Args[1]
		z := v_0.Args[0]
		i := v_0.Args[1]
		if i.Op != OpConst32 {
			break
		}
		t := i.Type
		x := v.Args[1]
		if !(z.Op != OpConst32 && x.Op != OpConst32) {
			break
		}
		v.reset(OpSub32)
		v0 := b.NewValue0(v.Pos, OpAdd32, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		v.AddArg(i)
		return true
	}
	// match: (Add32 x (Sub32 z i:(Const32 <t>)))
	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
	// result: (Sub32 (Add32 <t> x z) i)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpSub32 {
			break
		}
		_ = v_1.Args[1]
		z := v_1.Args[0]
		i := v_1.Args[1]
		if i.Op != OpConst32 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst32 && x.Op != OpConst32) {
			break
		}
		v.reset(OpSub32)
		v0 := b.NewValue0(v.Pos, OpAdd32, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		v.AddArg(i)
		return true
	}
	// match: (Add32 x (Sub32 z i:(Const32 <t>)))
	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
	// result: (Sub32 (Add32 <t> x z) i)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpSub32 {
			break
		}
		_ = v_1.Args[1]
		z := v_1.Args[0]
		i := v_1.Args[1]
		if i.Op != OpConst32 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst32 && x.Op != OpConst32) {
			break
		}
		v.reset(OpSub32)
		v0 := b.NewValue0(v.Pos, OpAdd32, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		v.AddArg(i)
		return true
	}
	// match: (Add32 (Sub32 z i:(Const32 <t>)) x)
	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
	// result: (Sub32 (Add32 <t> x z) i)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpSub32 {
			break
		}
		_ = v_0.Args[1]
		z := v_0.Args[0]
		i := v_0.Args[1]
		if i.Op != OpConst32 {
			break
		}
		t := i.Type
		x := v.Args[1]
		if !(z.Op != OpConst32 && x.Op != OpConst32) {
			break
		}
		v.reset(OpSub32)
		v0 := b.NewValue0(v.Pos, OpAdd32, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		v.AddArg(i)
		return true
	}
	// match: (Add32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x))
	// cond:
	// result: (Add32 (Const32 <t> [int64(int32(c+d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpAdd32 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst32 {
			break
		}
		if v_1_0.Type != t {
			break
		}
		d := v_1_0.AuxInt
		x := v_1.Args[1]
		v.reset(OpAdd32)
		v0 := b.NewValue0(v.Pos, OpConst32, t)
		v0.AuxInt = int64(int32(c + d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Add32 (Const32 <t> [c]) (Add32 x (Const32 <t> [d])))
	// cond:
	// result: (Add32 (Const32 <t> [int64(int32(c+d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpAdd32 {
			break
		}
		_ = v_1.Args[1]
		x := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpConst32 {
			break
		}
		if v_1_1.Type != t {
			break
		}
		d := v_1_1.AuxInt
		v.reset(OpAdd32)
		v0 := b.NewValue0(v.Pos, OpConst32, t)
		v0.AuxInt = int64(int32(c + d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValuegeneric_OpAdd32_20(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Add32 (Add32 (Const32 <t> [d]) x) (Const32 <t> [c]))
	// cond:
	// result: (Add32 (Const32 <t> [int64(int32(c+d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAdd32 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst32 {
			break
		}
		t := v_0_0.Type
		d := v_0_0.AuxInt
		x := v_0.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		if v_1.Type != t {
			break
		}
		c := v_1.AuxInt
		v.reset(OpAdd32)
		v0 := b.NewValue0(v.Pos, OpConst32, t)
		v0.AuxInt = int64(int32(c + d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Add32 (Add32 x (Const32 <t> [d])) (Const32 <t> [c]))
	// cond:
	// result: (Add32 (Const32 <t> [int64(int32(c+d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAdd32 {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst32 {
			break
		}
		t := v_0_1.Type
		d := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		if v_1.Type != t {
			break
		}
		c := v_1.AuxInt
		v.reset(OpAdd32)
		v0 := b.NewValue0(v.Pos, OpConst32, t)
		v0.AuxInt = int64(int32(c + d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Add32 (Const32 <t> [c]) (Sub32 (Const32 <t> [d]) x))
	// cond:
	// result: (Sub32 (Const32 <t> [int64(int32(c+d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpSub32 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst32 {
			break
		}
		if v_1_0.Type != t {
			break
		}
		d := v_1_0.AuxInt
		x := v_1.Args[1]
		v.reset(OpSub32)
		v0 := b.NewValue0(v.Pos, OpConst32, t)
		v0.AuxInt = int64(int32(c + d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Add32 (Sub32 (Const32 <t> [d]) x) (Const32 <t> [c]))
	// cond:
	// result: (Sub32 (Const32 <t> [int64(int32(c+d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpSub32 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst32 {
			break
		}
		t := v_0_0.Type
		d := v_0_0.AuxInt
		x := v_0.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		if v_1.Type != t {
			break
		}
		c := v_1.AuxInt
		v.reset(OpSub32)
		v0 := b.NewValue0(v.Pos, OpConst32, t)
		v0.AuxInt = int64(int32(c + d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Add32 (Const32 <t> [c]) (Sub32 x (Const32 <t> [d])))
	// cond:
	// result: (Add32 (Const32 <t> [int64(int32(c-d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpSub32 {
			break
		}
		_ = v_1.Args[1]
		x := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpConst32 {
			break
		}
		if v_1_1.Type != t {
			break
		}
		d := v_1_1.AuxInt
		v.reset(OpAdd32)
		v0 := b.NewValue0(v.Pos, OpConst32, t)
		v0.AuxInt = int64(int32(c - d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Add32 (Sub32 x (Const32 <t> [d])) (Const32 <t> [c]))
	// cond:
	// result: (Add32 (Const32 <t> [int64(int32(c-d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpSub32 {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst32 {
			break
		}
		t := v_0_1.Type
		d := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		if v_1.Type != t {
			break
		}
		c := v_1.AuxInt
		v.reset(OpAdd32)
		v0 := b.NewValue0(v.Pos, OpConst32, t)
		v0.AuxInt = int64(int32(c - d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	return false
}
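// Floating-point constants are carried in AuxInt as a float64 bit pattern,
// which is why the Add32F/Add64F folding rules below go through the
// i2f32/i2f/f2i helpers (package-level helpers in ssa) rather than adding the
// AuxInt values directly. Also note that the x+0 identity rules below only
// fire when AuxInt is exactly 0, i.e. a positive-zero constant.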
func rewriteValuegeneric_OpAdd32F_0(v *Value) bool {
	// match: (Add32F (Const32F [c]) (Const32F [d]))
	// cond:
	// result: (Const32F [f2i(float64(i2f32(c) + i2f32(d)))])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst32F {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32F {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst32F)
		v.AuxInt = f2i(float64(i2f32(c) + i2f32(d)))
		return true
	}
	// match: (Add32F (Const32F [d]) (Const32F [c]))
	// cond:
	// result: (Const32F [f2i(float64(i2f32(c) + i2f32(d)))])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst32F {
			break
		}
		d := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32F {
			break
		}
		c := v_1.AuxInt
		v.reset(OpConst32F)
		v.AuxInt = f2i(float64(i2f32(c) + i2f32(d)))
		return true
	}
	// match: (Add32F x (Const32F [0]))
	// cond:
	// result: x
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32F {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Add32F (Const32F [0]) x)
	// cond:
	// result: x
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst32F {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}
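// For 64-bit integer adds the folded constant is the AuxInt itself, so the
// Add64 rules below can write [c+d] directly. The narrower Add8/Add16/Add32
// rules instead write int64(intN(c+d)), which appears to re-establish the
// convention that a narrow constant's AuxInt is kept as a sign-extended int64.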
func rewriteValuegeneric_OpAdd64_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Add64 (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (Const64 [c+d])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst64)
		v.AuxInt = c + d
		return true
	}
	// match: (Add64 (Const64 [d]) (Const64 [c]))
	// cond:
	// result: (Const64 [c+d])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		d := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpConst64)
		v.AuxInt = c + d
		return true
	}
	// match: (Add64 (Const64 [0]) x)
	// cond:
	// result: x
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Add64 x (Const64 [0]))
	// cond:
	// result: x
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Add64 (Const64 [1]) (Com64 x))
	// cond:
	// result: (Neg64 x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != 1 {
			break
		}
		v_1 := v.Args[1]
		if v_1.Op != OpCom64 {
			break
		}
		x := v_1.Args[0]
		v.reset(OpNeg64)
		v.AddArg(x)
		return true
	}
	// match: (Add64 (Com64 x) (Const64 [1]))
	// cond:
	// result: (Neg64 x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpCom64 {
			break
		}
		x := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		if v_1.AuxInt != 1 {
			break
		}
		v.reset(OpNeg64)
		v.AddArg(x)
		return true
	}
	// match: (Add64 (Add64 i:(Const64 <t>) z) x)
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (Add64 i (Add64 <t> z x))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAdd64 {
			break
		}
		_ = v_0.Args[1]
		i := v_0.Args[0]
		if i.Op != OpConst64 {
			break
		}
		t := i.Type
		z := v_0.Args[1]
		x := v.Args[1]
		if !(z.Op != OpConst64 && x.Op != OpConst64) {
			break
		}
		v.reset(OpAdd64)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpAdd64, t)
		v0.AddArg(z)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Add64 (Add64 z i:(Const64 <t>)) x)
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (Add64 i (Add64 <t> z x))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAdd64 {
			break
		}
		_ = v_0.Args[1]
		z := v_0.Args[0]
		i := v_0.Args[1]
		if i.Op != OpConst64 {
			break
		}
		t := i.Type
		x := v.Args[1]
		if !(z.Op != OpConst64 && x.Op != OpConst64) {
			break
		}
		v.reset(OpAdd64)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpAdd64, t)
		v0.AddArg(z)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Add64 x (Add64 i:(Const64 <t>) z))
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (Add64 i (Add64 <t> z x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpAdd64 {
			break
		}
		_ = v_1.Args[1]
		i := v_1.Args[0]
		if i.Op != OpConst64 {
			break
		}
		t := i.Type
		z := v_1.Args[1]
		if !(z.Op != OpConst64 && x.Op != OpConst64) {
			break
		}
		v.reset(OpAdd64)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpAdd64, t)
		v0.AddArg(z)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Add64 x (Add64 z i:(Const64 <t>)))
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (Add64 i (Add64 <t> z x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpAdd64 {
			break
		}
		_ = v_1.Args[1]
		z := v_1.Args[0]
		i := v_1.Args[1]
		if i.Op != OpConst64 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst64 && x.Op != OpConst64) {
			break
		}
		v.reset(OpAdd64)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpAdd64, t)
		v0.AddArg(z)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}
func rewriteValuegeneric_OpAdd64_10(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Add64 (Sub64 i:(Const64 <t>) z) x)
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (Add64 i (Sub64 <t> x z))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpSub64 {
			break
		}
		_ = v_0.Args[1]
		i := v_0.Args[0]
		if i.Op != OpConst64 {
			break
		}
		t := i.Type
		z := v_0.Args[1]
		x := v.Args[1]
		if !(z.Op != OpConst64 && x.Op != OpConst64) {
			break
		}
		v.reset(OpAdd64)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpSub64, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add64 x (Sub64 i:(Const64 <t>) z))
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (Add64 i (Sub64 <t> x z))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpSub64 {
			break
		}
		_ = v_1.Args[1]
		i := v_1.Args[0]
		if i.Op != OpConst64 {
			break
		}
		t := i.Type
		z := v_1.Args[1]
		if !(z.Op != OpConst64 && x.Op != OpConst64) {
			break
		}
		v.reset(OpAdd64)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpSub64, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add64 x (Sub64 i:(Const64 <t>) z))
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (Add64 i (Sub64 <t> x z))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpSub64 {
			break
		}
		_ = v_1.Args[1]
		i := v_1.Args[0]
		if i.Op != OpConst64 {
			break
		}
		t := i.Type
		z := v_1.Args[1]
		if !(z.Op != OpConst64 && x.Op != OpConst64) {
			break
		}
		v.reset(OpAdd64)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpSub64, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add64 (Sub64 i:(Const64 <t>) z) x)
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (Add64 i (Sub64 <t> x z))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpSub64 {
			break
		}
		_ = v_0.Args[1]
		i := v_0.Args[0]
		if i.Op != OpConst64 {
			break
		}
		t := i.Type
		z := v_0.Args[1]
		x := v.Args[1]
		if !(z.Op != OpConst64 && x.Op != OpConst64) {
			break
		}
		v.reset(OpAdd64)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpSub64, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add64 (Sub64 z i:(Const64 <t>)) x)
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (Sub64 (Add64 <t> x z) i)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpSub64 {
			break
		}
		_ = v_0.Args[1]
		z := v_0.Args[0]
		i := v_0.Args[1]
		if i.Op != OpConst64 {
			break
		}
		t := i.Type
		x := v.Args[1]
		if !(z.Op != OpConst64 && x.Op != OpConst64) {
			break
		}
		v.reset(OpSub64)
		v0 := b.NewValue0(v.Pos, OpAdd64, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		v.AddArg(i)
		return true
	}
	// match: (Add64 x (Sub64 z i:(Const64 <t>)))
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (Sub64 (Add64 <t> x z) i)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpSub64 {
			break
		}
		_ = v_1.Args[1]
		z := v_1.Args[0]
		i := v_1.Args[1]
		if i.Op != OpConst64 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst64 && x.Op != OpConst64) {
			break
		}
		v.reset(OpSub64)
		v0 := b.NewValue0(v.Pos, OpAdd64, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		v.AddArg(i)
		return true
	}
	// match: (Add64 x (Sub64 z i:(Const64 <t>)))
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (Sub64 (Add64 <t> x z) i)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpSub64 {
			break
		}
		_ = v_1.Args[1]
		z := v_1.Args[0]
		i := v_1.Args[1]
		if i.Op != OpConst64 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst64 && x.Op != OpConst64) {
			break
		}
		v.reset(OpSub64)
		v0 := b.NewValue0(v.Pos, OpAdd64, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		v.AddArg(i)
		return true
	}
	// match: (Add64 (Sub64 z i:(Const64 <t>)) x)
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (Sub64 (Add64 <t> x z) i)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpSub64 {
			break
		}
		_ = v_0.Args[1]
		z := v_0.Args[0]
		i := v_0.Args[1]
		if i.Op != OpConst64 {
			break
		}
		t := i.Type
		x := v.Args[1]
		if !(z.Op != OpConst64 && x.Op != OpConst64) {
			break
		}
		v.reset(OpSub64)
		v0 := b.NewValue0(v.Pos, OpAdd64, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		v.AddArg(i)
		return true
	}
	// match: (Add64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x))
	// cond:
	// result: (Add64 (Const64 <t> [c+d]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpAdd64 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst64 {
			break
		}
		if v_1_0.Type != t {
			break
		}
		d := v_1_0.AuxInt
		x := v_1.Args[1]
		v.reset(OpAdd64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = c + d
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Add64 (Const64 <t> [c]) (Add64 x (Const64 <t> [d])))
	// cond:
	// result: (Add64 (Const64 <t> [c+d]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpAdd64 {
			break
		}
		_ = v_1.Args[1]
		x := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpConst64 {
			break
		}
		if v_1_1.Type != t {
			break
		}
		d := v_1_1.AuxInt
		v.reset(OpAdd64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = c + d
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValuegeneric_OpAdd64_20(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Add64 (Add64 (Const64 <t> [d]) x) (Const64 <t> [c]))
	// cond:
	// result: (Add64 (Const64 <t> [c+d]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAdd64 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst64 {
			break
		}
		t := v_0_0.Type
		d := v_0_0.AuxInt
		x := v_0.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		if v_1.Type != t {
			break
		}
		c := v_1.AuxInt
		v.reset(OpAdd64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = c + d
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Add64 (Add64 x (Const64 <t> [d])) (Const64 <t> [c]))
	// cond:
	// result: (Add64 (Const64 <t> [c+d]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAdd64 {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		t := v_0_1.Type
		d := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		if v_1.Type != t {
			break
		}
v_1.AuxInt 2538 v.reset(OpAdd64) 2539 v0 := b.NewValue0(v.Pos, OpConst64, t) 2540 v0.AuxInt = c + d 2541 v.AddArg(v0) 2542 v.AddArg(x) 2543 return true 2544 } 2545 // match: (Add64 (Const64 <t> [c]) (Sub64 (Const64 <t> [d]) x)) 2546 // cond: 2547 // result: (Sub64 (Const64 <t> [c+d]) x) 2548 for { 2549 _ = v.Args[1] 2550 v_0 := v.Args[0] 2551 if v_0.Op != OpConst64 { 2552 break 2553 } 2554 t := v_0.Type 2555 c := v_0.AuxInt 2556 v_1 := v.Args[1] 2557 if v_1.Op != OpSub64 { 2558 break 2559 } 2560 _ = v_1.Args[1] 2561 v_1_0 := v_1.Args[0] 2562 if v_1_0.Op != OpConst64 { 2563 break 2564 } 2565 if v_1_0.Type != t { 2566 break 2567 } 2568 d := v_1_0.AuxInt 2569 x := v_1.Args[1] 2570 v.reset(OpSub64) 2571 v0 := b.NewValue0(v.Pos, OpConst64, t) 2572 v0.AuxInt = c + d 2573 v.AddArg(v0) 2574 v.AddArg(x) 2575 return true 2576 } 2577 // match: (Add64 (Sub64 (Const64 <t> [d]) x) (Const64 <t> [c])) 2578 // cond: 2579 // result: (Sub64 (Const64 <t> [c+d]) x) 2580 for { 2581 _ = v.Args[1] 2582 v_0 := v.Args[0] 2583 if v_0.Op != OpSub64 { 2584 break 2585 } 2586 _ = v_0.Args[1] 2587 v_0_0 := v_0.Args[0] 2588 if v_0_0.Op != OpConst64 { 2589 break 2590 } 2591 t := v_0_0.Type 2592 d := v_0_0.AuxInt 2593 x := v_0.Args[1] 2594 v_1 := v.Args[1] 2595 if v_1.Op != OpConst64 { 2596 break 2597 } 2598 if v_1.Type != t { 2599 break 2600 } 2601 c := v_1.AuxInt 2602 v.reset(OpSub64) 2603 v0 := b.NewValue0(v.Pos, OpConst64, t) 2604 v0.AuxInt = c + d 2605 v.AddArg(v0) 2606 v.AddArg(x) 2607 return true 2608 } 2609 // match: (Add64 (Const64 <t> [c]) (Sub64 x (Const64 <t> [d]))) 2610 // cond: 2611 // result: (Add64 (Const64 <t> [c-d]) x) 2612 for { 2613 _ = v.Args[1] 2614 v_0 := v.Args[0] 2615 if v_0.Op != OpConst64 { 2616 break 2617 } 2618 t := v_0.Type 2619 c := v_0.AuxInt 2620 v_1 := v.Args[1] 2621 if v_1.Op != OpSub64 { 2622 break 2623 } 2624 _ = v_1.Args[1] 2625 x := v_1.Args[0] 2626 v_1_1 := v_1.Args[1] 2627 if v_1_1.Op != OpConst64 { 2628 break 2629 } 2630 if v_1_1.Type != t { 2631 break 2632 } 2633 d := v_1_1.AuxInt 2634 v.reset(OpAdd64) 2635 v0 := b.NewValue0(v.Pos, OpConst64, t) 2636 v0.AuxInt = c - d 2637 v.AddArg(v0) 2638 v.AddArg(x) 2639 return true 2640 } 2641 // match: (Add64 (Sub64 x (Const64 <t> [d])) (Const64 <t> [c])) 2642 // cond: 2643 // result: (Add64 (Const64 <t> [c-d]) x) 2644 for { 2645 _ = v.Args[1] 2646 v_0 := v.Args[0] 2647 if v_0.Op != OpSub64 { 2648 break 2649 } 2650 _ = v_0.Args[1] 2651 x := v_0.Args[0] 2652 v_0_1 := v_0.Args[1] 2653 if v_0_1.Op != OpConst64 { 2654 break 2655 } 2656 t := v_0_1.Type 2657 d := v_0_1.AuxInt 2658 v_1 := v.Args[1] 2659 if v_1.Op != OpConst64 { 2660 break 2661 } 2662 if v_1.Type != t { 2663 break 2664 } 2665 c := v_1.AuxInt 2666 v.reset(OpAdd64) 2667 v0 := b.NewValue0(v.Pos, OpConst64, t) 2668 v0.AuxInt = c - d 2669 v.AddArg(v0) 2670 v.AddArg(x) 2671 return true 2672 } 2673 return false 2674 } 2675 func rewriteValuegeneric_OpAdd64F_0(v *Value) bool { 2676 // match: (Add64F (Const64F [c]) (Const64F [d])) 2677 // cond: 2678 // result: (Const64F [f2i(i2f(c) + i2f(d))]) 2679 for { 2680 _ = v.Args[1] 2681 v_0 := v.Args[0] 2682 if v_0.Op != OpConst64F { 2683 break 2684 } 2685 c := v_0.AuxInt 2686 v_1 := v.Args[1] 2687 if v_1.Op != OpConst64F { 2688 break 2689 } 2690 d := v_1.AuxInt 2691 v.reset(OpConst64F) 2692 v.AuxInt = f2i(i2f(c) + i2f(d)) 2693 return true 2694 } 2695 // match: (Add64F (Const64F [d]) (Const64F [c])) 2696 // cond: 2697 // result: (Const64F [f2i(i2f(c) + i2f(d))]) 2698 for { 2699 _ = v.Args[1] 2700 v_0 := v.Args[0] 2701 if v_0.Op != OpConst64F { 2702 
break 2703 } 2704 d := v_0.AuxInt 2705 v_1 := v.Args[1] 2706 if v_1.Op != OpConst64F { 2707 break 2708 } 2709 c := v_1.AuxInt 2710 v.reset(OpConst64F) 2711 v.AuxInt = f2i(i2f(c) + i2f(d)) 2712 return true 2713 } 2714 // match: (Add64F x (Const64F [0])) 2715 // cond: 2716 // result: x 2717 for { 2718 _ = v.Args[1] 2719 x := v.Args[0] 2720 v_1 := v.Args[1] 2721 if v_1.Op != OpConst64F { 2722 break 2723 } 2724 if v_1.AuxInt != 0 { 2725 break 2726 } 2727 v.reset(OpCopy) 2728 v.Type = x.Type 2729 v.AddArg(x) 2730 return true 2731 } 2732 // match: (Add64F (Const64F [0]) x) 2733 // cond: 2734 // result: x 2735 for { 2736 _ = v.Args[1] 2737 v_0 := v.Args[0] 2738 if v_0.Op != OpConst64F { 2739 break 2740 } 2741 if v_0.AuxInt != 0 { 2742 break 2743 } 2744 x := v.Args[1] 2745 v.reset(OpCopy) 2746 v.Type = x.Type 2747 v.AddArg(x) 2748 return true 2749 } 2750 return false 2751 } 2752 func rewriteValuegeneric_OpAdd8_0(v *Value) bool { 2753 b := v.Block 2754 _ = b 2755 // match: (Add8 (Const8 [c]) (Const8 [d])) 2756 // cond: 2757 // result: (Const8 [int64(int8(c+d))]) 2758 for { 2759 _ = v.Args[1] 2760 v_0 := v.Args[0] 2761 if v_0.Op != OpConst8 { 2762 break 2763 } 2764 c := v_0.AuxInt 2765 v_1 := v.Args[1] 2766 if v_1.Op != OpConst8 { 2767 break 2768 } 2769 d := v_1.AuxInt 2770 v.reset(OpConst8) 2771 v.AuxInt = int64(int8(c + d)) 2772 return true 2773 } 2774 // match: (Add8 (Const8 [d]) (Const8 [c])) 2775 // cond: 2776 // result: (Const8 [int64(int8(c+d))]) 2777 for { 2778 _ = v.Args[1] 2779 v_0 := v.Args[0] 2780 if v_0.Op != OpConst8 { 2781 break 2782 } 2783 d := v_0.AuxInt 2784 v_1 := v.Args[1] 2785 if v_1.Op != OpConst8 { 2786 break 2787 } 2788 c := v_1.AuxInt 2789 v.reset(OpConst8) 2790 v.AuxInt = int64(int8(c + d)) 2791 return true 2792 } 2793 // match: (Add8 (Const8 [0]) x) 2794 // cond: 2795 // result: x 2796 for { 2797 _ = v.Args[1] 2798 v_0 := v.Args[0] 2799 if v_0.Op != OpConst8 { 2800 break 2801 } 2802 if v_0.AuxInt != 0 { 2803 break 2804 } 2805 x := v.Args[1] 2806 v.reset(OpCopy) 2807 v.Type = x.Type 2808 v.AddArg(x) 2809 return true 2810 } 2811 // match: (Add8 x (Const8 [0])) 2812 // cond: 2813 // result: x 2814 for { 2815 _ = v.Args[1] 2816 x := v.Args[0] 2817 v_1 := v.Args[1] 2818 if v_1.Op != OpConst8 { 2819 break 2820 } 2821 if v_1.AuxInt != 0 { 2822 break 2823 } 2824 v.reset(OpCopy) 2825 v.Type = x.Type 2826 v.AddArg(x) 2827 return true 2828 } 2829 // match: (Add8 (Const8 [1]) (Com8 x)) 2830 // cond: 2831 // result: (Neg8 x) 2832 for { 2833 _ = v.Args[1] 2834 v_0 := v.Args[0] 2835 if v_0.Op != OpConst8 { 2836 break 2837 } 2838 if v_0.AuxInt != 1 { 2839 break 2840 } 2841 v_1 := v.Args[1] 2842 if v_1.Op != OpCom8 { 2843 break 2844 } 2845 x := v_1.Args[0] 2846 v.reset(OpNeg8) 2847 v.AddArg(x) 2848 return true 2849 } 2850 // match: (Add8 (Com8 x) (Const8 [1])) 2851 // cond: 2852 // result: (Neg8 x) 2853 for { 2854 _ = v.Args[1] 2855 v_0 := v.Args[0] 2856 if v_0.Op != OpCom8 { 2857 break 2858 } 2859 x := v_0.Args[0] 2860 v_1 := v.Args[1] 2861 if v_1.Op != OpConst8 { 2862 break 2863 } 2864 if v_1.AuxInt != 1 { 2865 break 2866 } 2867 v.reset(OpNeg8) 2868 v.AddArg(x) 2869 return true 2870 } 2871 // match: (Add8 (Add8 i:(Const8 <t>) z) x) 2872 // cond: (z.Op != OpConst8 && x.Op != OpConst8) 2873 // result: (Add8 i (Add8 <t> z x)) 2874 for { 2875 _ = v.Args[1] 2876 v_0 := v.Args[0] 2877 if v_0.Op != OpAdd8 { 2878 break 2879 } 2880 _ = v_0.Args[1] 2881 i := v_0.Args[0] 2882 if i.Op != OpConst8 { 2883 break 2884 } 2885 t := i.Type 2886 z := v_0.Args[1] 2887 x := v.Args[1] 2888 if !(z.Op != 
OpConst8 && x.Op != OpConst8) { 2889 break 2890 } 2891 v.reset(OpAdd8) 2892 v.AddArg(i) 2893 v0 := b.NewValue0(v.Pos, OpAdd8, t) 2894 v0.AddArg(z) 2895 v0.AddArg(x) 2896 v.AddArg(v0) 2897 return true 2898 } 2899 // match: (Add8 (Add8 z i:(Const8 <t>)) x) 2900 // cond: (z.Op != OpConst8 && x.Op != OpConst8) 2901 // result: (Add8 i (Add8 <t> z x)) 2902 for { 2903 _ = v.Args[1] 2904 v_0 := v.Args[0] 2905 if v_0.Op != OpAdd8 { 2906 break 2907 } 2908 _ = v_0.Args[1] 2909 z := v_0.Args[0] 2910 i := v_0.Args[1] 2911 if i.Op != OpConst8 { 2912 break 2913 } 2914 t := i.Type 2915 x := v.Args[1] 2916 if !(z.Op != OpConst8 && x.Op != OpConst8) { 2917 break 2918 } 2919 v.reset(OpAdd8) 2920 v.AddArg(i) 2921 v0 := b.NewValue0(v.Pos, OpAdd8, t) 2922 v0.AddArg(z) 2923 v0.AddArg(x) 2924 v.AddArg(v0) 2925 return true 2926 } 2927 // match: (Add8 x (Add8 i:(Const8 <t>) z)) 2928 // cond: (z.Op != OpConst8 && x.Op != OpConst8) 2929 // result: (Add8 i (Add8 <t> z x)) 2930 for { 2931 _ = v.Args[1] 2932 x := v.Args[0] 2933 v_1 := v.Args[1] 2934 if v_1.Op != OpAdd8 { 2935 break 2936 } 2937 _ = v_1.Args[1] 2938 i := v_1.Args[0] 2939 if i.Op != OpConst8 { 2940 break 2941 } 2942 t := i.Type 2943 z := v_1.Args[1] 2944 if !(z.Op != OpConst8 && x.Op != OpConst8) { 2945 break 2946 } 2947 v.reset(OpAdd8) 2948 v.AddArg(i) 2949 v0 := b.NewValue0(v.Pos, OpAdd8, t) 2950 v0.AddArg(z) 2951 v0.AddArg(x) 2952 v.AddArg(v0) 2953 return true 2954 } 2955 // match: (Add8 x (Add8 z i:(Const8 <t>))) 2956 // cond: (z.Op != OpConst8 && x.Op != OpConst8) 2957 // result: (Add8 i (Add8 <t> z x)) 2958 for { 2959 _ = v.Args[1] 2960 x := v.Args[0] 2961 v_1 := v.Args[1] 2962 if v_1.Op != OpAdd8 { 2963 break 2964 } 2965 _ = v_1.Args[1] 2966 z := v_1.Args[0] 2967 i := v_1.Args[1] 2968 if i.Op != OpConst8 { 2969 break 2970 } 2971 t := i.Type 2972 if !(z.Op != OpConst8 && x.Op != OpConst8) { 2973 break 2974 } 2975 v.reset(OpAdd8) 2976 v.AddArg(i) 2977 v0 := b.NewValue0(v.Pos, OpAdd8, t) 2978 v0.AddArg(z) 2979 v0.AddArg(x) 2980 v.AddArg(v0) 2981 return true 2982 } 2983 return false 2984 } 2985 func rewriteValuegeneric_OpAdd8_10(v *Value) bool { 2986 b := v.Block 2987 _ = b 2988 // match: (Add8 (Sub8 i:(Const8 <t>) z) x) 2989 // cond: (z.Op != OpConst8 && x.Op != OpConst8) 2990 // result: (Add8 i (Sub8 <t> x z)) 2991 for { 2992 _ = v.Args[1] 2993 v_0 := v.Args[0] 2994 if v_0.Op != OpSub8 { 2995 break 2996 } 2997 _ = v_0.Args[1] 2998 i := v_0.Args[0] 2999 if i.Op != OpConst8 { 3000 break 3001 } 3002 t := i.Type 3003 z := v_0.Args[1] 3004 x := v.Args[1] 3005 if !(z.Op != OpConst8 && x.Op != OpConst8) { 3006 break 3007 } 3008 v.reset(OpAdd8) 3009 v.AddArg(i) 3010 v0 := b.NewValue0(v.Pos, OpSub8, t) 3011 v0.AddArg(x) 3012 v0.AddArg(z) 3013 v.AddArg(v0) 3014 return true 3015 } 3016 // match: (Add8 x (Sub8 i:(Const8 <t>) z)) 3017 // cond: (z.Op != OpConst8 && x.Op != OpConst8) 3018 // result: (Add8 i (Sub8 <t> x z)) 3019 for { 3020 _ = v.Args[1] 3021 x := v.Args[0] 3022 v_1 := v.Args[1] 3023 if v_1.Op != OpSub8 { 3024 break 3025 } 3026 _ = v_1.Args[1] 3027 i := v_1.Args[0] 3028 if i.Op != OpConst8 { 3029 break 3030 } 3031 t := i.Type 3032 z := v_1.Args[1] 3033 if !(z.Op != OpConst8 && x.Op != OpConst8) { 3034 break 3035 } 3036 v.reset(OpAdd8) 3037 v.AddArg(i) 3038 v0 := b.NewValue0(v.Pos, OpSub8, t) 3039 v0.AddArg(x) 3040 v0.AddArg(z) 3041 v.AddArg(v0) 3042 return true 3043 } 3044 // match: (Add8 x (Sub8 i:(Const8 <t>) z)) 3045 // cond: (z.Op != OpConst8 && x.Op != OpConst8) 3046 // result: (Add8 i (Sub8 <t> x z)) 3047 for { 3048 _ = v.Args[1] 3049 x := 
v.Args[0] 3050 v_1 := v.Args[1] 3051 if v_1.Op != OpSub8 { 3052 break 3053 } 3054 _ = v_1.Args[1] 3055 i := v_1.Args[0] 3056 if i.Op != OpConst8 { 3057 break 3058 } 3059 t := i.Type 3060 z := v_1.Args[1] 3061 if !(z.Op != OpConst8 && x.Op != OpConst8) { 3062 break 3063 } 3064 v.reset(OpAdd8) 3065 v.AddArg(i) 3066 v0 := b.NewValue0(v.Pos, OpSub8, t) 3067 v0.AddArg(x) 3068 v0.AddArg(z) 3069 v.AddArg(v0) 3070 return true 3071 } 3072 // match: (Add8 (Sub8 i:(Const8 <t>) z) x) 3073 // cond: (z.Op != OpConst8 && x.Op != OpConst8) 3074 // result: (Add8 i (Sub8 <t> x z)) 3075 for { 3076 _ = v.Args[1] 3077 v_0 := v.Args[0] 3078 if v_0.Op != OpSub8 { 3079 break 3080 } 3081 _ = v_0.Args[1] 3082 i := v_0.Args[0] 3083 if i.Op != OpConst8 { 3084 break 3085 } 3086 t := i.Type 3087 z := v_0.Args[1] 3088 x := v.Args[1] 3089 if !(z.Op != OpConst8 && x.Op != OpConst8) { 3090 break 3091 } 3092 v.reset(OpAdd8) 3093 v.AddArg(i) 3094 v0 := b.NewValue0(v.Pos, OpSub8, t) 3095 v0.AddArg(x) 3096 v0.AddArg(z) 3097 v.AddArg(v0) 3098 return true 3099 } 3100 // match: (Add8 (Sub8 z i:(Const8 <t>)) x) 3101 // cond: (z.Op != OpConst8 && x.Op != OpConst8) 3102 // result: (Sub8 (Add8 <t> x z) i) 3103 for { 3104 _ = v.Args[1] 3105 v_0 := v.Args[0] 3106 if v_0.Op != OpSub8 { 3107 break 3108 } 3109 _ = v_0.Args[1] 3110 z := v_0.Args[0] 3111 i := v_0.Args[1] 3112 if i.Op != OpConst8 { 3113 break 3114 } 3115 t := i.Type 3116 x := v.Args[1] 3117 if !(z.Op != OpConst8 && x.Op != OpConst8) { 3118 break 3119 } 3120 v.reset(OpSub8) 3121 v0 := b.NewValue0(v.Pos, OpAdd8, t) 3122 v0.AddArg(x) 3123 v0.AddArg(z) 3124 v.AddArg(v0) 3125 v.AddArg(i) 3126 return true 3127 } 3128 // match: (Add8 x (Sub8 z i:(Const8 <t>))) 3129 // cond: (z.Op != OpConst8 && x.Op != OpConst8) 3130 // result: (Sub8 (Add8 <t> x z) i) 3131 for { 3132 _ = v.Args[1] 3133 x := v.Args[0] 3134 v_1 := v.Args[1] 3135 if v_1.Op != OpSub8 { 3136 break 3137 } 3138 _ = v_1.Args[1] 3139 z := v_1.Args[0] 3140 i := v_1.Args[1] 3141 if i.Op != OpConst8 { 3142 break 3143 } 3144 t := i.Type 3145 if !(z.Op != OpConst8 && x.Op != OpConst8) { 3146 break 3147 } 3148 v.reset(OpSub8) 3149 v0 := b.NewValue0(v.Pos, OpAdd8, t) 3150 v0.AddArg(x) 3151 v0.AddArg(z) 3152 v.AddArg(v0) 3153 v.AddArg(i) 3154 return true 3155 } 3156 // match: (Add8 x (Sub8 z i:(Const8 <t>))) 3157 // cond: (z.Op != OpConst8 && x.Op != OpConst8) 3158 // result: (Sub8 (Add8 <t> x z) i) 3159 for { 3160 _ = v.Args[1] 3161 x := v.Args[0] 3162 v_1 := v.Args[1] 3163 if v_1.Op != OpSub8 { 3164 break 3165 } 3166 _ = v_1.Args[1] 3167 z := v_1.Args[0] 3168 i := v_1.Args[1] 3169 if i.Op != OpConst8 { 3170 break 3171 } 3172 t := i.Type 3173 if !(z.Op != OpConst8 && x.Op != OpConst8) { 3174 break 3175 } 3176 v.reset(OpSub8) 3177 v0 := b.NewValue0(v.Pos, OpAdd8, t) 3178 v0.AddArg(x) 3179 v0.AddArg(z) 3180 v.AddArg(v0) 3181 v.AddArg(i) 3182 return true 3183 } 3184 // match: (Add8 (Sub8 z i:(Const8 <t>)) x) 3185 // cond: (z.Op != OpConst8 && x.Op != OpConst8) 3186 // result: (Sub8 (Add8 <t> x z) i) 3187 for { 3188 _ = v.Args[1] 3189 v_0 := v.Args[0] 3190 if v_0.Op != OpSub8 { 3191 break 3192 } 3193 _ = v_0.Args[1] 3194 z := v_0.Args[0] 3195 i := v_0.Args[1] 3196 if i.Op != OpConst8 { 3197 break 3198 } 3199 t := i.Type 3200 x := v.Args[1] 3201 if !(z.Op != OpConst8 && x.Op != OpConst8) { 3202 break 3203 } 3204 v.reset(OpSub8) 3205 v0 := b.NewValue0(v.Pos, OpAdd8, t) 3206 v0.AddArg(x) 3207 v0.AddArg(z) 3208 v.AddArg(v0) 3209 v.AddArg(i) 3210 return true 3211 } 3212 // match: (Add8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x)) 3213 // 
cond: 3214 // result: (Add8 (Const8 <t> [int64(int8(c+d))]) x) 3215 for { 3216 _ = v.Args[1] 3217 v_0 := v.Args[0] 3218 if v_0.Op != OpConst8 { 3219 break 3220 } 3221 t := v_0.Type 3222 c := v_0.AuxInt 3223 v_1 := v.Args[1] 3224 if v_1.Op != OpAdd8 { 3225 break 3226 } 3227 _ = v_1.Args[1] 3228 v_1_0 := v_1.Args[0] 3229 if v_1_0.Op != OpConst8 { 3230 break 3231 } 3232 if v_1_0.Type != t { 3233 break 3234 } 3235 d := v_1_0.AuxInt 3236 x := v_1.Args[1] 3237 v.reset(OpAdd8) 3238 v0 := b.NewValue0(v.Pos, OpConst8, t) 3239 v0.AuxInt = int64(int8(c + d)) 3240 v.AddArg(v0) 3241 v.AddArg(x) 3242 return true 3243 } 3244 // match: (Add8 (Const8 <t> [c]) (Add8 x (Const8 <t> [d]))) 3245 // cond: 3246 // result: (Add8 (Const8 <t> [int64(int8(c+d))]) x) 3247 for { 3248 _ = v.Args[1] 3249 v_0 := v.Args[0] 3250 if v_0.Op != OpConst8 { 3251 break 3252 } 3253 t := v_0.Type 3254 c := v_0.AuxInt 3255 v_1 := v.Args[1] 3256 if v_1.Op != OpAdd8 { 3257 break 3258 } 3259 _ = v_1.Args[1] 3260 x := v_1.Args[0] 3261 v_1_1 := v_1.Args[1] 3262 if v_1_1.Op != OpConst8 { 3263 break 3264 } 3265 if v_1_1.Type != t { 3266 break 3267 } 3268 d := v_1_1.AuxInt 3269 v.reset(OpAdd8) 3270 v0 := b.NewValue0(v.Pos, OpConst8, t) 3271 v0.AuxInt = int64(int8(c + d)) 3272 v.AddArg(v0) 3273 v.AddArg(x) 3274 return true 3275 } 3276 return false 3277 } 3278 func rewriteValuegeneric_OpAdd8_20(v *Value) bool { 3279 b := v.Block 3280 _ = b 3281 // match: (Add8 (Add8 (Const8 <t> [d]) x) (Const8 <t> [c])) 3282 // cond: 3283 // result: (Add8 (Const8 <t> [int64(int8(c+d))]) x) 3284 for { 3285 _ = v.Args[1] 3286 v_0 := v.Args[0] 3287 if v_0.Op != OpAdd8 { 3288 break 3289 } 3290 _ = v_0.Args[1] 3291 v_0_0 := v_0.Args[0] 3292 if v_0_0.Op != OpConst8 { 3293 break 3294 } 3295 t := v_0_0.Type 3296 d := v_0_0.AuxInt 3297 x := v_0.Args[1] 3298 v_1 := v.Args[1] 3299 if v_1.Op != OpConst8 { 3300 break 3301 } 3302 if v_1.Type != t { 3303 break 3304 } 3305 c := v_1.AuxInt 3306 v.reset(OpAdd8) 3307 v0 := b.NewValue0(v.Pos, OpConst8, t) 3308 v0.AuxInt = int64(int8(c + d)) 3309 v.AddArg(v0) 3310 v.AddArg(x) 3311 return true 3312 } 3313 // match: (Add8 (Add8 x (Const8 <t> [d])) (Const8 <t> [c])) 3314 // cond: 3315 // result: (Add8 (Const8 <t> [int64(int8(c+d))]) x) 3316 for { 3317 _ = v.Args[1] 3318 v_0 := v.Args[0] 3319 if v_0.Op != OpAdd8 { 3320 break 3321 } 3322 _ = v_0.Args[1] 3323 x := v_0.Args[0] 3324 v_0_1 := v_0.Args[1] 3325 if v_0_1.Op != OpConst8 { 3326 break 3327 } 3328 t := v_0_1.Type 3329 d := v_0_1.AuxInt 3330 v_1 := v.Args[1] 3331 if v_1.Op != OpConst8 { 3332 break 3333 } 3334 if v_1.Type != t { 3335 break 3336 } 3337 c := v_1.AuxInt 3338 v.reset(OpAdd8) 3339 v0 := b.NewValue0(v.Pos, OpConst8, t) 3340 v0.AuxInt = int64(int8(c + d)) 3341 v.AddArg(v0) 3342 v.AddArg(x) 3343 return true 3344 } 3345 // match: (Add8 (Const8 <t> [c]) (Sub8 (Const8 <t> [d]) x)) 3346 // cond: 3347 // result: (Sub8 (Const8 <t> [int64(int8(c+d))]) x) 3348 for { 3349 _ = v.Args[1] 3350 v_0 := v.Args[0] 3351 if v_0.Op != OpConst8 { 3352 break 3353 } 3354 t := v_0.Type 3355 c := v_0.AuxInt 3356 v_1 := v.Args[1] 3357 if v_1.Op != OpSub8 { 3358 break 3359 } 3360 _ = v_1.Args[1] 3361 v_1_0 := v_1.Args[0] 3362 if v_1_0.Op != OpConst8 { 3363 break 3364 } 3365 if v_1_0.Type != t { 3366 break 3367 } 3368 d := v_1_0.AuxInt 3369 x := v_1.Args[1] 3370 v.reset(OpSub8) 3371 v0 := b.NewValue0(v.Pos, OpConst8, t) 3372 v0.AuxInt = int64(int8(c + d)) 3373 v.AddArg(v0) 3374 v.AddArg(x) 3375 return true 3376 } 3377 // match: (Add8 (Sub8 (Const8 <t> [d]) x) (Const8 <t> [c])) 3378 // cond: 3379 // 
result: (Sub8 (Const8 <t> [int64(int8(c+d))]) x) 3380 for { 3381 _ = v.Args[1] 3382 v_0 := v.Args[0] 3383 if v_0.Op != OpSub8 { 3384 break 3385 } 3386 _ = v_0.Args[1] 3387 v_0_0 := v_0.Args[0] 3388 if v_0_0.Op != OpConst8 { 3389 break 3390 } 3391 t := v_0_0.Type 3392 d := v_0_0.AuxInt 3393 x := v_0.Args[1] 3394 v_1 := v.Args[1] 3395 if v_1.Op != OpConst8 { 3396 break 3397 } 3398 if v_1.Type != t { 3399 break 3400 } 3401 c := v_1.AuxInt 3402 v.reset(OpSub8) 3403 v0 := b.NewValue0(v.Pos, OpConst8, t) 3404 v0.AuxInt = int64(int8(c + d)) 3405 v.AddArg(v0) 3406 v.AddArg(x) 3407 return true 3408 } 3409 // match: (Add8 (Const8 <t> [c]) (Sub8 x (Const8 <t> [d]))) 3410 // cond: 3411 // result: (Add8 (Const8 <t> [int64(int8(c-d))]) x) 3412 for { 3413 _ = v.Args[1] 3414 v_0 := v.Args[0] 3415 if v_0.Op != OpConst8 { 3416 break 3417 } 3418 t := v_0.Type 3419 c := v_0.AuxInt 3420 v_1 := v.Args[1] 3421 if v_1.Op != OpSub8 { 3422 break 3423 } 3424 _ = v_1.Args[1] 3425 x := v_1.Args[0] 3426 v_1_1 := v_1.Args[1] 3427 if v_1_1.Op != OpConst8 { 3428 break 3429 } 3430 if v_1_1.Type != t { 3431 break 3432 } 3433 d := v_1_1.AuxInt 3434 v.reset(OpAdd8) 3435 v0 := b.NewValue0(v.Pos, OpConst8, t) 3436 v0.AuxInt = int64(int8(c - d)) 3437 v.AddArg(v0) 3438 v.AddArg(x) 3439 return true 3440 } 3441 // match: (Add8 (Sub8 x (Const8 <t> [d])) (Const8 <t> [c])) 3442 // cond: 3443 // result: (Add8 (Const8 <t> [int64(int8(c-d))]) x) 3444 for { 3445 _ = v.Args[1] 3446 v_0 := v.Args[0] 3447 if v_0.Op != OpSub8 { 3448 break 3449 } 3450 _ = v_0.Args[1] 3451 x := v_0.Args[0] 3452 v_0_1 := v_0.Args[1] 3453 if v_0_1.Op != OpConst8 { 3454 break 3455 } 3456 t := v_0_1.Type 3457 d := v_0_1.AuxInt 3458 v_1 := v.Args[1] 3459 if v_1.Op != OpConst8 { 3460 break 3461 } 3462 if v_1.Type != t { 3463 break 3464 } 3465 c := v_1.AuxInt 3466 v.reset(OpAdd8) 3467 v0 := b.NewValue0(v.Pos, OpConst8, t) 3468 v0.AuxInt = int64(int8(c - d)) 3469 v.AddArg(v0) 3470 v.AddArg(x) 3471 return true 3472 } 3473 return false 3474 } 3475 func rewriteValuegeneric_OpAddPtr_0(v *Value) bool { 3476 // match: (AddPtr <t> x (Const64 [c])) 3477 // cond: 3478 // result: (OffPtr <t> x [c]) 3479 for { 3480 t := v.Type 3481 _ = v.Args[1] 3482 x := v.Args[0] 3483 v_1 := v.Args[1] 3484 if v_1.Op != OpConst64 { 3485 break 3486 } 3487 c := v_1.AuxInt 3488 v.reset(OpOffPtr) 3489 v.Type = t 3490 v.AuxInt = c 3491 v.AddArg(x) 3492 return true 3493 } 3494 // match: (AddPtr <t> x (Const32 [c])) 3495 // cond: 3496 // result: (OffPtr <t> x [c]) 3497 for { 3498 t := v.Type 3499 _ = v.Args[1] 3500 x := v.Args[0] 3501 v_1 := v.Args[1] 3502 if v_1.Op != OpConst32 { 3503 break 3504 } 3505 c := v_1.AuxInt 3506 v.reset(OpOffPtr) 3507 v.Type = t 3508 v.AuxInt = c 3509 v.AddArg(x) 3510 return true 3511 } 3512 return false 3513 } 3514 func rewriteValuegeneric_OpAnd16_0(v *Value) bool { 3515 // match: (And16 (Const16 [c]) (Const16 [d])) 3516 // cond: 3517 // result: (Const16 [int64(int16(c&d))]) 3518 for { 3519 _ = v.Args[1] 3520 v_0 := v.Args[0] 3521 if v_0.Op != OpConst16 { 3522 break 3523 } 3524 c := v_0.AuxInt 3525 v_1 := v.Args[1] 3526 if v_1.Op != OpConst16 { 3527 break 3528 } 3529 d := v_1.AuxInt 3530 v.reset(OpConst16) 3531 v.AuxInt = int64(int16(c & d)) 3532 return true 3533 } 3534 // match: (And16 (Const16 [d]) (Const16 [c])) 3535 // cond: 3536 // result: (Const16 [int64(int16(c&d))]) 3537 for { 3538 _ = v.Args[1] 3539 v_0 := v.Args[0] 3540 if v_0.Op != OpConst16 { 3541 break 3542 } 3543 d := v_0.AuxInt 3544 v_1 := v.Args[1] 3545 if v_1.Op != OpConst16 { 3546 break 3547 } 3548 c := 
v_1.AuxInt 3549 v.reset(OpConst16) 3550 v.AuxInt = int64(int16(c & d)) 3551 return true 3552 } 3553 // match: (And16 x x) 3554 // cond: 3555 // result: x 3556 for { 3557 _ = v.Args[1] 3558 x := v.Args[0] 3559 if x != v.Args[1] { 3560 break 3561 } 3562 v.reset(OpCopy) 3563 v.Type = x.Type 3564 v.AddArg(x) 3565 return true 3566 } 3567 // match: (And16 (Const16 [-1]) x) 3568 // cond: 3569 // result: x 3570 for { 3571 _ = v.Args[1] 3572 v_0 := v.Args[0] 3573 if v_0.Op != OpConst16 { 3574 break 3575 } 3576 if v_0.AuxInt != -1 { 3577 break 3578 } 3579 x := v.Args[1] 3580 v.reset(OpCopy) 3581 v.Type = x.Type 3582 v.AddArg(x) 3583 return true 3584 } 3585 // match: (And16 x (Const16 [-1])) 3586 // cond: 3587 // result: x 3588 for { 3589 _ = v.Args[1] 3590 x := v.Args[0] 3591 v_1 := v.Args[1] 3592 if v_1.Op != OpConst16 { 3593 break 3594 } 3595 if v_1.AuxInt != -1 { 3596 break 3597 } 3598 v.reset(OpCopy) 3599 v.Type = x.Type 3600 v.AddArg(x) 3601 return true 3602 } 3603 // match: (And16 (Const16 [0]) _) 3604 // cond: 3605 // result: (Const16 [0]) 3606 for { 3607 _ = v.Args[1] 3608 v_0 := v.Args[0] 3609 if v_0.Op != OpConst16 { 3610 break 3611 } 3612 if v_0.AuxInt != 0 { 3613 break 3614 } 3615 v.reset(OpConst16) 3616 v.AuxInt = 0 3617 return true 3618 } 3619 // match: (And16 _ (Const16 [0])) 3620 // cond: 3621 // result: (Const16 [0]) 3622 for { 3623 _ = v.Args[1] 3624 v_1 := v.Args[1] 3625 if v_1.Op != OpConst16 { 3626 break 3627 } 3628 if v_1.AuxInt != 0 { 3629 break 3630 } 3631 v.reset(OpConst16) 3632 v.AuxInt = 0 3633 return true 3634 } 3635 // match: (And16 x (And16 x y)) 3636 // cond: 3637 // result: (And16 x y) 3638 for { 3639 _ = v.Args[1] 3640 x := v.Args[0] 3641 v_1 := v.Args[1] 3642 if v_1.Op != OpAnd16 { 3643 break 3644 } 3645 _ = v_1.Args[1] 3646 if x != v_1.Args[0] { 3647 break 3648 } 3649 y := v_1.Args[1] 3650 v.reset(OpAnd16) 3651 v.AddArg(x) 3652 v.AddArg(y) 3653 return true 3654 } 3655 // match: (And16 x (And16 y x)) 3656 // cond: 3657 // result: (And16 x y) 3658 for { 3659 _ = v.Args[1] 3660 x := v.Args[0] 3661 v_1 := v.Args[1] 3662 if v_1.Op != OpAnd16 { 3663 break 3664 } 3665 _ = v_1.Args[1] 3666 y := v_1.Args[0] 3667 if x != v_1.Args[1] { 3668 break 3669 } 3670 v.reset(OpAnd16) 3671 v.AddArg(x) 3672 v.AddArg(y) 3673 return true 3674 } 3675 // match: (And16 (And16 x y) x) 3676 // cond: 3677 // result: (And16 x y) 3678 for { 3679 _ = v.Args[1] 3680 v_0 := v.Args[0] 3681 if v_0.Op != OpAnd16 { 3682 break 3683 } 3684 _ = v_0.Args[1] 3685 x := v_0.Args[0] 3686 y := v_0.Args[1] 3687 if x != v.Args[1] { 3688 break 3689 } 3690 v.reset(OpAnd16) 3691 v.AddArg(x) 3692 v.AddArg(y) 3693 return true 3694 } 3695 return false 3696 } 3697 func rewriteValuegeneric_OpAnd16_10(v *Value) bool { 3698 b := v.Block 3699 _ = b 3700 // match: (And16 (And16 y x) x) 3701 // cond: 3702 // result: (And16 x y) 3703 for { 3704 _ = v.Args[1] 3705 v_0 := v.Args[0] 3706 if v_0.Op != OpAnd16 { 3707 break 3708 } 3709 _ = v_0.Args[1] 3710 y := v_0.Args[0] 3711 x := v_0.Args[1] 3712 if x != v.Args[1] { 3713 break 3714 } 3715 v.reset(OpAnd16) 3716 v.AddArg(x) 3717 v.AddArg(y) 3718 return true 3719 } 3720 // match: (And16 (And16 i:(Const16 <t>) z) x) 3721 // cond: (z.Op != OpConst16 && x.Op != OpConst16) 3722 // result: (And16 i (And16 <t> z x)) 3723 for { 3724 _ = v.Args[1] 3725 v_0 := v.Args[0] 3726 if v_0.Op != OpAnd16 { 3727 break 3728 } 3729 _ = v_0.Args[1] 3730 i := v_0.Args[0] 3731 if i.Op != OpConst16 { 3732 break 3733 } 3734 t := i.Type 3735 z := v_0.Args[1] 3736 x := v.Args[1] 3737 if !(z.Op != OpConst16 && 
x.Op != OpConst16) { 3738 break 3739 } 3740 v.reset(OpAnd16) 3741 v.AddArg(i) 3742 v0 := b.NewValue0(v.Pos, OpAnd16, t) 3743 v0.AddArg(z) 3744 v0.AddArg(x) 3745 v.AddArg(v0) 3746 return true 3747 } 3748 // match: (And16 (And16 z i:(Const16 <t>)) x) 3749 // cond: (z.Op != OpConst16 && x.Op != OpConst16) 3750 // result: (And16 i (And16 <t> z x)) 3751 for { 3752 _ = v.Args[1] 3753 v_0 := v.Args[0] 3754 if v_0.Op != OpAnd16 { 3755 break 3756 } 3757 _ = v_0.Args[1] 3758 z := v_0.Args[0] 3759 i := v_0.Args[1] 3760 if i.Op != OpConst16 { 3761 break 3762 } 3763 t := i.Type 3764 x := v.Args[1] 3765 if !(z.Op != OpConst16 && x.Op != OpConst16) { 3766 break 3767 } 3768 v.reset(OpAnd16) 3769 v.AddArg(i) 3770 v0 := b.NewValue0(v.Pos, OpAnd16, t) 3771 v0.AddArg(z) 3772 v0.AddArg(x) 3773 v.AddArg(v0) 3774 return true 3775 } 3776 // match: (And16 x (And16 i:(Const16 <t>) z)) 3777 // cond: (z.Op != OpConst16 && x.Op != OpConst16) 3778 // result: (And16 i (And16 <t> z x)) 3779 for { 3780 _ = v.Args[1] 3781 x := v.Args[0] 3782 v_1 := v.Args[1] 3783 if v_1.Op != OpAnd16 { 3784 break 3785 } 3786 _ = v_1.Args[1] 3787 i := v_1.Args[0] 3788 if i.Op != OpConst16 { 3789 break 3790 } 3791 t := i.Type 3792 z := v_1.Args[1] 3793 if !(z.Op != OpConst16 && x.Op != OpConst16) { 3794 break 3795 } 3796 v.reset(OpAnd16) 3797 v.AddArg(i) 3798 v0 := b.NewValue0(v.Pos, OpAnd16, t) 3799 v0.AddArg(z) 3800 v0.AddArg(x) 3801 v.AddArg(v0) 3802 return true 3803 } 3804 // match: (And16 x (And16 z i:(Const16 <t>))) 3805 // cond: (z.Op != OpConst16 && x.Op != OpConst16) 3806 // result: (And16 i (And16 <t> z x)) 3807 for { 3808 _ = v.Args[1] 3809 x := v.Args[0] 3810 v_1 := v.Args[1] 3811 if v_1.Op != OpAnd16 { 3812 break 3813 } 3814 _ = v_1.Args[1] 3815 z := v_1.Args[0] 3816 i := v_1.Args[1] 3817 if i.Op != OpConst16 { 3818 break 3819 } 3820 t := i.Type 3821 if !(z.Op != OpConst16 && x.Op != OpConst16) { 3822 break 3823 } 3824 v.reset(OpAnd16) 3825 v.AddArg(i) 3826 v0 := b.NewValue0(v.Pos, OpAnd16, t) 3827 v0.AddArg(z) 3828 v0.AddArg(x) 3829 v.AddArg(v0) 3830 return true 3831 } 3832 // match: (And16 (Const16 <t> [c]) (And16 (Const16 <t> [d]) x)) 3833 // cond: 3834 // result: (And16 (Const16 <t> [int64(int16(c&d))]) x) 3835 for { 3836 _ = v.Args[1] 3837 v_0 := v.Args[0] 3838 if v_0.Op != OpConst16 { 3839 break 3840 } 3841 t := v_0.Type 3842 c := v_0.AuxInt 3843 v_1 := v.Args[1] 3844 if v_1.Op != OpAnd16 { 3845 break 3846 } 3847 _ = v_1.Args[1] 3848 v_1_0 := v_1.Args[0] 3849 if v_1_0.Op != OpConst16 { 3850 break 3851 } 3852 if v_1_0.Type != t { 3853 break 3854 } 3855 d := v_1_0.AuxInt 3856 x := v_1.Args[1] 3857 v.reset(OpAnd16) 3858 v0 := b.NewValue0(v.Pos, OpConst16, t) 3859 v0.AuxInt = int64(int16(c & d)) 3860 v.AddArg(v0) 3861 v.AddArg(x) 3862 return true 3863 } 3864 // match: (And16 (Const16 <t> [c]) (And16 x (Const16 <t> [d]))) 3865 // cond: 3866 // result: (And16 (Const16 <t> [int64(int16(c&d))]) x) 3867 for { 3868 _ = v.Args[1] 3869 v_0 := v.Args[0] 3870 if v_0.Op != OpConst16 { 3871 break 3872 } 3873 t := v_0.Type 3874 c := v_0.AuxInt 3875 v_1 := v.Args[1] 3876 if v_1.Op != OpAnd16 { 3877 break 3878 } 3879 _ = v_1.Args[1] 3880 x := v_1.Args[0] 3881 v_1_1 := v_1.Args[1] 3882 if v_1_1.Op != OpConst16 { 3883 break 3884 } 3885 if v_1_1.Type != t { 3886 break 3887 } 3888 d := v_1_1.AuxInt 3889 v.reset(OpAnd16) 3890 v0 := b.NewValue0(v.Pos, OpConst16, t) 3891 v0.AuxInt = int64(int16(c & d)) 3892 v.AddArg(v0) 3893 v.AddArg(x) 3894 return true 3895 } 3896 // match: (And16 (And16 (Const16 <t> [d]) x) (Const16 <t> [c])) 3897 // cond: 3898 
// result: (And16 (Const16 <t> [int64(int16(c&d))]) x) 3899 for { 3900 _ = v.Args[1] 3901 v_0 := v.Args[0] 3902 if v_0.Op != OpAnd16 { 3903 break 3904 } 3905 _ = v_0.Args[1] 3906 v_0_0 := v_0.Args[0] 3907 if v_0_0.Op != OpConst16 { 3908 break 3909 } 3910 t := v_0_0.Type 3911 d := v_0_0.AuxInt 3912 x := v_0.Args[1] 3913 v_1 := v.Args[1] 3914 if v_1.Op != OpConst16 { 3915 break 3916 } 3917 if v_1.Type != t { 3918 break 3919 } 3920 c := v_1.AuxInt 3921 v.reset(OpAnd16) 3922 v0 := b.NewValue0(v.Pos, OpConst16, t) 3923 v0.AuxInt = int64(int16(c & d)) 3924 v.AddArg(v0) 3925 v.AddArg(x) 3926 return true 3927 } 3928 // match: (And16 (And16 x (Const16 <t> [d])) (Const16 <t> [c])) 3929 // cond: 3930 // result: (And16 (Const16 <t> [int64(int16(c&d))]) x) 3931 for { 3932 _ = v.Args[1] 3933 v_0 := v.Args[0] 3934 if v_0.Op != OpAnd16 { 3935 break 3936 } 3937 _ = v_0.Args[1] 3938 x := v_0.Args[0] 3939 v_0_1 := v_0.Args[1] 3940 if v_0_1.Op != OpConst16 { 3941 break 3942 } 3943 t := v_0_1.Type 3944 d := v_0_1.AuxInt 3945 v_1 := v.Args[1] 3946 if v_1.Op != OpConst16 { 3947 break 3948 } 3949 if v_1.Type != t { 3950 break 3951 } 3952 c := v_1.AuxInt 3953 v.reset(OpAnd16) 3954 v0 := b.NewValue0(v.Pos, OpConst16, t) 3955 v0.AuxInt = int64(int16(c & d)) 3956 v.AddArg(v0) 3957 v.AddArg(x) 3958 return true 3959 } 3960 return false 3961 } 3962 func rewriteValuegeneric_OpAnd32_0(v *Value) bool { 3963 // match: (And32 (Const32 [c]) (Const32 [d])) 3964 // cond: 3965 // result: (Const32 [int64(int32(c&d))]) 3966 for { 3967 _ = v.Args[1] 3968 v_0 := v.Args[0] 3969 if v_0.Op != OpConst32 { 3970 break 3971 } 3972 c := v_0.AuxInt 3973 v_1 := v.Args[1] 3974 if v_1.Op != OpConst32 { 3975 break 3976 } 3977 d := v_1.AuxInt 3978 v.reset(OpConst32) 3979 v.AuxInt = int64(int32(c & d)) 3980 return true 3981 } 3982 // match: (And32 (Const32 [d]) (Const32 [c])) 3983 // cond: 3984 // result: (Const32 [int64(int32(c&d))]) 3985 for { 3986 _ = v.Args[1] 3987 v_0 := v.Args[0] 3988 if v_0.Op != OpConst32 { 3989 break 3990 } 3991 d := v_0.AuxInt 3992 v_1 := v.Args[1] 3993 if v_1.Op != OpConst32 { 3994 break 3995 } 3996 c := v_1.AuxInt 3997 v.reset(OpConst32) 3998 v.AuxInt = int64(int32(c & d)) 3999 return true 4000 } 4001 // match: (And32 x x) 4002 // cond: 4003 // result: x 4004 for { 4005 _ = v.Args[1] 4006 x := v.Args[0] 4007 if x != v.Args[1] { 4008 break 4009 } 4010 v.reset(OpCopy) 4011 v.Type = x.Type 4012 v.AddArg(x) 4013 return true 4014 } 4015 // match: (And32 (Const32 [-1]) x) 4016 // cond: 4017 // result: x 4018 for { 4019 _ = v.Args[1] 4020 v_0 := v.Args[0] 4021 if v_0.Op != OpConst32 { 4022 break 4023 } 4024 if v_0.AuxInt != -1 { 4025 break 4026 } 4027 x := v.Args[1] 4028 v.reset(OpCopy) 4029 v.Type = x.Type 4030 v.AddArg(x) 4031 return true 4032 } 4033 // match: (And32 x (Const32 [-1])) 4034 // cond: 4035 // result: x 4036 for { 4037 _ = v.Args[1] 4038 x := v.Args[0] 4039 v_1 := v.Args[1] 4040 if v_1.Op != OpConst32 { 4041 break 4042 } 4043 if v_1.AuxInt != -1 { 4044 break 4045 } 4046 v.reset(OpCopy) 4047 v.Type = x.Type 4048 v.AddArg(x) 4049 return true 4050 } 4051 // match: (And32 (Const32 [0]) _) 4052 // cond: 4053 // result: (Const32 [0]) 4054 for { 4055 _ = v.Args[1] 4056 v_0 := v.Args[0] 4057 if v_0.Op != OpConst32 { 4058 break 4059 } 4060 if v_0.AuxInt != 0 { 4061 break 4062 } 4063 v.reset(OpConst32) 4064 v.AuxInt = 0 4065 return true 4066 } 4067 // match: (And32 _ (Const32 [0])) 4068 // cond: 4069 // result: (Const32 [0]) 4070 for { 4071 _ = v.Args[1] 4072 v_1 := v.Args[1] 4073 if v_1.Op != OpConst32 { 4074 break 4075 
} 4076 if v_1.AuxInt != 0 { 4077 break 4078 } 4079 v.reset(OpConst32) 4080 v.AuxInt = 0 4081 return true 4082 } 4083 // match: (And32 x (And32 x y)) 4084 // cond: 4085 // result: (And32 x y) 4086 for { 4087 _ = v.Args[1] 4088 x := v.Args[0] 4089 v_1 := v.Args[1] 4090 if v_1.Op != OpAnd32 { 4091 break 4092 } 4093 _ = v_1.Args[1] 4094 if x != v_1.Args[0] { 4095 break 4096 } 4097 y := v_1.Args[1] 4098 v.reset(OpAnd32) 4099 v.AddArg(x) 4100 v.AddArg(y) 4101 return true 4102 } 4103 // match: (And32 x (And32 y x)) 4104 // cond: 4105 // result: (And32 x y) 4106 for { 4107 _ = v.Args[1] 4108 x := v.Args[0] 4109 v_1 := v.Args[1] 4110 if v_1.Op != OpAnd32 { 4111 break 4112 } 4113 _ = v_1.Args[1] 4114 y := v_1.Args[0] 4115 if x != v_1.Args[1] { 4116 break 4117 } 4118 v.reset(OpAnd32) 4119 v.AddArg(x) 4120 v.AddArg(y) 4121 return true 4122 } 4123 // match: (And32 (And32 x y) x) 4124 // cond: 4125 // result: (And32 x y) 4126 for { 4127 _ = v.Args[1] 4128 v_0 := v.Args[0] 4129 if v_0.Op != OpAnd32 { 4130 break 4131 } 4132 _ = v_0.Args[1] 4133 x := v_0.Args[0] 4134 y := v_0.Args[1] 4135 if x != v.Args[1] { 4136 break 4137 } 4138 v.reset(OpAnd32) 4139 v.AddArg(x) 4140 v.AddArg(y) 4141 return true 4142 } 4143 return false 4144 } 4145 func rewriteValuegeneric_OpAnd32_10(v *Value) bool { 4146 b := v.Block 4147 _ = b 4148 // match: (And32 (And32 y x) x) 4149 // cond: 4150 // result: (And32 x y) 4151 for { 4152 _ = v.Args[1] 4153 v_0 := v.Args[0] 4154 if v_0.Op != OpAnd32 { 4155 break 4156 } 4157 _ = v_0.Args[1] 4158 y := v_0.Args[0] 4159 x := v_0.Args[1] 4160 if x != v.Args[1] { 4161 break 4162 } 4163 v.reset(OpAnd32) 4164 v.AddArg(x) 4165 v.AddArg(y) 4166 return true 4167 } 4168 // match: (And32 (And32 i:(Const32 <t>) z) x) 4169 // cond: (z.Op != OpConst32 && x.Op != OpConst32) 4170 // result: (And32 i (And32 <t> z x)) 4171 for { 4172 _ = v.Args[1] 4173 v_0 := v.Args[0] 4174 if v_0.Op != OpAnd32 { 4175 break 4176 } 4177 _ = v_0.Args[1] 4178 i := v_0.Args[0] 4179 if i.Op != OpConst32 { 4180 break 4181 } 4182 t := i.Type 4183 z := v_0.Args[1] 4184 x := v.Args[1] 4185 if !(z.Op != OpConst32 && x.Op != OpConst32) { 4186 break 4187 } 4188 v.reset(OpAnd32) 4189 v.AddArg(i) 4190 v0 := b.NewValue0(v.Pos, OpAnd32, t) 4191 v0.AddArg(z) 4192 v0.AddArg(x) 4193 v.AddArg(v0) 4194 return true 4195 } 4196 // match: (And32 (And32 z i:(Const32 <t>)) x) 4197 // cond: (z.Op != OpConst32 && x.Op != OpConst32) 4198 // result: (And32 i (And32 <t> z x)) 4199 for { 4200 _ = v.Args[1] 4201 v_0 := v.Args[0] 4202 if v_0.Op != OpAnd32 { 4203 break 4204 } 4205 _ = v_0.Args[1] 4206 z := v_0.Args[0] 4207 i := v_0.Args[1] 4208 if i.Op != OpConst32 { 4209 break 4210 } 4211 t := i.Type 4212 x := v.Args[1] 4213 if !(z.Op != OpConst32 && x.Op != OpConst32) { 4214 break 4215 } 4216 v.reset(OpAnd32) 4217 v.AddArg(i) 4218 v0 := b.NewValue0(v.Pos, OpAnd32, t) 4219 v0.AddArg(z) 4220 v0.AddArg(x) 4221 v.AddArg(v0) 4222 return true 4223 } 4224 // match: (And32 x (And32 i:(Const32 <t>) z)) 4225 // cond: (z.Op != OpConst32 && x.Op != OpConst32) 4226 // result: (And32 i (And32 <t> z x)) 4227 for { 4228 _ = v.Args[1] 4229 x := v.Args[0] 4230 v_1 := v.Args[1] 4231 if v_1.Op != OpAnd32 { 4232 break 4233 } 4234 _ = v_1.Args[1] 4235 i := v_1.Args[0] 4236 if i.Op != OpConst32 { 4237 break 4238 } 4239 t := i.Type 4240 z := v_1.Args[1] 4241 if !(z.Op != OpConst32 && x.Op != OpConst32) { 4242 break 4243 } 4244 v.reset(OpAnd32) 4245 v.AddArg(i) 4246 v0 := b.NewValue0(v.Pos, OpAnd32, t) 4247 v0.AddArg(z) 4248 v0.AddArg(x) 4249 v.AddArg(v0) 4250 return true 4251 } 
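// The And32 rules above and below follow the same canonicalization scheme as
// the And8/And16/And64 variants: a constant buried inside a nested And32 is
// pulled out to become a direct operand (e.g. (And32 (And32 i:(Const32 <t>) z) x)
// => (And32 i (And32 <t> z x))), and once two constants meet they are folded,
// e.g. (And32 (And32 x (Const32 <t> [0xF0])) (Const32 <t> [0xFF])) rewrites to
// (And32 (Const32 <t> [0xF0]) x), since 0xFF & 0xF0 == 0xF0.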
4252 // match: (And32 x (And32 z i:(Const32 <t>))) 4253 // cond: (z.Op != OpConst32 && x.Op != OpConst32) 4254 // result: (And32 i (And32 <t> z x)) 4255 for { 4256 _ = v.Args[1] 4257 x := v.Args[0] 4258 v_1 := v.Args[1] 4259 if v_1.Op != OpAnd32 { 4260 break 4261 } 4262 _ = v_1.Args[1] 4263 z := v_1.Args[0] 4264 i := v_1.Args[1] 4265 if i.Op != OpConst32 { 4266 break 4267 } 4268 t := i.Type 4269 if !(z.Op != OpConst32 && x.Op != OpConst32) { 4270 break 4271 } 4272 v.reset(OpAnd32) 4273 v.AddArg(i) 4274 v0 := b.NewValue0(v.Pos, OpAnd32, t) 4275 v0.AddArg(z) 4276 v0.AddArg(x) 4277 v.AddArg(v0) 4278 return true 4279 } 4280 // match: (And32 (Const32 <t> [c]) (And32 (Const32 <t> [d]) x)) 4281 // cond: 4282 // result: (And32 (Const32 <t> [int64(int32(c&d))]) x) 4283 for { 4284 _ = v.Args[1] 4285 v_0 := v.Args[0] 4286 if v_0.Op != OpConst32 { 4287 break 4288 } 4289 t := v_0.Type 4290 c := v_0.AuxInt 4291 v_1 := v.Args[1] 4292 if v_1.Op != OpAnd32 { 4293 break 4294 } 4295 _ = v_1.Args[1] 4296 v_1_0 := v_1.Args[0] 4297 if v_1_0.Op != OpConst32 { 4298 break 4299 } 4300 if v_1_0.Type != t { 4301 break 4302 } 4303 d := v_1_0.AuxInt 4304 x := v_1.Args[1] 4305 v.reset(OpAnd32) 4306 v0 := b.NewValue0(v.Pos, OpConst32, t) 4307 v0.AuxInt = int64(int32(c & d)) 4308 v.AddArg(v0) 4309 v.AddArg(x) 4310 return true 4311 } 4312 // match: (And32 (Const32 <t> [c]) (And32 x (Const32 <t> [d]))) 4313 // cond: 4314 // result: (And32 (Const32 <t> [int64(int32(c&d))]) x) 4315 for { 4316 _ = v.Args[1] 4317 v_0 := v.Args[0] 4318 if v_0.Op != OpConst32 { 4319 break 4320 } 4321 t := v_0.Type 4322 c := v_0.AuxInt 4323 v_1 := v.Args[1] 4324 if v_1.Op != OpAnd32 { 4325 break 4326 } 4327 _ = v_1.Args[1] 4328 x := v_1.Args[0] 4329 v_1_1 := v_1.Args[1] 4330 if v_1_1.Op != OpConst32 { 4331 break 4332 } 4333 if v_1_1.Type != t { 4334 break 4335 } 4336 d := v_1_1.AuxInt 4337 v.reset(OpAnd32) 4338 v0 := b.NewValue0(v.Pos, OpConst32, t) 4339 v0.AuxInt = int64(int32(c & d)) 4340 v.AddArg(v0) 4341 v.AddArg(x) 4342 return true 4343 } 4344 // match: (And32 (And32 (Const32 <t> [d]) x) (Const32 <t> [c])) 4345 // cond: 4346 // result: (And32 (Const32 <t> [int64(int32(c&d))]) x) 4347 for { 4348 _ = v.Args[1] 4349 v_0 := v.Args[0] 4350 if v_0.Op != OpAnd32 { 4351 break 4352 } 4353 _ = v_0.Args[1] 4354 v_0_0 := v_0.Args[0] 4355 if v_0_0.Op != OpConst32 { 4356 break 4357 } 4358 t := v_0_0.Type 4359 d := v_0_0.AuxInt 4360 x := v_0.Args[1] 4361 v_1 := v.Args[1] 4362 if v_1.Op != OpConst32 { 4363 break 4364 } 4365 if v_1.Type != t { 4366 break 4367 } 4368 c := v_1.AuxInt 4369 v.reset(OpAnd32) 4370 v0 := b.NewValue0(v.Pos, OpConst32, t) 4371 v0.AuxInt = int64(int32(c & d)) 4372 v.AddArg(v0) 4373 v.AddArg(x) 4374 return true 4375 } 4376 // match: (And32 (And32 x (Const32 <t> [d])) (Const32 <t> [c])) 4377 // cond: 4378 // result: (And32 (Const32 <t> [int64(int32(c&d))]) x) 4379 for { 4380 _ = v.Args[1] 4381 v_0 := v.Args[0] 4382 if v_0.Op != OpAnd32 { 4383 break 4384 } 4385 _ = v_0.Args[1] 4386 x := v_0.Args[0] 4387 v_0_1 := v_0.Args[1] 4388 if v_0_1.Op != OpConst32 { 4389 break 4390 } 4391 t := v_0_1.Type 4392 d := v_0_1.AuxInt 4393 v_1 := v.Args[1] 4394 if v_1.Op != OpConst32 { 4395 break 4396 } 4397 if v_1.Type != t { 4398 break 4399 } 4400 c := v_1.AuxInt 4401 v.reset(OpAnd32) 4402 v0 := b.NewValue0(v.Pos, OpConst32, t) 4403 v0.AuxInt = int64(int32(c & d)) 4404 v.AddArg(v0) 4405 v.AddArg(x) 4406 return true 4407 } 4408 return false 4409 } 4410 func rewriteValuegeneric_OpAnd64_0(v *Value) bool { 4411 // match: (And64 (Const64 [c]) (Const64 [d])) 
4412 // cond: 4413 // result: (Const64 [c&d]) 4414 for { 4415 _ = v.Args[1] 4416 v_0 := v.Args[0] 4417 if v_0.Op != OpConst64 { 4418 break 4419 } 4420 c := v_0.AuxInt 4421 v_1 := v.Args[1] 4422 if v_1.Op != OpConst64 { 4423 break 4424 } 4425 d := v_1.AuxInt 4426 v.reset(OpConst64) 4427 v.AuxInt = c & d 4428 return true 4429 } 4430 // match: (And64 (Const64 [d]) (Const64 [c])) 4431 // cond: 4432 // result: (Const64 [c&d]) 4433 for { 4434 _ = v.Args[1] 4435 v_0 := v.Args[0] 4436 if v_0.Op != OpConst64 { 4437 break 4438 } 4439 d := v_0.AuxInt 4440 v_1 := v.Args[1] 4441 if v_1.Op != OpConst64 { 4442 break 4443 } 4444 c := v_1.AuxInt 4445 v.reset(OpConst64) 4446 v.AuxInt = c & d 4447 return true 4448 } 4449 // match: (And64 x x) 4450 // cond: 4451 // result: x 4452 for { 4453 _ = v.Args[1] 4454 x := v.Args[0] 4455 if x != v.Args[1] { 4456 break 4457 } 4458 v.reset(OpCopy) 4459 v.Type = x.Type 4460 v.AddArg(x) 4461 return true 4462 } 4463 // match: (And64 (Const64 [-1]) x) 4464 // cond: 4465 // result: x 4466 for { 4467 _ = v.Args[1] 4468 v_0 := v.Args[0] 4469 if v_0.Op != OpConst64 { 4470 break 4471 } 4472 if v_0.AuxInt != -1 { 4473 break 4474 } 4475 x := v.Args[1] 4476 v.reset(OpCopy) 4477 v.Type = x.Type 4478 v.AddArg(x) 4479 return true 4480 } 4481 // match: (And64 x (Const64 [-1])) 4482 // cond: 4483 // result: x 4484 for { 4485 _ = v.Args[1] 4486 x := v.Args[0] 4487 v_1 := v.Args[1] 4488 if v_1.Op != OpConst64 { 4489 break 4490 } 4491 if v_1.AuxInt != -1 { 4492 break 4493 } 4494 v.reset(OpCopy) 4495 v.Type = x.Type 4496 v.AddArg(x) 4497 return true 4498 } 4499 // match: (And64 (Const64 [0]) _) 4500 // cond: 4501 // result: (Const64 [0]) 4502 for { 4503 _ = v.Args[1] 4504 v_0 := v.Args[0] 4505 if v_0.Op != OpConst64 { 4506 break 4507 } 4508 if v_0.AuxInt != 0 { 4509 break 4510 } 4511 v.reset(OpConst64) 4512 v.AuxInt = 0 4513 return true 4514 } 4515 // match: (And64 _ (Const64 [0])) 4516 // cond: 4517 // result: (Const64 [0]) 4518 for { 4519 _ = v.Args[1] 4520 v_1 := v.Args[1] 4521 if v_1.Op != OpConst64 { 4522 break 4523 } 4524 if v_1.AuxInt != 0 { 4525 break 4526 } 4527 v.reset(OpConst64) 4528 v.AuxInt = 0 4529 return true 4530 } 4531 // match: (And64 x (And64 x y)) 4532 // cond: 4533 // result: (And64 x y) 4534 for { 4535 _ = v.Args[1] 4536 x := v.Args[0] 4537 v_1 := v.Args[1] 4538 if v_1.Op != OpAnd64 { 4539 break 4540 } 4541 _ = v_1.Args[1] 4542 if x != v_1.Args[0] { 4543 break 4544 } 4545 y := v_1.Args[1] 4546 v.reset(OpAnd64) 4547 v.AddArg(x) 4548 v.AddArg(y) 4549 return true 4550 } 4551 // match: (And64 x (And64 y x)) 4552 // cond: 4553 // result: (And64 x y) 4554 for { 4555 _ = v.Args[1] 4556 x := v.Args[0] 4557 v_1 := v.Args[1] 4558 if v_1.Op != OpAnd64 { 4559 break 4560 } 4561 _ = v_1.Args[1] 4562 y := v_1.Args[0] 4563 if x != v_1.Args[1] { 4564 break 4565 } 4566 v.reset(OpAnd64) 4567 v.AddArg(x) 4568 v.AddArg(y) 4569 return true 4570 } 4571 // match: (And64 (And64 x y) x) 4572 // cond: 4573 // result: (And64 x y) 4574 for { 4575 _ = v.Args[1] 4576 v_0 := v.Args[0] 4577 if v_0.Op != OpAnd64 { 4578 break 4579 } 4580 _ = v_0.Args[1] 4581 x := v_0.Args[0] 4582 y := v_0.Args[1] 4583 if x != v.Args[1] { 4584 break 4585 } 4586 v.reset(OpAnd64) 4587 v.AddArg(x) 4588 v.AddArg(y) 4589 return true 4590 } 4591 return false 4592 } 4593 func rewriteValuegeneric_OpAnd64_10(v *Value) bool { 4594 b := v.Block 4595 _ = b 4596 // match: (And64 (And64 y x) x) 4597 // cond: 4598 // result: (And64 x y) 4599 for { 4600 _ = v.Args[1] 4601 v_0 := v.Args[0] 4602 if v_0.Op != OpAnd64 { 4603 break 4604 } 
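// Further down in this function, And64 with a constant mask that is a
// contiguous run of ones is rewritten into a shift pair instead of a 64-bit
// AND: a low mask y (nlz(y)+nto(y) == 64, where nlz/nto presumably count
// leading zeros and trailing ones) becomes a left shift followed by an
// unsigned right shift by nlz(y), and a high mask y (nlo(y)+ntz(y) == 64)
// becomes an unsigned right shift followed by a left shift by ntz(y). The
// nto(y) >= 32 and ntz(y) >= 32 conditions presumably restrict the rewrite to
// wide masks, where the shift pair is expected to be cheaper than
// materializing the 64-bit constant.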
4605 _ = v_0.Args[1] 4606 y := v_0.Args[0] 4607 x := v_0.Args[1] 4608 if x != v.Args[1] { 4609 break 4610 } 4611 v.reset(OpAnd64) 4612 v.AddArg(x) 4613 v.AddArg(y) 4614 return true 4615 } 4616 // match: (And64 <t> (Const64 [y]) x) 4617 // cond: nlz(y) + nto(y) == 64 && nto(y) >= 32 4618 // result: (Rsh64Ux64 (Lsh64x64 <t> x (Const64 <t> [nlz(y)])) (Const64 <t> [nlz(y)])) 4619 for { 4620 t := v.Type 4621 _ = v.Args[1] 4622 v_0 := v.Args[0] 4623 if v_0.Op != OpConst64 { 4624 break 4625 } 4626 y := v_0.AuxInt 4627 x := v.Args[1] 4628 if !(nlz(y)+nto(y) == 64 && nto(y) >= 32) { 4629 break 4630 } 4631 v.reset(OpRsh64Ux64) 4632 v0 := b.NewValue0(v.Pos, OpLsh64x64, t) 4633 v0.AddArg(x) 4634 v1 := b.NewValue0(v.Pos, OpConst64, t) 4635 v1.AuxInt = nlz(y) 4636 v0.AddArg(v1) 4637 v.AddArg(v0) 4638 v2 := b.NewValue0(v.Pos, OpConst64, t) 4639 v2.AuxInt = nlz(y) 4640 v.AddArg(v2) 4641 return true 4642 } 4643 // match: (And64 <t> x (Const64 [y])) 4644 // cond: nlz(y) + nto(y) == 64 && nto(y) >= 32 4645 // result: (Rsh64Ux64 (Lsh64x64 <t> x (Const64 <t> [nlz(y)])) (Const64 <t> [nlz(y)])) 4646 for { 4647 t := v.Type 4648 _ = v.Args[1] 4649 x := v.Args[0] 4650 v_1 := v.Args[1] 4651 if v_1.Op != OpConst64 { 4652 break 4653 } 4654 y := v_1.AuxInt 4655 if !(nlz(y)+nto(y) == 64 && nto(y) >= 32) { 4656 break 4657 } 4658 v.reset(OpRsh64Ux64) 4659 v0 := b.NewValue0(v.Pos, OpLsh64x64, t) 4660 v0.AddArg(x) 4661 v1 := b.NewValue0(v.Pos, OpConst64, t) 4662 v1.AuxInt = nlz(y) 4663 v0.AddArg(v1) 4664 v.AddArg(v0) 4665 v2 := b.NewValue0(v.Pos, OpConst64, t) 4666 v2.AuxInt = nlz(y) 4667 v.AddArg(v2) 4668 return true 4669 } 4670 // match: (And64 <t> (Const64 [y]) x) 4671 // cond: nlo(y) + ntz(y) == 64 && ntz(y) >= 32 4672 // result: (Lsh64x64 (Rsh64Ux64 <t> x (Const64 <t> [ntz(y)])) (Const64 <t> [ntz(y)])) 4673 for { 4674 t := v.Type 4675 _ = v.Args[1] 4676 v_0 := v.Args[0] 4677 if v_0.Op != OpConst64 { 4678 break 4679 } 4680 y := v_0.AuxInt 4681 x := v.Args[1] 4682 if !(nlo(y)+ntz(y) == 64 && ntz(y) >= 32) { 4683 break 4684 } 4685 v.reset(OpLsh64x64) 4686 v0 := b.NewValue0(v.Pos, OpRsh64Ux64, t) 4687 v0.AddArg(x) 4688 v1 := b.NewValue0(v.Pos, OpConst64, t) 4689 v1.AuxInt = ntz(y) 4690 v0.AddArg(v1) 4691 v.AddArg(v0) 4692 v2 := b.NewValue0(v.Pos, OpConst64, t) 4693 v2.AuxInt = ntz(y) 4694 v.AddArg(v2) 4695 return true 4696 } 4697 // match: (And64 <t> x (Const64 [y])) 4698 // cond: nlo(y) + ntz(y) == 64 && ntz(y) >= 32 4699 // result: (Lsh64x64 (Rsh64Ux64 <t> x (Const64 <t> [ntz(y)])) (Const64 <t> [ntz(y)])) 4700 for { 4701 t := v.Type 4702 _ = v.Args[1] 4703 x := v.Args[0] 4704 v_1 := v.Args[1] 4705 if v_1.Op != OpConst64 { 4706 break 4707 } 4708 y := v_1.AuxInt 4709 if !(nlo(y)+ntz(y) == 64 && ntz(y) >= 32) { 4710 break 4711 } 4712 v.reset(OpLsh64x64) 4713 v0 := b.NewValue0(v.Pos, OpRsh64Ux64, t) 4714 v0.AddArg(x) 4715 v1 := b.NewValue0(v.Pos, OpConst64, t) 4716 v1.AuxInt = ntz(y) 4717 v0.AddArg(v1) 4718 v.AddArg(v0) 4719 v2 := b.NewValue0(v.Pos, OpConst64, t) 4720 v2.AuxInt = ntz(y) 4721 v.AddArg(v2) 4722 return true 4723 } 4724 // match: (And64 (And64 i:(Const64 <t>) z) x) 4725 // cond: (z.Op != OpConst64 && x.Op != OpConst64) 4726 // result: (And64 i (And64 <t> z x)) 4727 for { 4728 _ = v.Args[1] 4729 v_0 := v.Args[0] 4730 if v_0.Op != OpAnd64 { 4731 break 4732 } 4733 _ = v_0.Args[1] 4734 i := v_0.Args[0] 4735 if i.Op != OpConst64 { 4736 break 4737 } 4738 t := i.Type 4739 z := v_0.Args[1] 4740 x := v.Args[1] 4741 if !(z.Op != OpConst64 && x.Op != OpConst64) { 4742 break 4743 } 4744 v.reset(OpAnd64) 4745 v.AddArg(i) 
4746 v0 := b.NewValue0(v.Pos, OpAnd64, t) 4747 v0.AddArg(z) 4748 v0.AddArg(x) 4749 v.AddArg(v0) 4750 return true 4751 } 4752 // match: (And64 (And64 z i:(Const64 <t>)) x) 4753 // cond: (z.Op != OpConst64 && x.Op != OpConst64) 4754 // result: (And64 i (And64 <t> z x)) 4755 for { 4756 _ = v.Args[1] 4757 v_0 := v.Args[0] 4758 if v_0.Op != OpAnd64 { 4759 break 4760 } 4761 _ = v_0.Args[1] 4762 z := v_0.Args[0] 4763 i := v_0.Args[1] 4764 if i.Op != OpConst64 { 4765 break 4766 } 4767 t := i.Type 4768 x := v.Args[1] 4769 if !(z.Op != OpConst64 && x.Op != OpConst64) { 4770 break 4771 } 4772 v.reset(OpAnd64) 4773 v.AddArg(i) 4774 v0 := b.NewValue0(v.Pos, OpAnd64, t) 4775 v0.AddArg(z) 4776 v0.AddArg(x) 4777 v.AddArg(v0) 4778 return true 4779 } 4780 // match: (And64 x (And64 i:(Const64 <t>) z)) 4781 // cond: (z.Op != OpConst64 && x.Op != OpConst64) 4782 // result: (And64 i (And64 <t> z x)) 4783 for { 4784 _ = v.Args[1] 4785 x := v.Args[0] 4786 v_1 := v.Args[1] 4787 if v_1.Op != OpAnd64 { 4788 break 4789 } 4790 _ = v_1.Args[1] 4791 i := v_1.Args[0] 4792 if i.Op != OpConst64 { 4793 break 4794 } 4795 t := i.Type 4796 z := v_1.Args[1] 4797 if !(z.Op != OpConst64 && x.Op != OpConst64) { 4798 break 4799 } 4800 v.reset(OpAnd64) 4801 v.AddArg(i) 4802 v0 := b.NewValue0(v.Pos, OpAnd64, t) 4803 v0.AddArg(z) 4804 v0.AddArg(x) 4805 v.AddArg(v0) 4806 return true 4807 } 4808 // match: (And64 x (And64 z i:(Const64 <t>))) 4809 // cond: (z.Op != OpConst64 && x.Op != OpConst64) 4810 // result: (And64 i (And64 <t> z x)) 4811 for { 4812 _ = v.Args[1] 4813 x := v.Args[0] 4814 v_1 := v.Args[1] 4815 if v_1.Op != OpAnd64 { 4816 break 4817 } 4818 _ = v_1.Args[1] 4819 z := v_1.Args[0] 4820 i := v_1.Args[1] 4821 if i.Op != OpConst64 { 4822 break 4823 } 4824 t := i.Type 4825 if !(z.Op != OpConst64 && x.Op != OpConst64) { 4826 break 4827 } 4828 v.reset(OpAnd64) 4829 v.AddArg(i) 4830 v0 := b.NewValue0(v.Pos, OpAnd64, t) 4831 v0.AddArg(z) 4832 v0.AddArg(x) 4833 v.AddArg(v0) 4834 return true 4835 } 4836 // match: (And64 (Const64 <t> [c]) (And64 (Const64 <t> [d]) x)) 4837 // cond: 4838 // result: (And64 (Const64 <t> [c&d]) x) 4839 for { 4840 _ = v.Args[1] 4841 v_0 := v.Args[0] 4842 if v_0.Op != OpConst64 { 4843 break 4844 } 4845 t := v_0.Type 4846 c := v_0.AuxInt 4847 v_1 := v.Args[1] 4848 if v_1.Op != OpAnd64 { 4849 break 4850 } 4851 _ = v_1.Args[1] 4852 v_1_0 := v_1.Args[0] 4853 if v_1_0.Op != OpConst64 { 4854 break 4855 } 4856 if v_1_0.Type != t { 4857 break 4858 } 4859 d := v_1_0.AuxInt 4860 x := v_1.Args[1] 4861 v.reset(OpAnd64) 4862 v0 := b.NewValue0(v.Pos, OpConst64, t) 4863 v0.AuxInt = c & d 4864 v.AddArg(v0) 4865 v.AddArg(x) 4866 return true 4867 } 4868 return false 4869 } 4870 func rewriteValuegeneric_OpAnd64_20(v *Value) bool { 4871 b := v.Block 4872 _ = b 4873 // match: (And64 (Const64 <t> [c]) (And64 x (Const64 <t> [d]))) 4874 // cond: 4875 // result: (And64 (Const64 <t> [c&d]) x) 4876 for { 4877 _ = v.Args[1] 4878 v_0 := v.Args[0] 4879 if v_0.Op != OpConst64 { 4880 break 4881 } 4882 t := v_0.Type 4883 c := v_0.AuxInt 4884 v_1 := v.Args[1] 4885 if v_1.Op != OpAnd64 { 4886 break 4887 } 4888 _ = v_1.Args[1] 4889 x := v_1.Args[0] 4890 v_1_1 := v_1.Args[1] 4891 if v_1_1.Op != OpConst64 { 4892 break 4893 } 4894 if v_1_1.Type != t { 4895 break 4896 } 4897 d := v_1_1.AuxInt 4898 v.reset(OpAnd64) 4899 v0 := b.NewValue0(v.Pos, OpConst64, t) 4900 v0.AuxInt = c & d 4901 v.AddArg(v0) 4902 v.AddArg(x) 4903 return true 4904 } 4905 // match: (And64 (And64 (Const64 <t> [d]) x) (Const64 <t> [c])) 4906 // cond: 4907 // result: (And64 
(Const64 <t> [c&d]) x) 4908 for { 4909 _ = v.Args[1] 4910 v_0 := v.Args[0] 4911 if v_0.Op != OpAnd64 { 4912 break 4913 } 4914 _ = v_0.Args[1] 4915 v_0_0 := v_0.Args[0] 4916 if v_0_0.Op != OpConst64 { 4917 break 4918 } 4919 t := v_0_0.Type 4920 d := v_0_0.AuxInt 4921 x := v_0.Args[1] 4922 v_1 := v.Args[1] 4923 if v_1.Op != OpConst64 { 4924 break 4925 } 4926 if v_1.Type != t { 4927 break 4928 } 4929 c := v_1.AuxInt 4930 v.reset(OpAnd64) 4931 v0 := b.NewValue0(v.Pos, OpConst64, t) 4932 v0.AuxInt = c & d 4933 v.AddArg(v0) 4934 v.AddArg(x) 4935 return true 4936 } 4937 // match: (And64 (And64 x (Const64 <t> [d])) (Const64 <t> [c])) 4938 // cond: 4939 // result: (And64 (Const64 <t> [c&d]) x) 4940 for { 4941 _ = v.Args[1] 4942 v_0 := v.Args[0] 4943 if v_0.Op != OpAnd64 { 4944 break 4945 } 4946 _ = v_0.Args[1] 4947 x := v_0.Args[0] 4948 v_0_1 := v_0.Args[1] 4949 if v_0_1.Op != OpConst64 { 4950 break 4951 } 4952 t := v_0_1.Type 4953 d := v_0_1.AuxInt 4954 v_1 := v.Args[1] 4955 if v_1.Op != OpConst64 { 4956 break 4957 } 4958 if v_1.Type != t { 4959 break 4960 } 4961 c := v_1.AuxInt 4962 v.reset(OpAnd64) 4963 v0 := b.NewValue0(v.Pos, OpConst64, t) 4964 v0.AuxInt = c & d 4965 v.AddArg(v0) 4966 v.AddArg(x) 4967 return true 4968 } 4969 return false 4970 } 4971 func rewriteValuegeneric_OpAnd8_0(v *Value) bool { 4972 // match: (And8 (Const8 [c]) (Const8 [d])) 4973 // cond: 4974 // result: (Const8 [int64(int8(c&d))]) 4975 for { 4976 _ = v.Args[1] 4977 v_0 := v.Args[0] 4978 if v_0.Op != OpConst8 { 4979 break 4980 } 4981 c := v_0.AuxInt 4982 v_1 := v.Args[1] 4983 if v_1.Op != OpConst8 { 4984 break 4985 } 4986 d := v_1.AuxInt 4987 v.reset(OpConst8) 4988 v.AuxInt = int64(int8(c & d)) 4989 return true 4990 } 4991 // match: (And8 (Const8 [d]) (Const8 [c])) 4992 // cond: 4993 // result: (Const8 [int64(int8(c&d))]) 4994 for { 4995 _ = v.Args[1] 4996 v_0 := v.Args[0] 4997 if v_0.Op != OpConst8 { 4998 break 4999 } 5000 d := v_0.AuxInt 5001 v_1 := v.Args[1] 5002 if v_1.Op != OpConst8 { 5003 break 5004 } 5005 c := v_1.AuxInt 5006 v.reset(OpConst8) 5007 v.AuxInt = int64(int8(c & d)) 5008 return true 5009 } 5010 // match: (And8 x x) 5011 // cond: 5012 // result: x 5013 for { 5014 _ = v.Args[1] 5015 x := v.Args[0] 5016 if x != v.Args[1] { 5017 break 5018 } 5019 v.reset(OpCopy) 5020 v.Type = x.Type 5021 v.AddArg(x) 5022 return true 5023 } 5024 // match: (And8 (Const8 [-1]) x) 5025 // cond: 5026 // result: x 5027 for { 5028 _ = v.Args[1] 5029 v_0 := v.Args[0] 5030 if v_0.Op != OpConst8 { 5031 break 5032 } 5033 if v_0.AuxInt != -1 { 5034 break 5035 } 5036 x := v.Args[1] 5037 v.reset(OpCopy) 5038 v.Type = x.Type 5039 v.AddArg(x) 5040 return true 5041 } 5042 // match: (And8 x (Const8 [-1])) 5043 // cond: 5044 // result: x 5045 for { 5046 _ = v.Args[1] 5047 x := v.Args[0] 5048 v_1 := v.Args[1] 5049 if v_1.Op != OpConst8 { 5050 break 5051 } 5052 if v_1.AuxInt != -1 { 5053 break 5054 } 5055 v.reset(OpCopy) 5056 v.Type = x.Type 5057 v.AddArg(x) 5058 return true 5059 } 5060 // match: (And8 (Const8 [0]) _) 5061 // cond: 5062 // result: (Const8 [0]) 5063 for { 5064 _ = v.Args[1] 5065 v_0 := v.Args[0] 5066 if v_0.Op != OpConst8 { 5067 break 5068 } 5069 if v_0.AuxInt != 0 { 5070 break 5071 } 5072 v.reset(OpConst8) 5073 v.AuxInt = 0 5074 return true 5075 } 5076 // match: (And8 _ (Const8 [0])) 5077 // cond: 5078 // result: (Const8 [0]) 5079 for { 5080 _ = v.Args[1] 5081 v_1 := v.Args[1] 5082 if v_1.Op != OpConst8 { 5083 break 5084 } 5085 if v_1.AuxInt != 0 { 5086 break 5087 } 5088 v.reset(OpConst8) 5089 v.AuxInt = 0 5090 return true 5091 
} 5092 // match: (And8 x (And8 x y)) 5093 // cond: 5094 // result: (And8 x y) 5095 for { 5096 _ = v.Args[1] 5097 x := v.Args[0] 5098 v_1 := v.Args[1] 5099 if v_1.Op != OpAnd8 { 5100 break 5101 } 5102 _ = v_1.Args[1] 5103 if x != v_1.Args[0] { 5104 break 5105 } 5106 y := v_1.Args[1] 5107 v.reset(OpAnd8) 5108 v.AddArg(x) 5109 v.AddArg(y) 5110 return true 5111 } 5112 // match: (And8 x (And8 y x)) 5113 // cond: 5114 // result: (And8 x y) 5115 for { 5116 _ = v.Args[1] 5117 x := v.Args[0] 5118 v_1 := v.Args[1] 5119 if v_1.Op != OpAnd8 { 5120 break 5121 } 5122 _ = v_1.Args[1] 5123 y := v_1.Args[0] 5124 if x != v_1.Args[1] { 5125 break 5126 } 5127 v.reset(OpAnd8) 5128 v.AddArg(x) 5129 v.AddArg(y) 5130 return true 5131 } 5132 // match: (And8 (And8 x y) x) 5133 // cond: 5134 // result: (And8 x y) 5135 for { 5136 _ = v.Args[1] 5137 v_0 := v.Args[0] 5138 if v_0.Op != OpAnd8 { 5139 break 5140 } 5141 _ = v_0.Args[1] 5142 x := v_0.Args[0] 5143 y := v_0.Args[1] 5144 if x != v.Args[1] { 5145 break 5146 } 5147 v.reset(OpAnd8) 5148 v.AddArg(x) 5149 v.AddArg(y) 5150 return true 5151 } 5152 return false 5153 } 5154 func rewriteValuegeneric_OpAnd8_10(v *Value) bool { 5155 b := v.Block 5156 _ = b 5157 // match: (And8 (And8 y x) x) 5158 // cond: 5159 // result: (And8 x y) 5160 for { 5161 _ = v.Args[1] 5162 v_0 := v.Args[0] 5163 if v_0.Op != OpAnd8 { 5164 break 5165 } 5166 _ = v_0.Args[1] 5167 y := v_0.Args[0] 5168 x := v_0.Args[1] 5169 if x != v.Args[1] { 5170 break 5171 } 5172 v.reset(OpAnd8) 5173 v.AddArg(x) 5174 v.AddArg(y) 5175 return true 5176 } 5177 // match: (And8 (And8 i:(Const8 <t>) z) x) 5178 // cond: (z.Op != OpConst8 && x.Op != OpConst8) 5179 // result: (And8 i (And8 <t> z x)) 5180 for { 5181 _ = v.Args[1] 5182 v_0 := v.Args[0] 5183 if v_0.Op != OpAnd8 { 5184 break 5185 } 5186 _ = v_0.Args[1] 5187 i := v_0.Args[0] 5188 if i.Op != OpConst8 { 5189 break 5190 } 5191 t := i.Type 5192 z := v_0.Args[1] 5193 x := v.Args[1] 5194 if !(z.Op != OpConst8 && x.Op != OpConst8) { 5195 break 5196 } 5197 v.reset(OpAnd8) 5198 v.AddArg(i) 5199 v0 := b.NewValue0(v.Pos, OpAnd8, t) 5200 v0.AddArg(z) 5201 v0.AddArg(x) 5202 v.AddArg(v0) 5203 return true 5204 } 5205 // match: (And8 (And8 z i:(Const8 <t>)) x) 5206 // cond: (z.Op != OpConst8 && x.Op != OpConst8) 5207 // result: (And8 i (And8 <t> z x)) 5208 for { 5209 _ = v.Args[1] 5210 v_0 := v.Args[0] 5211 if v_0.Op != OpAnd8 { 5212 break 5213 } 5214 _ = v_0.Args[1] 5215 z := v_0.Args[0] 5216 i := v_0.Args[1] 5217 if i.Op != OpConst8 { 5218 break 5219 } 5220 t := i.Type 5221 x := v.Args[1] 5222 if !(z.Op != OpConst8 && x.Op != OpConst8) { 5223 break 5224 } 5225 v.reset(OpAnd8) 5226 v.AddArg(i) 5227 v0 := b.NewValue0(v.Pos, OpAnd8, t) 5228 v0.AddArg(z) 5229 v0.AddArg(x) 5230 v.AddArg(v0) 5231 return true 5232 } 5233 // match: (And8 x (And8 i:(Const8 <t>) z)) 5234 // cond: (z.Op != OpConst8 && x.Op != OpConst8) 5235 // result: (And8 i (And8 <t> z x)) 5236 for { 5237 _ = v.Args[1] 5238 x := v.Args[0] 5239 v_1 := v.Args[1] 5240 if v_1.Op != OpAnd8 { 5241 break 5242 } 5243 _ = v_1.Args[1] 5244 i := v_1.Args[0] 5245 if i.Op != OpConst8 { 5246 break 5247 } 5248 t := i.Type 5249 z := v_1.Args[1] 5250 if !(z.Op != OpConst8 && x.Op != OpConst8) { 5251 break 5252 } 5253 v.reset(OpAnd8) 5254 v.AddArg(i) 5255 v0 := b.NewValue0(v.Pos, OpAnd8, t) 5256 v0.AddArg(z) 5257 v0.AddArg(x) 5258 v.AddArg(v0) 5259 return true 5260 } 5261 // match: (And8 x (And8 z i:(Const8 <t>))) 5262 // cond: (z.Op != OpConst8 && x.Op != OpConst8) 5263 // result: (And8 i (And8 <t> z x)) 5264 for { 5265 _ = v.Args[1] 
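// As in the other generated matchers, each rule body begins with
// "_ = v.Args[1]" before the individual v.Args[0]/v.Args[1] loads; this is
// presumably emitted by the rule generator as an explicit arity assertion
// (and a hint that lets the compiler prove the later index expressions are in
// bounds) rather than for any value it computes.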
5266 x := v.Args[0] 5267 v_1 := v.Args[1] 5268 if v_1.Op != OpAnd8 { 5269 break 5270 } 5271 _ = v_1.Args[1] 5272 z := v_1.Args[0] 5273 i := v_1.Args[1] 5274 if i.Op != OpConst8 { 5275 break 5276 } 5277 t := i.Type 5278 if !(z.Op != OpConst8 && x.Op != OpConst8) { 5279 break 5280 } 5281 v.reset(OpAnd8) 5282 v.AddArg(i) 5283 v0 := b.NewValue0(v.Pos, OpAnd8, t) 5284 v0.AddArg(z) 5285 v0.AddArg(x) 5286 v.AddArg(v0) 5287 return true 5288 } 5289 // match: (And8 (Const8 <t> [c]) (And8 (Const8 <t> [d]) x)) 5290 // cond: 5291 // result: (And8 (Const8 <t> [int64(int8(c&d))]) x) 5292 for { 5293 _ = v.Args[1] 5294 v_0 := v.Args[0] 5295 if v_0.Op != OpConst8 { 5296 break 5297 } 5298 t := v_0.Type 5299 c := v_0.AuxInt 5300 v_1 := v.Args[1] 5301 if v_1.Op != OpAnd8 { 5302 break 5303 } 5304 _ = v_1.Args[1] 5305 v_1_0 := v_1.Args[0] 5306 if v_1_0.Op != OpConst8 { 5307 break 5308 } 5309 if v_1_0.Type != t { 5310 break 5311 } 5312 d := v_1_0.AuxInt 5313 x := v_1.Args[1] 5314 v.reset(OpAnd8) 5315 v0 := b.NewValue0(v.Pos, OpConst8, t) 5316 v0.AuxInt = int64(int8(c & d)) 5317 v.AddArg(v0) 5318 v.AddArg(x) 5319 return true 5320 } 5321 // match: (And8 (Const8 <t> [c]) (And8 x (Const8 <t> [d]))) 5322 // cond: 5323 // result: (And8 (Const8 <t> [int64(int8(c&d))]) x) 5324 for { 5325 _ = v.Args[1] 5326 v_0 := v.Args[0] 5327 if v_0.Op != OpConst8 { 5328 break 5329 } 5330 t := v_0.Type 5331 c := v_0.AuxInt 5332 v_1 := v.Args[1] 5333 if v_1.Op != OpAnd8 { 5334 break 5335 } 5336 _ = v_1.Args[1] 5337 x := v_1.Args[0] 5338 v_1_1 := v_1.Args[1] 5339 if v_1_1.Op != OpConst8 { 5340 break 5341 } 5342 if v_1_1.Type != t { 5343 break 5344 } 5345 d := v_1_1.AuxInt 5346 v.reset(OpAnd8) 5347 v0 := b.NewValue0(v.Pos, OpConst8, t) 5348 v0.AuxInt = int64(int8(c & d)) 5349 v.AddArg(v0) 5350 v.AddArg(x) 5351 return true 5352 } 5353 // match: (And8 (And8 (Const8 <t> [d]) x) (Const8 <t> [c])) 5354 // cond: 5355 // result: (And8 (Const8 <t> [int64(int8(c&d))]) x) 5356 for { 5357 _ = v.Args[1] 5358 v_0 := v.Args[0] 5359 if v_0.Op != OpAnd8 { 5360 break 5361 } 5362 _ = v_0.Args[1] 5363 v_0_0 := v_0.Args[0] 5364 if v_0_0.Op != OpConst8 { 5365 break 5366 } 5367 t := v_0_0.Type 5368 d := v_0_0.AuxInt 5369 x := v_0.Args[1] 5370 v_1 := v.Args[1] 5371 if v_1.Op != OpConst8 { 5372 break 5373 } 5374 if v_1.Type != t { 5375 break 5376 } 5377 c := v_1.AuxInt 5378 v.reset(OpAnd8) 5379 v0 := b.NewValue0(v.Pos, OpConst8, t) 5380 v0.AuxInt = int64(int8(c & d)) 5381 v.AddArg(v0) 5382 v.AddArg(x) 5383 return true 5384 } 5385 // match: (And8 (And8 x (Const8 <t> [d])) (Const8 <t> [c])) 5386 // cond: 5387 // result: (And8 (Const8 <t> [int64(int8(c&d))]) x) 5388 for { 5389 _ = v.Args[1] 5390 v_0 := v.Args[0] 5391 if v_0.Op != OpAnd8 { 5392 break 5393 } 5394 _ = v_0.Args[1] 5395 x := v_0.Args[0] 5396 v_0_1 := v_0.Args[1] 5397 if v_0_1.Op != OpConst8 { 5398 break 5399 } 5400 t := v_0_1.Type 5401 d := v_0_1.AuxInt 5402 v_1 := v.Args[1] 5403 if v_1.Op != OpConst8 { 5404 break 5405 } 5406 if v_1.Type != t { 5407 break 5408 } 5409 c := v_1.AuxInt 5410 v.reset(OpAnd8) 5411 v0 := b.NewValue0(v.Pos, OpConst8, t) 5412 v0.AuxInt = int64(int8(c & d)) 5413 v.AddArg(v0) 5414 v.AddArg(x) 5415 return true 5416 } 5417 return false 5418 } 5419 func rewriteValuegeneric_OpArg_0(v *Value) bool { 5420 b := v.Block 5421 _ = b 5422 config := b.Func.Config 5423 _ = config 5424 fe := b.Func.fe 5425 _ = fe 5426 typ := &b.Func.Config.Types 5427 _ = typ 5428 // match: (Arg {n} [off]) 5429 // cond: v.Type.IsString() 5430 // result: (StringMake (Arg <typ.BytePtr> {n} [off]) (Arg <typ.Int> 
{n} [off+config.PtrSize])) 5431 for { 5432 off := v.AuxInt 5433 n := v.Aux 5434 if !(v.Type.IsString()) { 5435 break 5436 } 5437 v.reset(OpStringMake) 5438 v0 := b.NewValue0(v.Pos, OpArg, typ.BytePtr) 5439 v0.AuxInt = off 5440 v0.Aux = n 5441 v.AddArg(v0) 5442 v1 := b.NewValue0(v.Pos, OpArg, typ.Int) 5443 v1.AuxInt = off + config.PtrSize 5444 v1.Aux = n 5445 v.AddArg(v1) 5446 return true 5447 } 5448 // match: (Arg {n} [off]) 5449 // cond: v.Type.IsSlice() 5450 // result: (SliceMake (Arg <v.Type.ElemType().PtrTo()> {n} [off]) (Arg <typ.Int> {n} [off+config.PtrSize]) (Arg <typ.Int> {n} [off+2*config.PtrSize])) 5451 for { 5452 off := v.AuxInt 5453 n := v.Aux 5454 if !(v.Type.IsSlice()) { 5455 break 5456 } 5457 v.reset(OpSliceMake) 5458 v0 := b.NewValue0(v.Pos, OpArg, v.Type.ElemType().PtrTo()) 5459 v0.AuxInt = off 5460 v0.Aux = n 5461 v.AddArg(v0) 5462 v1 := b.NewValue0(v.Pos, OpArg, typ.Int) 5463 v1.AuxInt = off + config.PtrSize 5464 v1.Aux = n 5465 v.AddArg(v1) 5466 v2 := b.NewValue0(v.Pos, OpArg, typ.Int) 5467 v2.AuxInt = off + 2*config.PtrSize 5468 v2.Aux = n 5469 v.AddArg(v2) 5470 return true 5471 } 5472 // match: (Arg {n} [off]) 5473 // cond: v.Type.IsInterface() 5474 // result: (IMake (Arg <typ.BytePtr> {n} [off]) (Arg <typ.BytePtr> {n} [off+config.PtrSize])) 5475 for { 5476 off := v.AuxInt 5477 n := v.Aux 5478 if !(v.Type.IsInterface()) { 5479 break 5480 } 5481 v.reset(OpIMake) 5482 v0 := b.NewValue0(v.Pos, OpArg, typ.BytePtr) 5483 v0.AuxInt = off 5484 v0.Aux = n 5485 v.AddArg(v0) 5486 v1 := b.NewValue0(v.Pos, OpArg, typ.BytePtr) 5487 v1.AuxInt = off + config.PtrSize 5488 v1.Aux = n 5489 v.AddArg(v1) 5490 return true 5491 } 5492 // match: (Arg {n} [off]) 5493 // cond: v.Type.IsComplex() && v.Type.Size() == 16 5494 // result: (ComplexMake (Arg <typ.Float64> {n} [off]) (Arg <typ.Float64> {n} [off+8])) 5495 for { 5496 off := v.AuxInt 5497 n := v.Aux 5498 if !(v.Type.IsComplex() && v.Type.Size() == 16) { 5499 break 5500 } 5501 v.reset(OpComplexMake) 5502 v0 := b.NewValue0(v.Pos, OpArg, typ.Float64) 5503 v0.AuxInt = off 5504 v0.Aux = n 5505 v.AddArg(v0) 5506 v1 := b.NewValue0(v.Pos, OpArg, typ.Float64) 5507 v1.AuxInt = off + 8 5508 v1.Aux = n 5509 v.AddArg(v1) 5510 return true 5511 } 5512 // match: (Arg {n} [off]) 5513 // cond: v.Type.IsComplex() && v.Type.Size() == 8 5514 // result: (ComplexMake (Arg <typ.Float32> {n} [off]) (Arg <typ.Float32> {n} [off+4])) 5515 for { 5516 off := v.AuxInt 5517 n := v.Aux 5518 if !(v.Type.IsComplex() && v.Type.Size() == 8) { 5519 break 5520 } 5521 v.reset(OpComplexMake) 5522 v0 := b.NewValue0(v.Pos, OpArg, typ.Float32) 5523 v0.AuxInt = off 5524 v0.Aux = n 5525 v.AddArg(v0) 5526 v1 := b.NewValue0(v.Pos, OpArg, typ.Float32) 5527 v1.AuxInt = off + 4 5528 v1.Aux = n 5529 v.AddArg(v1) 5530 return true 5531 } 5532 // match: (Arg <t>) 5533 // cond: t.IsStruct() && t.NumFields() == 0 && fe.CanSSA(t) 5534 // result: (StructMake0) 5535 for { 5536 t := v.Type 5537 if !(t.IsStruct() && t.NumFields() == 0 && fe.CanSSA(t)) { 5538 break 5539 } 5540 v.reset(OpStructMake0) 5541 return true 5542 } 5543 // match: (Arg <t> {n} [off]) 5544 // cond: t.IsStruct() && t.NumFields() == 1 && fe.CanSSA(t) 5545 // result: (StructMake1 (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)])) 5546 for { 5547 t := v.Type 5548 off := v.AuxInt 5549 n := v.Aux 5550 if !(t.IsStruct() && t.NumFields() == 1 && fe.CanSSA(t)) { 5551 break 5552 } 5553 v.reset(OpStructMake1) 5554 v0 := b.NewValue0(v.Pos, OpArg, t.FieldType(0)) 5555 v0.AuxInt = off + t.FieldOff(0) 5556 v0.Aux = n 5557 v.AddArg(v0) 5558 
return true 5559 } 5560 // match: (Arg <t> {n} [off]) 5561 // cond: t.IsStruct() && t.NumFields() == 2 && fe.CanSSA(t) 5562 // result: (StructMake2 (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)]) (Arg <t.FieldType(1)> {n} [off+t.FieldOff(1)])) 5563 for { 5564 t := v.Type 5565 off := v.AuxInt 5566 n := v.Aux 5567 if !(t.IsStruct() && t.NumFields() == 2 && fe.CanSSA(t)) { 5568 break 5569 } 5570 v.reset(OpStructMake2) 5571 v0 := b.NewValue0(v.Pos, OpArg, t.FieldType(0)) 5572 v0.AuxInt = off + t.FieldOff(0) 5573 v0.Aux = n 5574 v.AddArg(v0) 5575 v1 := b.NewValue0(v.Pos, OpArg, t.FieldType(1)) 5576 v1.AuxInt = off + t.FieldOff(1) 5577 v1.Aux = n 5578 v.AddArg(v1) 5579 return true 5580 } 5581 // match: (Arg <t> {n} [off]) 5582 // cond: t.IsStruct() && t.NumFields() == 3 && fe.CanSSA(t) 5583 // result: (StructMake3 (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)]) (Arg <t.FieldType(1)> {n} [off+t.FieldOff(1)]) (Arg <t.FieldType(2)> {n} [off+t.FieldOff(2)])) 5584 for { 5585 t := v.Type 5586 off := v.AuxInt 5587 n := v.Aux 5588 if !(t.IsStruct() && t.NumFields() == 3 && fe.CanSSA(t)) { 5589 break 5590 } 5591 v.reset(OpStructMake3) 5592 v0 := b.NewValue0(v.Pos, OpArg, t.FieldType(0)) 5593 v0.AuxInt = off + t.FieldOff(0) 5594 v0.Aux = n 5595 v.AddArg(v0) 5596 v1 := b.NewValue0(v.Pos, OpArg, t.FieldType(1)) 5597 v1.AuxInt = off + t.FieldOff(1) 5598 v1.Aux = n 5599 v.AddArg(v1) 5600 v2 := b.NewValue0(v.Pos, OpArg, t.FieldType(2)) 5601 v2.AuxInt = off + t.FieldOff(2) 5602 v2.Aux = n 5603 v.AddArg(v2) 5604 return true 5605 } 5606 // match: (Arg <t> {n} [off]) 5607 // cond: t.IsStruct() && t.NumFields() == 4 && fe.CanSSA(t) 5608 // result: (StructMake4 (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)]) (Arg <t.FieldType(1)> {n} [off+t.FieldOff(1)]) (Arg <t.FieldType(2)> {n} [off+t.FieldOff(2)]) (Arg <t.FieldType(3)> {n} [off+t.FieldOff(3)])) 5609 for { 5610 t := v.Type 5611 off := v.AuxInt 5612 n := v.Aux 5613 if !(t.IsStruct() && t.NumFields() == 4 && fe.CanSSA(t)) { 5614 break 5615 } 5616 v.reset(OpStructMake4) 5617 v0 := b.NewValue0(v.Pos, OpArg, t.FieldType(0)) 5618 v0.AuxInt = off + t.FieldOff(0) 5619 v0.Aux = n 5620 v.AddArg(v0) 5621 v1 := b.NewValue0(v.Pos, OpArg, t.FieldType(1)) 5622 v1.AuxInt = off + t.FieldOff(1) 5623 v1.Aux = n 5624 v.AddArg(v1) 5625 v2 := b.NewValue0(v.Pos, OpArg, t.FieldType(2)) 5626 v2.AuxInt = off + t.FieldOff(2) 5627 v2.Aux = n 5628 v.AddArg(v2) 5629 v3 := b.NewValue0(v.Pos, OpArg, t.FieldType(3)) 5630 v3.AuxInt = off + t.FieldOff(3) 5631 v3.Aux = n 5632 v.AddArg(v3) 5633 return true 5634 } 5635 return false 5636 } 5637 func rewriteValuegeneric_OpArg_10(v *Value) bool { 5638 b := v.Block 5639 _ = b 5640 fe := b.Func.fe 5641 _ = fe 5642 // match: (Arg <t>) 5643 // cond: t.IsArray() && t.NumElem() == 0 5644 // result: (ArrayMake0) 5645 for { 5646 t := v.Type 5647 if !(t.IsArray() && t.NumElem() == 0) { 5648 break 5649 } 5650 v.reset(OpArrayMake0) 5651 return true 5652 } 5653 // match: (Arg <t> {n} [off]) 5654 // cond: t.IsArray() && t.NumElem() == 1 && fe.CanSSA(t) 5655 // result: (ArrayMake1 (Arg <t.ElemType()> {n} [off])) 5656 for { 5657 t := v.Type 5658 off := v.AuxInt 5659 n := v.Aux 5660 if !(t.IsArray() && t.NumElem() == 1 && fe.CanSSA(t)) { 5661 break 5662 } 5663 v.reset(OpArrayMake1) 5664 v0 := b.NewValue0(v.Pos, OpArg, t.ElemType()) 5665 v0.AuxInt = off 5666 v0.Aux = n 5667 v.AddArg(v0) 5668 return true 5669 } 5670 return false 5671 } 5672 func rewriteValuegeneric_OpArraySelect_0(v *Value) bool { 5673 // match: (ArraySelect (ArrayMake1 x)) 5674 // cond: 5675 // result: x 
5676 for { 5677 v_0 := v.Args[0] 5678 if v_0.Op != OpArrayMake1 { 5679 break 5680 } 5681 x := v_0.Args[0] 5682 v.reset(OpCopy) 5683 v.Type = x.Type 5684 v.AddArg(x) 5685 return true 5686 } 5687 // match: (ArraySelect [0] (Load ptr mem)) 5688 // cond: 5689 // result: (Load ptr mem) 5690 for { 5691 if v.AuxInt != 0 { 5692 break 5693 } 5694 v_0 := v.Args[0] 5695 if v_0.Op != OpLoad { 5696 break 5697 } 5698 _ = v_0.Args[1] 5699 ptr := v_0.Args[0] 5700 mem := v_0.Args[1] 5701 v.reset(OpLoad) 5702 v.AddArg(ptr) 5703 v.AddArg(mem) 5704 return true 5705 } 5706 // match: (ArraySelect [0] x:(IData _)) 5707 // cond: 5708 // result: x 5709 for { 5710 if v.AuxInt != 0 { 5711 break 5712 } 5713 x := v.Args[0] 5714 if x.Op != OpIData { 5715 break 5716 } 5717 v.reset(OpCopy) 5718 v.Type = x.Type 5719 v.AddArg(x) 5720 return true 5721 } 5722 return false 5723 } 5724 func rewriteValuegeneric_OpCom16_0(v *Value) bool { 5725 // match: (Com16 (Com16 x)) 5726 // cond: 5727 // result: x 5728 for { 5729 v_0 := v.Args[0] 5730 if v_0.Op != OpCom16 { 5731 break 5732 } 5733 x := v_0.Args[0] 5734 v.reset(OpCopy) 5735 v.Type = x.Type 5736 v.AddArg(x) 5737 return true 5738 } 5739 // match: (Com16 (Const16 [c])) 5740 // cond: 5741 // result: (Const16 [^c]) 5742 for { 5743 v_0 := v.Args[0] 5744 if v_0.Op != OpConst16 { 5745 break 5746 } 5747 c := v_0.AuxInt 5748 v.reset(OpConst16) 5749 v.AuxInt = ^c 5750 return true 5751 } 5752 return false 5753 } 5754 func rewriteValuegeneric_OpCom32_0(v *Value) bool { 5755 // match: (Com32 (Com32 x)) 5756 // cond: 5757 // result: x 5758 for { 5759 v_0 := v.Args[0] 5760 if v_0.Op != OpCom32 { 5761 break 5762 } 5763 x := v_0.Args[0] 5764 v.reset(OpCopy) 5765 v.Type = x.Type 5766 v.AddArg(x) 5767 return true 5768 } 5769 // match: (Com32 (Const32 [c])) 5770 // cond: 5771 // result: (Const32 [^c]) 5772 for { 5773 v_0 := v.Args[0] 5774 if v_0.Op != OpConst32 { 5775 break 5776 } 5777 c := v_0.AuxInt 5778 v.reset(OpConst32) 5779 v.AuxInt = ^c 5780 return true 5781 } 5782 return false 5783 } 5784 func rewriteValuegeneric_OpCom64_0(v *Value) bool { 5785 // match: (Com64 (Com64 x)) 5786 // cond: 5787 // result: x 5788 for { 5789 v_0 := v.Args[0] 5790 if v_0.Op != OpCom64 { 5791 break 5792 } 5793 x := v_0.Args[0] 5794 v.reset(OpCopy) 5795 v.Type = x.Type 5796 v.AddArg(x) 5797 return true 5798 } 5799 // match: (Com64 (Const64 [c])) 5800 // cond: 5801 // result: (Const64 [^c]) 5802 for { 5803 v_0 := v.Args[0] 5804 if v_0.Op != OpConst64 { 5805 break 5806 } 5807 c := v_0.AuxInt 5808 v.reset(OpConst64) 5809 v.AuxInt = ^c 5810 return true 5811 } 5812 return false 5813 } 5814 func rewriteValuegeneric_OpCom8_0(v *Value) bool { 5815 // match: (Com8 (Com8 x)) 5816 // cond: 5817 // result: x 5818 for { 5819 v_0 := v.Args[0] 5820 if v_0.Op != OpCom8 { 5821 break 5822 } 5823 x := v_0.Args[0] 5824 v.reset(OpCopy) 5825 v.Type = x.Type 5826 v.AddArg(x) 5827 return true 5828 } 5829 // match: (Com8 (Const8 [c])) 5830 // cond: 5831 // result: (Const8 [^c]) 5832 for { 5833 v_0 := v.Args[0] 5834 if v_0.Op != OpConst8 { 5835 break 5836 } 5837 c := v_0.AuxInt 5838 v.reset(OpConst8) 5839 v.AuxInt = ^c 5840 return true 5841 } 5842 return false 5843 } 5844 func rewriteValuegeneric_OpConstInterface_0(v *Value) bool { 5845 b := v.Block 5846 _ = b 5847 typ := &b.Func.Config.Types 5848 _ = typ 5849 // match: (ConstInterface) 5850 // cond: 5851 // result: (IMake (ConstNil <typ.BytePtr>) (ConstNil <typ.BytePtr>)) 5852 for { 5853 v.reset(OpIMake) 5854 v0 := b.NewValue0(v.Pos, OpConstNil, typ.BytePtr) 5855 v.AddArg(v0) 5856 v1 := 
b.NewValue0(v.Pos, OpConstNil, typ.BytePtr) 5857 v.AddArg(v1) 5858 return true 5859 } 5860 } 5861 func rewriteValuegeneric_OpConstSlice_0(v *Value) bool { 5862 b := v.Block 5863 _ = b 5864 config := b.Func.Config 5865 _ = config 5866 typ := &b.Func.Config.Types 5867 _ = typ 5868 // match: (ConstSlice) 5869 // cond: config.PtrSize == 4 5870 // result: (SliceMake (ConstNil <v.Type.ElemType().PtrTo()>) (Const32 <typ.Int> [0]) (Const32 <typ.Int> [0])) 5871 for { 5872 if !(config.PtrSize == 4) { 5873 break 5874 } 5875 v.reset(OpSliceMake) 5876 v0 := b.NewValue0(v.Pos, OpConstNil, v.Type.ElemType().PtrTo()) 5877 v.AddArg(v0) 5878 v1 := b.NewValue0(v.Pos, OpConst32, typ.Int) 5879 v1.AuxInt = 0 5880 v.AddArg(v1) 5881 v2 := b.NewValue0(v.Pos, OpConst32, typ.Int) 5882 v2.AuxInt = 0 5883 v.AddArg(v2) 5884 return true 5885 } 5886 // match: (ConstSlice) 5887 // cond: config.PtrSize == 8 5888 // result: (SliceMake (ConstNil <v.Type.ElemType().PtrTo()>) (Const64 <typ.Int> [0]) (Const64 <typ.Int> [0])) 5889 for { 5890 if !(config.PtrSize == 8) { 5891 break 5892 } 5893 v.reset(OpSliceMake) 5894 v0 := b.NewValue0(v.Pos, OpConstNil, v.Type.ElemType().PtrTo()) 5895 v.AddArg(v0) 5896 v1 := b.NewValue0(v.Pos, OpConst64, typ.Int) 5897 v1.AuxInt = 0 5898 v.AddArg(v1) 5899 v2 := b.NewValue0(v.Pos, OpConst64, typ.Int) 5900 v2.AuxInt = 0 5901 v.AddArg(v2) 5902 return true 5903 } 5904 return false 5905 } 5906 func rewriteValuegeneric_OpConstString_0(v *Value) bool { 5907 b := v.Block 5908 _ = b 5909 config := b.Func.Config 5910 _ = config 5911 fe := b.Func.fe 5912 _ = fe 5913 typ := &b.Func.Config.Types 5914 _ = typ 5915 // match: (ConstString {s}) 5916 // cond: config.PtrSize == 4 && s.(string) == "" 5917 // result: (StringMake (ConstNil) (Const32 <typ.Int> [0])) 5918 for { 5919 s := v.Aux 5920 if !(config.PtrSize == 4 && s.(string) == "") { 5921 break 5922 } 5923 v.reset(OpStringMake) 5924 v0 := b.NewValue0(v.Pos, OpConstNil, typ.BytePtr) 5925 v.AddArg(v0) 5926 v1 := b.NewValue0(v.Pos, OpConst32, typ.Int) 5927 v1.AuxInt = 0 5928 v.AddArg(v1) 5929 return true 5930 } 5931 // match: (ConstString {s}) 5932 // cond: config.PtrSize == 8 && s.(string) == "" 5933 // result: (StringMake (ConstNil) (Const64 <typ.Int> [0])) 5934 for { 5935 s := v.Aux 5936 if !(config.PtrSize == 8 && s.(string) == "") { 5937 break 5938 } 5939 v.reset(OpStringMake) 5940 v0 := b.NewValue0(v.Pos, OpConstNil, typ.BytePtr) 5941 v.AddArg(v0) 5942 v1 := b.NewValue0(v.Pos, OpConst64, typ.Int) 5943 v1.AuxInt = 0 5944 v.AddArg(v1) 5945 return true 5946 } 5947 // match: (ConstString {s}) 5948 // cond: config.PtrSize == 4 && s.(string) != "" 5949 // result: (StringMake (Addr <typ.BytePtr> {fe.StringData(s.(string))} (SB)) (Const32 <typ.Int> [int64(len(s.(string)))])) 5950 for { 5951 s := v.Aux 5952 if !(config.PtrSize == 4 && s.(string) != "") { 5953 break 5954 } 5955 v.reset(OpStringMake) 5956 v0 := b.NewValue0(v.Pos, OpAddr, typ.BytePtr) 5957 v0.Aux = fe.StringData(s.(string)) 5958 v1 := b.NewValue0(v.Pos, OpSB, typ.Uintptr) 5959 v0.AddArg(v1) 5960 v.AddArg(v0) 5961 v2 := b.NewValue0(v.Pos, OpConst32, typ.Int) 5962 v2.AuxInt = int64(len(s.(string))) 5963 v.AddArg(v2) 5964 return true 5965 } 5966 // match: (ConstString {s}) 5967 // cond: config.PtrSize == 8 && s.(string) != "" 5968 // result: (StringMake (Addr <typ.BytePtr> {fe.StringData(s.(string))} (SB)) (Const64 <typ.Int> [int64(len(s.(string)))])) 5969 for { 5970 s := v.Aux 5971 if !(config.PtrSize == 8 && s.(string) != "") { 5972 break 5973 } 5974 v.reset(OpStringMake) 5975 v0 := b.NewValue0(v.Pos, 
OpAddr, typ.BytePtr) 5976 v0.Aux = fe.StringData(s.(string)) 5977 v1 := b.NewValue0(v.Pos, OpSB, typ.Uintptr) 5978 v0.AddArg(v1) 5979 v.AddArg(v0) 5980 v2 := b.NewValue0(v.Pos, OpConst64, typ.Int) 5981 v2.AuxInt = int64(len(s.(string))) 5982 v.AddArg(v2) 5983 return true 5984 } 5985 return false 5986 } 5987 func rewriteValuegeneric_OpConvert_0(v *Value) bool { 5988 // match: (Convert (Add64 (Convert ptr mem) off) mem) 5989 // cond: 5990 // result: (Add64 ptr off) 5991 for { 5992 _ = v.Args[1] 5993 v_0 := v.Args[0] 5994 if v_0.Op != OpAdd64 { 5995 break 5996 } 5997 _ = v_0.Args[1] 5998 v_0_0 := v_0.Args[0] 5999 if v_0_0.Op != OpConvert { 6000 break 6001 } 6002 _ = v_0_0.Args[1] 6003 ptr := v_0_0.Args[0] 6004 mem := v_0_0.Args[1] 6005 off := v_0.Args[1] 6006 if mem != v.Args[1] { 6007 break 6008 } 6009 v.reset(OpAdd64) 6010 v.AddArg(ptr) 6011 v.AddArg(off) 6012 return true 6013 } 6014 // match: (Convert (Add64 off (Convert ptr mem)) mem) 6015 // cond: 6016 // result: (Add64 ptr off) 6017 for { 6018 _ = v.Args[1] 6019 v_0 := v.Args[0] 6020 if v_0.Op != OpAdd64 { 6021 break 6022 } 6023 _ = v_0.Args[1] 6024 off := v_0.Args[0] 6025 v_0_1 := v_0.Args[1] 6026 if v_0_1.Op != OpConvert { 6027 break 6028 } 6029 _ = v_0_1.Args[1] 6030 ptr := v_0_1.Args[0] 6031 mem := v_0_1.Args[1] 6032 if mem != v.Args[1] { 6033 break 6034 } 6035 v.reset(OpAdd64) 6036 v.AddArg(ptr) 6037 v.AddArg(off) 6038 return true 6039 } 6040 // match: (Convert (Convert ptr mem) mem) 6041 // cond: 6042 // result: ptr 6043 for { 6044 _ = v.Args[1] 6045 v_0 := v.Args[0] 6046 if v_0.Op != OpConvert { 6047 break 6048 } 6049 _ = v_0.Args[1] 6050 ptr := v_0.Args[0] 6051 mem := v_0.Args[1] 6052 if mem != v.Args[1] { 6053 break 6054 } 6055 v.reset(OpCopy) 6056 v.Type = ptr.Type 6057 v.AddArg(ptr) 6058 return true 6059 } 6060 return false 6061 } 6062 func rewriteValuegeneric_OpCvt32Fto64F_0(v *Value) bool { 6063 // match: (Cvt32Fto64F (Const32F [c])) 6064 // cond: 6065 // result: (Const64F [c]) 6066 for { 6067 v_0 := v.Args[0] 6068 if v_0.Op != OpConst32F { 6069 break 6070 } 6071 c := v_0.AuxInt 6072 v.reset(OpConst64F) 6073 v.AuxInt = c 6074 return true 6075 } 6076 return false 6077 } 6078 func rewriteValuegeneric_OpCvt64Fto32F_0(v *Value) bool { 6079 // match: (Cvt64Fto32F (Const64F [c])) 6080 // cond: 6081 // result: (Const32F [f2i(float64(i2f32(c)))]) 6082 for { 6083 v_0 := v.Args[0] 6084 if v_0.Op != OpConst64F { 6085 break 6086 } 6087 c := v_0.AuxInt 6088 v.reset(OpConst32F) 6089 v.AuxInt = f2i(float64(i2f32(c))) 6090 return true 6091 } 6092 return false 6093 } 6094 func rewriteValuegeneric_OpDiv16_0(v *Value) bool { 6095 b := v.Block 6096 _ = b 6097 typ := &b.Func.Config.Types 6098 _ = typ 6099 // match: (Div16 (Const16 [c]) (Const16 [d])) 6100 // cond: d != 0 6101 // result: (Const16 [int64(int16(c)/int16(d))]) 6102 for { 6103 _ = v.Args[1] 6104 v_0 := v.Args[0] 6105 if v_0.Op != OpConst16 { 6106 break 6107 } 6108 c := v_0.AuxInt 6109 v_1 := v.Args[1] 6110 if v_1.Op != OpConst16 { 6111 break 6112 } 6113 d := v_1.AuxInt 6114 if !(d != 0) { 6115 break 6116 } 6117 v.reset(OpConst16) 6118 v.AuxInt = int64(int16(c) / int16(d)) 6119 return true 6120 } 6121 // match: (Div16 <t> n (Const16 [c])) 6122 // cond: c < 0 && c != -1<<15 6123 // result: (Neg16 (Div16 <t> n (Const16 <t> [-c]))) 6124 for { 6125 t := v.Type 6126 _ = v.Args[1] 6127 n := v.Args[0] 6128 v_1 := v.Args[1] 6129 if v_1.Op != OpConst16 { 6130 break 6131 } 6132 c := v_1.AuxInt 6133 if !(c < 0 && c != -1<<15) { 6134 break 6135 } 6136 v.reset(OpNeg16) 6137 v0 := 
b.NewValue0(v.Pos, OpDiv16, t) 6138 v0.AddArg(n) 6139 v1 := b.NewValue0(v.Pos, OpConst16, t) 6140 v1.AuxInt = -c 6141 v0.AddArg(v1) 6142 v.AddArg(v0) 6143 return true 6144 } 6145 // match: (Div16 <t> x (Const16 [-1<<15])) 6146 // cond: 6147 // result: (Rsh16Ux64 (And16 <t> x (Neg16 <t> x)) (Const64 <typ.UInt64> [15])) 6148 for { 6149 t := v.Type 6150 _ = v.Args[1] 6151 x := v.Args[0] 6152 v_1 := v.Args[1] 6153 if v_1.Op != OpConst16 { 6154 break 6155 } 6156 if v_1.AuxInt != -1<<15 { 6157 break 6158 } 6159 v.reset(OpRsh16Ux64) 6160 v0 := b.NewValue0(v.Pos, OpAnd16, t) 6161 v0.AddArg(x) 6162 v1 := b.NewValue0(v.Pos, OpNeg16, t) 6163 v1.AddArg(x) 6164 v0.AddArg(v1) 6165 v.AddArg(v0) 6166 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 6167 v2.AuxInt = 15 6168 v.AddArg(v2) 6169 return true 6170 } 6171 // match: (Div16 <t> n (Const16 [c])) 6172 // cond: isPowerOfTwo(c) 6173 // result: (Rsh16x64 (Add16 <t> n (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [16-log2(c)]))) (Const64 <typ.UInt64> [log2(c)])) 6174 for { 6175 t := v.Type 6176 _ = v.Args[1] 6177 n := v.Args[0] 6178 v_1 := v.Args[1] 6179 if v_1.Op != OpConst16 { 6180 break 6181 } 6182 c := v_1.AuxInt 6183 if !(isPowerOfTwo(c)) { 6184 break 6185 } 6186 v.reset(OpRsh16x64) 6187 v0 := b.NewValue0(v.Pos, OpAdd16, t) 6188 v0.AddArg(n) 6189 v1 := b.NewValue0(v.Pos, OpRsh16Ux64, t) 6190 v2 := b.NewValue0(v.Pos, OpRsh16x64, t) 6191 v2.AddArg(n) 6192 v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 6193 v3.AuxInt = 15 6194 v2.AddArg(v3) 6195 v1.AddArg(v2) 6196 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 6197 v4.AuxInt = 16 - log2(c) 6198 v1.AddArg(v4) 6199 v0.AddArg(v1) 6200 v.AddArg(v0) 6201 v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 6202 v5.AuxInt = log2(c) 6203 v.AddArg(v5) 6204 return true 6205 } 6206 // match: (Div16 <t> x (Const16 [c])) 6207 // cond: smagicOK(16,c) 6208 // result: (Sub16 <t> (Rsh32x64 <t> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(smagic(16,c).m)]) (SignExt16to32 x)) (Const64 <typ.UInt64> [16+smagic(16,c).s])) (Rsh32x64 <t> (SignExt16to32 x) (Const64 <typ.UInt64> [31]))) 6209 for { 6210 t := v.Type 6211 _ = v.Args[1] 6212 x := v.Args[0] 6213 v_1 := v.Args[1] 6214 if v_1.Op != OpConst16 { 6215 break 6216 } 6217 c := v_1.AuxInt 6218 if !(smagicOK(16, c)) { 6219 break 6220 } 6221 v.reset(OpSub16) 6222 v.Type = t 6223 v0 := b.NewValue0(v.Pos, OpRsh32x64, t) 6224 v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32) 6225 v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32) 6226 v2.AuxInt = int64(smagic(16, c).m) 6227 v1.AddArg(v2) 6228 v3 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 6229 v3.AddArg(x) 6230 v1.AddArg(v3) 6231 v0.AddArg(v1) 6232 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 6233 v4.AuxInt = 16 + smagic(16, c).s 6234 v0.AddArg(v4) 6235 v.AddArg(v0) 6236 v5 := b.NewValue0(v.Pos, OpRsh32x64, t) 6237 v6 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 6238 v6.AddArg(x) 6239 v5.AddArg(v6) 6240 v7 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 6241 v7.AuxInt = 31 6242 v5.AddArg(v7) 6243 v.AddArg(v5) 6244 return true 6245 } 6246 return false 6247 } 6248 func rewriteValuegeneric_OpDiv16u_0(v *Value) bool { 6249 b := v.Block 6250 _ = b 6251 config := b.Func.Config 6252 _ = config 6253 typ := &b.Func.Config.Types 6254 _ = typ 6255 // match: (Div16u (Const16 [c]) (Const16 [d])) 6256 // cond: d != 0 6257 // result: (Const16 [int64(int16(uint16(c)/uint16(d)))]) 6258 for { 6259 _ = v.Args[1] 6260 v_0 := v.Args[0] 6261 if v_0.Op != OpConst16 { 6262 break 6263 } 6264 c := v_0.AuxInt 6265 
v_1 := v.Args[1] 6266 if v_1.Op != OpConst16 { 6267 break 6268 } 6269 d := v_1.AuxInt 6270 if !(d != 0) { 6271 break 6272 } 6273 v.reset(OpConst16) 6274 v.AuxInt = int64(int16(uint16(c) / uint16(d))) 6275 return true 6276 } 6277 // match: (Div16u n (Const16 [c])) 6278 // cond: isPowerOfTwo(c&0xffff) 6279 // result: (Rsh16Ux64 n (Const64 <typ.UInt64> [log2(c&0xffff)])) 6280 for { 6281 _ = v.Args[1] 6282 n := v.Args[0] 6283 v_1 := v.Args[1] 6284 if v_1.Op != OpConst16 { 6285 break 6286 } 6287 c := v_1.AuxInt 6288 if !(isPowerOfTwo(c & 0xffff)) { 6289 break 6290 } 6291 v.reset(OpRsh16Ux64) 6292 v.AddArg(n) 6293 v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 6294 v0.AuxInt = log2(c & 0xffff) 6295 v.AddArg(v0) 6296 return true 6297 } 6298 // match: (Div16u x (Const16 [c])) 6299 // cond: umagicOK(16, c) && config.RegSize == 8 6300 // result: (Trunc64to16 (Rsh64Ux64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(1<<16+umagic(16,c).m)]) (ZeroExt16to64 x)) (Const64 <typ.UInt64> [16+umagic(16,c).s]))) 6301 for { 6302 _ = v.Args[1] 6303 x := v.Args[0] 6304 v_1 := v.Args[1] 6305 if v_1.Op != OpConst16 { 6306 break 6307 } 6308 c := v_1.AuxInt 6309 if !(umagicOK(16, c) && config.RegSize == 8) { 6310 break 6311 } 6312 v.reset(OpTrunc64to16) 6313 v0 := b.NewValue0(v.Pos, OpRsh64Ux64, typ.UInt64) 6314 v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64) 6315 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 6316 v2.AuxInt = int64(1<<16 + umagic(16, c).m) 6317 v1.AddArg(v2) 6318 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 6319 v3.AddArg(x) 6320 v1.AddArg(v3) 6321 v0.AddArg(v1) 6322 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 6323 v4.AuxInt = 16 + umagic(16, c).s 6324 v0.AddArg(v4) 6325 v.AddArg(v0) 6326 return true 6327 } 6328 // match: (Div16u x (Const16 [c])) 6329 // cond: umagicOK(16, c) && config.RegSize == 4 && umagic(16,c).m&1 == 0 6330 // result: (Trunc32to16 (Rsh32Ux64 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(1<<15+umagic(16,c).m/2)]) (ZeroExt16to32 x)) (Const64 <typ.UInt64> [16+umagic(16,c).s-1]))) 6331 for { 6332 _ = v.Args[1] 6333 x := v.Args[0] 6334 v_1 := v.Args[1] 6335 if v_1.Op != OpConst16 { 6336 break 6337 } 6338 c := v_1.AuxInt 6339 if !(umagicOK(16, c) && config.RegSize == 4 && umagic(16, c).m&1 == 0) { 6340 break 6341 } 6342 v.reset(OpTrunc32to16) 6343 v0 := b.NewValue0(v.Pos, OpRsh32Ux64, typ.UInt32) 6344 v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32) 6345 v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32) 6346 v2.AuxInt = int64(1<<15 + umagic(16, c).m/2) 6347 v1.AddArg(v2) 6348 v3 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 6349 v3.AddArg(x) 6350 v1.AddArg(v3) 6351 v0.AddArg(v1) 6352 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 6353 v4.AuxInt = 16 + umagic(16, c).s - 1 6354 v0.AddArg(v4) 6355 v.AddArg(v0) 6356 return true 6357 } 6358 // match: (Div16u x (Const16 [c])) 6359 // cond: umagicOK(16, c) && config.RegSize == 4 && c&1 == 0 6360 // result: (Trunc32to16 (Rsh32Ux64 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(1<<15+(umagic(16,c).m+1)/2)]) (Rsh32Ux64 <typ.UInt32> (ZeroExt16to32 x) (Const64 <typ.UInt64> [1]))) (Const64 <typ.UInt64> [16+umagic(16,c).s-2]))) 6361 for { 6362 _ = v.Args[1] 6363 x := v.Args[0] 6364 v_1 := v.Args[1] 6365 if v_1.Op != OpConst16 { 6366 break 6367 } 6368 c := v_1.AuxInt 6369 if !(umagicOK(16, c) && config.RegSize == 4 && c&1 == 0) { 6370 break 6371 } 6372 v.reset(OpTrunc32to16) 6373 v0 := b.NewValue0(v.Pos, OpRsh32Ux64, typ.UInt32) 6374 v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32) 6375 v2 := 
b.NewValue0(v.Pos, OpConst32, typ.UInt32) 6376 v2.AuxInt = int64(1<<15 + (umagic(16, c).m+1)/2) 6377 v1.AddArg(v2) 6378 v3 := b.NewValue0(v.Pos, OpRsh32Ux64, typ.UInt32) 6379 v4 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 6380 v4.AddArg(x) 6381 v3.AddArg(v4) 6382 v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 6383 v5.AuxInt = 1 6384 v3.AddArg(v5) 6385 v1.AddArg(v3) 6386 v0.AddArg(v1) 6387 v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 6388 v6.AuxInt = 16 + umagic(16, c).s - 2 6389 v0.AddArg(v6) 6390 v.AddArg(v0) 6391 return true 6392 } 6393 // match: (Div16u x (Const16 [c])) 6394 // cond: umagicOK(16, c) && config.RegSize == 4 6395 // result: (Trunc32to16 (Rsh32Ux64 <typ.UInt32> (Avg32u (Lsh32x64 <typ.UInt32> (ZeroExt16to32 x) (Const64 <typ.UInt64> [16])) (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(umagic(16,c).m)]) (ZeroExt16to32 x))) (Const64 <typ.UInt64> [16+umagic(16,c).s-1]))) 6396 for { 6397 _ = v.Args[1] 6398 x := v.Args[0] 6399 v_1 := v.Args[1] 6400 if v_1.Op != OpConst16 { 6401 break 6402 } 6403 c := v_1.AuxInt 6404 if !(umagicOK(16, c) && config.RegSize == 4) { 6405 break 6406 } 6407 v.reset(OpTrunc32to16) 6408 v0 := b.NewValue0(v.Pos, OpRsh32Ux64, typ.UInt32) 6409 v1 := b.NewValue0(v.Pos, OpAvg32u, typ.UInt32) 6410 v2 := b.NewValue0(v.Pos, OpLsh32x64, typ.UInt32) 6411 v3 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 6412 v3.AddArg(x) 6413 v2.AddArg(v3) 6414 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 6415 v4.AuxInt = 16 6416 v2.AddArg(v4) 6417 v1.AddArg(v2) 6418 v5 := b.NewValue0(v.Pos, OpMul32, typ.UInt32) 6419 v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32) 6420 v6.AuxInt = int64(umagic(16, c).m) 6421 v5.AddArg(v6) 6422 v7 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 6423 v7.AddArg(x) 6424 v5.AddArg(v7) 6425 v1.AddArg(v5) 6426 v0.AddArg(v1) 6427 v8 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 6428 v8.AuxInt = 16 + umagic(16, c).s - 1 6429 v0.AddArg(v8) 6430 v.AddArg(v0) 6431 return true 6432 } 6433 return false 6434 } 6435 func rewriteValuegeneric_OpDiv32_0(v *Value) bool { 6436 b := v.Block 6437 _ = b 6438 config := b.Func.Config 6439 _ = config 6440 typ := &b.Func.Config.Types 6441 _ = typ 6442 // match: (Div32 (Const32 [c]) (Const32 [d])) 6443 // cond: d != 0 6444 // result: (Const32 [int64(int32(c)/int32(d))]) 6445 for { 6446 _ = v.Args[1] 6447 v_0 := v.Args[0] 6448 if v_0.Op != OpConst32 { 6449 break 6450 } 6451 c := v_0.AuxInt 6452 v_1 := v.Args[1] 6453 if v_1.Op != OpConst32 { 6454 break 6455 } 6456 d := v_1.AuxInt 6457 if !(d != 0) { 6458 break 6459 } 6460 v.reset(OpConst32) 6461 v.AuxInt = int64(int32(c) / int32(d)) 6462 return true 6463 } 6464 // match: (Div32 <t> n (Const32 [c])) 6465 // cond: c < 0 && c != -1<<31 6466 // result: (Neg32 (Div32 <t> n (Const32 <t> [-c]))) 6467 for { 6468 t := v.Type 6469 _ = v.Args[1] 6470 n := v.Args[0] 6471 v_1 := v.Args[1] 6472 if v_1.Op != OpConst32 { 6473 break 6474 } 6475 c := v_1.AuxInt 6476 if !(c < 0 && c != -1<<31) { 6477 break 6478 } 6479 v.reset(OpNeg32) 6480 v0 := b.NewValue0(v.Pos, OpDiv32, t) 6481 v0.AddArg(n) 6482 v1 := b.NewValue0(v.Pos, OpConst32, t) 6483 v1.AuxInt = -c 6484 v0.AddArg(v1) 6485 v.AddArg(v0) 6486 return true 6487 } 6488 // match: (Div32 <t> x (Const32 [-1<<31])) 6489 // cond: 6490 // result: (Rsh32Ux64 (And32 <t> x (Neg32 <t> x)) (Const64 <typ.UInt64> [31])) 6491 for { 6492 t := v.Type 6493 _ = v.Args[1] 6494 x := v.Args[0] 6495 v_1 := v.Args[1] 6496 if v_1.Op != OpConst32 { 6497 break 6498 } 6499 if v_1.AuxInt != -1<<31 { 6500 break 6501 } 6502 v.reset(OpRsh32Ux64) 6503 v0 
:= b.NewValue0(v.Pos, OpAnd32, t) 6504 v0.AddArg(x) 6505 v1 := b.NewValue0(v.Pos, OpNeg32, t) 6506 v1.AddArg(x) 6507 v0.AddArg(v1) 6508 v.AddArg(v0) 6509 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 6510 v2.AuxInt = 31 6511 v.AddArg(v2) 6512 return true 6513 } 6514 // match: (Div32 <t> n (Const32 [c])) 6515 // cond: isPowerOfTwo(c) 6516 // result: (Rsh32x64 (Add32 <t> n (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [32-log2(c)]))) (Const64 <typ.UInt64> [log2(c)])) 6517 for { 6518 t := v.Type 6519 _ = v.Args[1] 6520 n := v.Args[0] 6521 v_1 := v.Args[1] 6522 if v_1.Op != OpConst32 { 6523 break 6524 } 6525 c := v_1.AuxInt 6526 if !(isPowerOfTwo(c)) { 6527 break 6528 } 6529 v.reset(OpRsh32x64) 6530 v0 := b.NewValue0(v.Pos, OpAdd32, t) 6531 v0.AddArg(n) 6532 v1 := b.NewValue0(v.Pos, OpRsh32Ux64, t) 6533 v2 := b.NewValue0(v.Pos, OpRsh32x64, t) 6534 v2.AddArg(n) 6535 v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 6536 v3.AuxInt = 31 6537 v2.AddArg(v3) 6538 v1.AddArg(v2) 6539 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 6540 v4.AuxInt = 32 - log2(c) 6541 v1.AddArg(v4) 6542 v0.AddArg(v1) 6543 v.AddArg(v0) 6544 v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 6545 v5.AuxInt = log2(c) 6546 v.AddArg(v5) 6547 return true 6548 } 6549 // match: (Div32 <t> x (Const32 [c])) 6550 // cond: smagicOK(32,c) && config.RegSize == 8 6551 // result: (Sub32 <t> (Rsh64x64 <t> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(smagic(32,c).m)]) (SignExt32to64 x)) (Const64 <typ.UInt64> [32+smagic(32,c).s])) (Rsh64x64 <t> (SignExt32to64 x) (Const64 <typ.UInt64> [63]))) 6552 for { 6553 t := v.Type 6554 _ = v.Args[1] 6555 x := v.Args[0] 6556 v_1 := v.Args[1] 6557 if v_1.Op != OpConst32 { 6558 break 6559 } 6560 c := v_1.AuxInt 6561 if !(smagicOK(32, c) && config.RegSize == 8) { 6562 break 6563 } 6564 v.reset(OpSub32) 6565 v.Type = t 6566 v0 := b.NewValue0(v.Pos, OpRsh64x64, t) 6567 v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64) 6568 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 6569 v2.AuxInt = int64(smagic(32, c).m) 6570 v1.AddArg(v2) 6571 v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 6572 v3.AddArg(x) 6573 v1.AddArg(v3) 6574 v0.AddArg(v1) 6575 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 6576 v4.AuxInt = 32 + smagic(32, c).s 6577 v0.AddArg(v4) 6578 v.AddArg(v0) 6579 v5 := b.NewValue0(v.Pos, OpRsh64x64, t) 6580 v6 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 6581 v6.AddArg(x) 6582 v5.AddArg(v6) 6583 v7 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 6584 v7.AuxInt = 63 6585 v5.AddArg(v7) 6586 v.AddArg(v5) 6587 return true 6588 } 6589 // match: (Div32 <t> x (Const32 [c])) 6590 // cond: smagicOK(32,c) && config.RegSize == 4 && smagic(32,c).m&1 == 0 6591 // result: (Sub32 <t> (Rsh32x64 <t> (Hmul32 <t> (Const32 <typ.UInt32> [int64(int32(smagic(32,c).m/2))]) x) (Const64 <typ.UInt64> [smagic(32,c).s-1])) (Rsh32x64 <t> x (Const64 <typ.UInt64> [31]))) 6592 for { 6593 t := v.Type 6594 _ = v.Args[1] 6595 x := v.Args[0] 6596 v_1 := v.Args[1] 6597 if v_1.Op != OpConst32 { 6598 break 6599 } 6600 c := v_1.AuxInt 6601 if !(smagicOK(32, c) && config.RegSize == 4 && smagic(32, c).m&1 == 0) { 6602 break 6603 } 6604 v.reset(OpSub32) 6605 v.Type = t 6606 v0 := b.NewValue0(v.Pos, OpRsh32x64, t) 6607 v1 := b.NewValue0(v.Pos, OpHmul32, t) 6608 v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32) 6609 v2.AuxInt = int64(int32(smagic(32, c).m / 2)) 6610 v1.AddArg(v2) 6611 v1.AddArg(x) 6612 v0.AddArg(v1) 6613 v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 6614 v3.AuxInt = smagic(32, c).s - 1 6615 
v0.AddArg(v3) 6616 v.AddArg(v0) 6617 v4 := b.NewValue0(v.Pos, OpRsh32x64, t) 6618 v4.AddArg(x) 6619 v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 6620 v5.AuxInt = 31 6621 v4.AddArg(v5) 6622 v.AddArg(v4) 6623 return true 6624 } 6625 // match: (Div32 <t> x (Const32 [c])) 6626 // cond: smagicOK(32,c) && config.RegSize == 4 && smagic(32,c).m&1 != 0 6627 // result: (Sub32 <t> (Rsh32x64 <t> (Add32 <t> (Hmul32 <t> (Const32 <typ.UInt32> [int64(int32(smagic(32,c).m))]) x) x) (Const64 <typ.UInt64> [smagic(32,c).s])) (Rsh32x64 <t> x (Const64 <typ.UInt64> [31]))) 6628 for { 6629 t := v.Type 6630 _ = v.Args[1] 6631 x := v.Args[0] 6632 v_1 := v.Args[1] 6633 if v_1.Op != OpConst32 { 6634 break 6635 } 6636 c := v_1.AuxInt 6637 if !(smagicOK(32, c) && config.RegSize == 4 && smagic(32, c).m&1 != 0) { 6638 break 6639 } 6640 v.reset(OpSub32) 6641 v.Type = t 6642 v0 := b.NewValue0(v.Pos, OpRsh32x64, t) 6643 v1 := b.NewValue0(v.Pos, OpAdd32, t) 6644 v2 := b.NewValue0(v.Pos, OpHmul32, t) 6645 v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32) 6646 v3.AuxInt = int64(int32(smagic(32, c).m)) 6647 v2.AddArg(v3) 6648 v2.AddArg(x) 6649 v1.AddArg(v2) 6650 v1.AddArg(x) 6651 v0.AddArg(v1) 6652 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 6653 v4.AuxInt = smagic(32, c).s 6654 v0.AddArg(v4) 6655 v.AddArg(v0) 6656 v5 := b.NewValue0(v.Pos, OpRsh32x64, t) 6657 v5.AddArg(x) 6658 v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 6659 v6.AuxInt = 31 6660 v5.AddArg(v6) 6661 v.AddArg(v5) 6662 return true 6663 } 6664 return false 6665 } 6666 func rewriteValuegeneric_OpDiv32F_0(v *Value) bool { 6667 b := v.Block 6668 _ = b 6669 // match: (Div32F (Const32F [c]) (Const32F [d])) 6670 // cond: 6671 // result: (Const32F [f2i(float64(i2f32(c) / i2f32(d)))]) 6672 for { 6673 _ = v.Args[1] 6674 v_0 := v.Args[0] 6675 if v_0.Op != OpConst32F { 6676 break 6677 } 6678 c := v_0.AuxInt 6679 v_1 := v.Args[1] 6680 if v_1.Op != OpConst32F { 6681 break 6682 } 6683 d := v_1.AuxInt 6684 v.reset(OpConst32F) 6685 v.AuxInt = f2i(float64(i2f32(c) / i2f32(d))) 6686 return true 6687 } 6688 // match: (Div32F x (Const32F <t> [c])) 6689 // cond: reciprocalExact32(float32(i2f(c))) 6690 // result: (Mul32F x (Const32F <t> [f2i(1/i2f(c))])) 6691 for { 6692 _ = v.Args[1] 6693 x := v.Args[0] 6694 v_1 := v.Args[1] 6695 if v_1.Op != OpConst32F { 6696 break 6697 } 6698 t := v_1.Type 6699 c := v_1.AuxInt 6700 if !(reciprocalExact32(float32(i2f(c)))) { 6701 break 6702 } 6703 v.reset(OpMul32F) 6704 v.AddArg(x) 6705 v0 := b.NewValue0(v.Pos, OpConst32F, t) 6706 v0.AuxInt = f2i(1 / i2f(c)) 6707 v.AddArg(v0) 6708 return true 6709 } 6710 return false 6711 } 6712 func rewriteValuegeneric_OpDiv32u_0(v *Value) bool { 6713 b := v.Block 6714 _ = b 6715 config := b.Func.Config 6716 _ = config 6717 typ := &b.Func.Config.Types 6718 _ = typ 6719 // match: (Div32u (Const32 [c]) (Const32 [d])) 6720 // cond: d != 0 6721 // result: (Const32 [int64(int32(uint32(c)/uint32(d)))]) 6722 for { 6723 _ = v.Args[1] 6724 v_0 := v.Args[0] 6725 if v_0.Op != OpConst32 { 6726 break 6727 } 6728 c := v_0.AuxInt 6729 v_1 := v.Args[1] 6730 if v_1.Op != OpConst32 { 6731 break 6732 } 6733 d := v_1.AuxInt 6734 if !(d != 0) { 6735 break 6736 } 6737 v.reset(OpConst32) 6738 v.AuxInt = int64(int32(uint32(c) / uint32(d))) 6739 return true 6740 } 6741 // match: (Div32u n (Const32 [c])) 6742 // cond: isPowerOfTwo(c&0xffffffff) 6743 // result: (Rsh32Ux64 n (Const64 <typ.UInt64> [log2(c&0xffffffff)])) 6744 for { 6745 _ = v.Args[1] 6746 n := v.Args[0] 6747 v_1 := v.Args[1] 6748 if v_1.Op != OpConst32 { 6749 break 6750 } 
6751 c := v_1.AuxInt 6752 if !(isPowerOfTwo(c & 0xffffffff)) { 6753 break 6754 } 6755 v.reset(OpRsh32Ux64) 6756 v.AddArg(n) 6757 v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 6758 v0.AuxInt = log2(c & 0xffffffff) 6759 v.AddArg(v0) 6760 return true 6761 } 6762 // match: (Div32u x (Const32 [c])) 6763 // cond: umagicOK(32, c) && config.RegSize == 4 && umagic(32,c).m&1 == 0 6764 // result: (Rsh32Ux64 <typ.UInt32> (Hmul32u <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(1<<31+umagic(32,c).m/2))]) x) (Const64 <typ.UInt64> [umagic(32,c).s-1])) 6765 for { 6766 _ = v.Args[1] 6767 x := v.Args[0] 6768 v_1 := v.Args[1] 6769 if v_1.Op != OpConst32 { 6770 break 6771 } 6772 c := v_1.AuxInt 6773 if !(umagicOK(32, c) && config.RegSize == 4 && umagic(32, c).m&1 == 0) { 6774 break 6775 } 6776 v.reset(OpRsh32Ux64) 6777 v.Type = typ.UInt32 6778 v0 := b.NewValue0(v.Pos, OpHmul32u, typ.UInt32) 6779 v1 := b.NewValue0(v.Pos, OpConst32, typ.UInt32) 6780 v1.AuxInt = int64(int32(1<<31 + umagic(32, c).m/2)) 6781 v0.AddArg(v1) 6782 v0.AddArg(x) 6783 v.AddArg(v0) 6784 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 6785 v2.AuxInt = umagic(32, c).s - 1 6786 v.AddArg(v2) 6787 return true 6788 } 6789 // match: (Div32u x (Const32 [c])) 6790 // cond: umagicOK(32, c) && config.RegSize == 4 && c&1 == 0 6791 // result: (Rsh32Ux64 <typ.UInt32> (Hmul32u <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(1<<31+(umagic(32,c).m+1)/2))]) (Rsh32Ux64 <typ.UInt32> x (Const64 <typ.UInt64> [1]))) (Const64 <typ.UInt64> [umagic(32,c).s-2])) 6792 for { 6793 _ = v.Args[1] 6794 x := v.Args[0] 6795 v_1 := v.Args[1] 6796 if v_1.Op != OpConst32 { 6797 break 6798 } 6799 c := v_1.AuxInt 6800 if !(umagicOK(32, c) && config.RegSize == 4 && c&1 == 0) { 6801 break 6802 } 6803 v.reset(OpRsh32Ux64) 6804 v.Type = typ.UInt32 6805 v0 := b.NewValue0(v.Pos, OpHmul32u, typ.UInt32) 6806 v1 := b.NewValue0(v.Pos, OpConst32, typ.UInt32) 6807 v1.AuxInt = int64(int32(1<<31 + (umagic(32, c).m+1)/2)) 6808 v0.AddArg(v1) 6809 v2 := b.NewValue0(v.Pos, OpRsh32Ux64, typ.UInt32) 6810 v2.AddArg(x) 6811 v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 6812 v3.AuxInt = 1 6813 v2.AddArg(v3) 6814 v0.AddArg(v2) 6815 v.AddArg(v0) 6816 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 6817 v4.AuxInt = umagic(32, c).s - 2 6818 v.AddArg(v4) 6819 return true 6820 } 6821 // match: (Div32u x (Const32 [c])) 6822 // cond: umagicOK(32, c) && config.RegSize == 4 6823 // result: (Rsh32Ux64 <typ.UInt32> (Avg32u x (Hmul32u <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(umagic(32,c).m))]) x)) (Const64 <typ.UInt64> [umagic(32,c).s-1])) 6824 for { 6825 _ = v.Args[1] 6826 x := v.Args[0] 6827 v_1 := v.Args[1] 6828 if v_1.Op != OpConst32 { 6829 break 6830 } 6831 c := v_1.AuxInt 6832 if !(umagicOK(32, c) && config.RegSize == 4) { 6833 break 6834 } 6835 v.reset(OpRsh32Ux64) 6836 v.Type = typ.UInt32 6837 v0 := b.NewValue0(v.Pos, OpAvg32u, typ.UInt32) 6838 v0.AddArg(x) 6839 v1 := b.NewValue0(v.Pos, OpHmul32u, typ.UInt32) 6840 v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32) 6841 v2.AuxInt = int64(int32(umagic(32, c).m)) 6842 v1.AddArg(v2) 6843 v1.AddArg(x) 6844 v0.AddArg(v1) 6845 v.AddArg(v0) 6846 v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 6847 v3.AuxInt = umagic(32, c).s - 1 6848 v.AddArg(v3) 6849 return true 6850 } 6851 // match: (Div32u x (Const32 [c])) 6852 // cond: umagicOK(32, c) && config.RegSize == 8 && umagic(32,c).m&1 == 0 6853 // result: (Trunc64to32 (Rsh64Ux64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(1<<31+umagic(32,c).m/2)]) (ZeroExt32to64 x)) (Const64 <typ.UInt64> 
[32+umagic(32,c).s-1]))) 6854 for { 6855 _ = v.Args[1] 6856 x := v.Args[0] 6857 v_1 := v.Args[1] 6858 if v_1.Op != OpConst32 { 6859 break 6860 } 6861 c := v_1.AuxInt 6862 if !(umagicOK(32, c) && config.RegSize == 8 && umagic(32, c).m&1 == 0) { 6863 break 6864 } 6865 v.reset(OpTrunc64to32) 6866 v0 := b.NewValue0(v.Pos, OpRsh64Ux64, typ.UInt64) 6867 v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64) 6868 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 6869 v2.AuxInt = int64(1<<31 + umagic(32, c).m/2) 6870 v1.AddArg(v2) 6871 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 6872 v3.AddArg(x) 6873 v1.AddArg(v3) 6874 v0.AddArg(v1) 6875 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 6876 v4.AuxInt = 32 + umagic(32, c).s - 1 6877 v0.AddArg(v4) 6878 v.AddArg(v0) 6879 return true 6880 } 6881 // match: (Div32u x (Const32 [c])) 6882 // cond: umagicOK(32, c) && config.RegSize == 8 && c&1 == 0 6883 // result: (Trunc64to32 (Rsh64Ux64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(1<<31+(umagic(32,c).m+1)/2)]) (Rsh64Ux64 <typ.UInt64> (ZeroExt32to64 x) (Const64 <typ.UInt64> [1]))) (Const64 <typ.UInt64> [32+umagic(32,c).s-2]))) 6884 for { 6885 _ = v.Args[1] 6886 x := v.Args[0] 6887 v_1 := v.Args[1] 6888 if v_1.Op != OpConst32 { 6889 break 6890 } 6891 c := v_1.AuxInt 6892 if !(umagicOK(32, c) && config.RegSize == 8 && c&1 == 0) { 6893 break 6894 } 6895 v.reset(OpTrunc64to32) 6896 v0 := b.NewValue0(v.Pos, OpRsh64Ux64, typ.UInt64) 6897 v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64) 6898 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 6899 v2.AuxInt = int64(1<<31 + (umagic(32, c).m+1)/2) 6900 v1.AddArg(v2) 6901 v3 := b.NewValue0(v.Pos, OpRsh64Ux64, typ.UInt64) 6902 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 6903 v4.AddArg(x) 6904 v3.AddArg(v4) 6905 v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 6906 v5.AuxInt = 1 6907 v3.AddArg(v5) 6908 v1.AddArg(v3) 6909 v0.AddArg(v1) 6910 v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 6911 v6.AuxInt = 32 + umagic(32, c).s - 2 6912 v0.AddArg(v6) 6913 v.AddArg(v0) 6914 return true 6915 } 6916 // match: (Div32u x (Const32 [c])) 6917 // cond: umagicOK(32, c) && config.RegSize == 8 6918 // result: (Trunc64to32 (Rsh64Ux64 <typ.UInt64> (Avg64u (Lsh64x64 <typ.UInt64> (ZeroExt32to64 x) (Const64 <typ.UInt64> [32])) (Mul64 <typ.UInt64> (Const64 <typ.UInt32> [int64(umagic(32,c).m)]) (ZeroExt32to64 x))) (Const64 <typ.UInt64> [32+umagic(32,c).s-1]))) 6919 for { 6920 _ = v.Args[1] 6921 x := v.Args[0] 6922 v_1 := v.Args[1] 6923 if v_1.Op != OpConst32 { 6924 break 6925 } 6926 c := v_1.AuxInt 6927 if !(umagicOK(32, c) && config.RegSize == 8) { 6928 break 6929 } 6930 v.reset(OpTrunc64to32) 6931 v0 := b.NewValue0(v.Pos, OpRsh64Ux64, typ.UInt64) 6932 v1 := b.NewValue0(v.Pos, OpAvg64u, typ.UInt64) 6933 v2 := b.NewValue0(v.Pos, OpLsh64x64, typ.UInt64) 6934 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 6935 v3.AddArg(x) 6936 v2.AddArg(v3) 6937 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 6938 v4.AuxInt = 32 6939 v2.AddArg(v4) 6940 v1.AddArg(v2) 6941 v5 := b.NewValue0(v.Pos, OpMul64, typ.UInt64) 6942 v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt32) 6943 v6.AuxInt = int64(umagic(32, c).m) 6944 v5.AddArg(v6) 6945 v7 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 6946 v7.AddArg(x) 6947 v5.AddArg(v7) 6948 v1.AddArg(v5) 6949 v0.AddArg(v1) 6950 v8 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 6951 v8.AuxInt = 32 + umagic(32, c).s - 1 6952 v0.AddArg(v8) 6953 v.AddArg(v0) 6954 return true 6955 } 6956 return false 6957 } 6958 func rewriteValuegeneric_OpDiv64_0(v 
*Value) bool { 6959 b := v.Block 6960 _ = b 6961 typ := &b.Func.Config.Types 6962 _ = typ 6963 // match: (Div64 (Const64 [c]) (Const64 [d])) 6964 // cond: d != 0 6965 // result: (Const64 [c/d]) 6966 for { 6967 _ = v.Args[1] 6968 v_0 := v.Args[0] 6969 if v_0.Op != OpConst64 { 6970 break 6971 } 6972 c := v_0.AuxInt 6973 v_1 := v.Args[1] 6974 if v_1.Op != OpConst64 { 6975 break 6976 } 6977 d := v_1.AuxInt 6978 if !(d != 0) { 6979 break 6980 } 6981 v.reset(OpConst64) 6982 v.AuxInt = c / d 6983 return true 6984 } 6985 // match: (Div64 <t> n (Const64 [c])) 6986 // cond: c < 0 && c != -1<<63 6987 // result: (Neg64 (Div64 <t> n (Const64 <t> [-c]))) 6988 for { 6989 t := v.Type 6990 _ = v.Args[1] 6991 n := v.Args[0] 6992 v_1 := v.Args[1] 6993 if v_1.Op != OpConst64 { 6994 break 6995 } 6996 c := v_1.AuxInt 6997 if !(c < 0 && c != -1<<63) { 6998 break 6999 } 7000 v.reset(OpNeg64) 7001 v0 := b.NewValue0(v.Pos, OpDiv64, t) 7002 v0.AddArg(n) 7003 v1 := b.NewValue0(v.Pos, OpConst64, t) 7004 v1.AuxInt = -c 7005 v0.AddArg(v1) 7006 v.AddArg(v0) 7007 return true 7008 } 7009 // match: (Div64 <t> x (Const64 [-1<<63])) 7010 // cond: 7011 // result: (Rsh64Ux64 (And64 <t> x (Neg64 <t> x)) (Const64 <typ.UInt64> [63])) 7012 for { 7013 t := v.Type 7014 _ = v.Args[1] 7015 x := v.Args[0] 7016 v_1 := v.Args[1] 7017 if v_1.Op != OpConst64 { 7018 break 7019 } 7020 if v_1.AuxInt != -1<<63 { 7021 break 7022 } 7023 v.reset(OpRsh64Ux64) 7024 v0 := b.NewValue0(v.Pos, OpAnd64, t) 7025 v0.AddArg(x) 7026 v1 := b.NewValue0(v.Pos, OpNeg64, t) 7027 v1.AddArg(x) 7028 v0.AddArg(v1) 7029 v.AddArg(v0) 7030 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 7031 v2.AuxInt = 63 7032 v.AddArg(v2) 7033 return true 7034 } 7035 // match: (Div64 <t> n (Const64 [c])) 7036 // cond: isPowerOfTwo(c) 7037 // result: (Rsh64x64 (Add64 <t> n (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [64-log2(c)]))) (Const64 <typ.UInt64> [log2(c)])) 7038 for { 7039 t := v.Type 7040 _ = v.Args[1] 7041 n := v.Args[0] 7042 v_1 := v.Args[1] 7043 if v_1.Op != OpConst64 { 7044 break 7045 } 7046 c := v_1.AuxInt 7047 if !(isPowerOfTwo(c)) { 7048 break 7049 } 7050 v.reset(OpRsh64x64) 7051 v0 := b.NewValue0(v.Pos, OpAdd64, t) 7052 v0.AddArg(n) 7053 v1 := b.NewValue0(v.Pos, OpRsh64Ux64, t) 7054 v2 := b.NewValue0(v.Pos, OpRsh64x64, t) 7055 v2.AddArg(n) 7056 v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 7057 v3.AuxInt = 63 7058 v2.AddArg(v3) 7059 v1.AddArg(v2) 7060 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 7061 v4.AuxInt = 64 - log2(c) 7062 v1.AddArg(v4) 7063 v0.AddArg(v1) 7064 v.AddArg(v0) 7065 v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 7066 v5.AuxInt = log2(c) 7067 v.AddArg(v5) 7068 return true 7069 } 7070 // match: (Div64 <t> x (Const64 [c])) 7071 // cond: smagicOK(64,c) && smagic(64,c).m&1 == 0 7072 // result: (Sub64 <t> (Rsh64x64 <t> (Hmul64 <t> (Const64 <typ.UInt64> [int64(smagic(64,c).m/2)]) x) (Const64 <typ.UInt64> [smagic(64,c).s-1])) (Rsh64x64 <t> x (Const64 <typ.UInt64> [63]))) 7073 for { 7074 t := v.Type 7075 _ = v.Args[1] 7076 x := v.Args[0] 7077 v_1 := v.Args[1] 7078 if v_1.Op != OpConst64 { 7079 break 7080 } 7081 c := v_1.AuxInt 7082 if !(smagicOK(64, c) && smagic(64, c).m&1 == 0) { 7083 break 7084 } 7085 v.reset(OpSub64) 7086 v.Type = t 7087 v0 := b.NewValue0(v.Pos, OpRsh64x64, t) 7088 v1 := b.NewValue0(v.Pos, OpHmul64, t) 7089 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 7090 v2.AuxInt = int64(smagic(64, c).m / 2) 7091 v1.AddArg(v2) 7092 v1.AddArg(x) 7093 v0.AddArg(v1) 7094 v3 := b.NewValue0(v.Pos, OpConst64, 
typ.UInt64) 7095 v3.AuxInt = smagic(64, c).s - 1 7096 v0.AddArg(v3) 7097 v.AddArg(v0) 7098 v4 := b.NewValue0(v.Pos, OpRsh64x64, t) 7099 v4.AddArg(x) 7100 v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 7101 v5.AuxInt = 63 7102 v4.AddArg(v5) 7103 v.AddArg(v4) 7104 return true 7105 } 7106 // match: (Div64 <t> x (Const64 [c])) 7107 // cond: smagicOK(64,c) && smagic(64,c).m&1 != 0 7108 // result: (Sub64 <t> (Rsh64x64 <t> (Add64 <t> (Hmul64 <t> (Const64 <typ.UInt64> [int64(smagic(64,c).m)]) x) x) (Const64 <typ.UInt64> [smagic(64,c).s])) (Rsh64x64 <t> x (Const64 <typ.UInt64> [63]))) 7109 for { 7110 t := v.Type 7111 _ = v.Args[1] 7112 x := v.Args[0] 7113 v_1 := v.Args[1] 7114 if v_1.Op != OpConst64 { 7115 break 7116 } 7117 c := v_1.AuxInt 7118 if !(smagicOK(64, c) && smagic(64, c).m&1 != 0) { 7119 break 7120 } 7121 v.reset(OpSub64) 7122 v.Type = t 7123 v0 := b.NewValue0(v.Pos, OpRsh64x64, t) 7124 v1 := b.NewValue0(v.Pos, OpAdd64, t) 7125 v2 := b.NewValue0(v.Pos, OpHmul64, t) 7126 v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 7127 v3.AuxInt = int64(smagic(64, c).m) 7128 v2.AddArg(v3) 7129 v2.AddArg(x) 7130 v1.AddArg(v2) 7131 v1.AddArg(x) 7132 v0.AddArg(v1) 7133 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 7134 v4.AuxInt = smagic(64, c).s 7135 v0.AddArg(v4) 7136 v.AddArg(v0) 7137 v5 := b.NewValue0(v.Pos, OpRsh64x64, t) 7138 v5.AddArg(x) 7139 v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 7140 v6.AuxInt = 63 7141 v5.AddArg(v6) 7142 v.AddArg(v5) 7143 return true 7144 } 7145 return false 7146 } 7147 func rewriteValuegeneric_OpDiv64F_0(v *Value) bool { 7148 b := v.Block 7149 _ = b 7150 // match: (Div64F (Const64F [c]) (Const64F [d])) 7151 // cond: 7152 // result: (Const64F [f2i(i2f(c) / i2f(d))]) 7153 for { 7154 _ = v.Args[1] 7155 v_0 := v.Args[0] 7156 if v_0.Op != OpConst64F { 7157 break 7158 } 7159 c := v_0.AuxInt 7160 v_1 := v.Args[1] 7161 if v_1.Op != OpConst64F { 7162 break 7163 } 7164 d := v_1.AuxInt 7165 v.reset(OpConst64F) 7166 v.AuxInt = f2i(i2f(c) / i2f(d)) 7167 return true 7168 } 7169 // match: (Div64F x (Const64F <t> [c])) 7170 // cond: reciprocalExact64(i2f(c)) 7171 // result: (Mul64F x (Const64F <t> [f2i(1/i2f(c))])) 7172 for { 7173 _ = v.Args[1] 7174 x := v.Args[0] 7175 v_1 := v.Args[1] 7176 if v_1.Op != OpConst64F { 7177 break 7178 } 7179 t := v_1.Type 7180 c := v_1.AuxInt 7181 if !(reciprocalExact64(i2f(c))) { 7182 break 7183 } 7184 v.reset(OpMul64F) 7185 v.AddArg(x) 7186 v0 := b.NewValue0(v.Pos, OpConst64F, t) 7187 v0.AuxInt = f2i(1 / i2f(c)) 7188 v.AddArg(v0) 7189 return true 7190 } 7191 return false 7192 } 7193 func rewriteValuegeneric_OpDiv64u_0(v *Value) bool { 7194 b := v.Block 7195 _ = b 7196 config := b.Func.Config 7197 _ = config 7198 typ := &b.Func.Config.Types 7199 _ = typ 7200 // match: (Div64u (Const64 [c]) (Const64 [d])) 7201 // cond: d != 0 7202 // result: (Const64 [int64(uint64(c)/uint64(d))]) 7203 for { 7204 _ = v.Args[1] 7205 v_0 := v.Args[0] 7206 if v_0.Op != OpConst64 { 7207 break 7208 } 7209 c := v_0.AuxInt 7210 v_1 := v.Args[1] 7211 if v_1.Op != OpConst64 { 7212 break 7213 } 7214 d := v_1.AuxInt 7215 if !(d != 0) { 7216 break 7217 } 7218 v.reset(OpConst64) 7219 v.AuxInt = int64(uint64(c) / uint64(d)) 7220 return true 7221 } 7222 // match: (Div64u n (Const64 [c])) 7223 // cond: isPowerOfTwo(c) 7224 // result: (Rsh64Ux64 n (Const64 <typ.UInt64> [log2(c)])) 7225 for { 7226 _ = v.Args[1] 7227 n := v.Args[0] 7228 v_1 := v.Args[1] 7229 if v_1.Op != OpConst64 { 7230 break 7231 } 7232 c := v_1.AuxInt 7233 if !(isPowerOfTwo(c)) { 7234 break 7235 } 7236 
v.reset(OpRsh64Ux64) 7237 v.AddArg(n) 7238 v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 7239 v0.AuxInt = log2(c) 7240 v.AddArg(v0) 7241 return true 7242 } 7243 // match: (Div64u x (Const64 [c])) 7244 // cond: umagicOK(64, c) && config.RegSize == 8 && umagic(64,c).m&1 == 0 7245 // result: (Rsh64Ux64 <typ.UInt64> (Hmul64u <typ.UInt64> (Const64 <typ.UInt64> [int64(1<<63+umagic(64,c).m/2)]) x) (Const64 <typ.UInt64> [umagic(64,c).s-1])) 7246 for { 7247 _ = v.Args[1] 7248 x := v.Args[0] 7249 v_1 := v.Args[1] 7250 if v_1.Op != OpConst64 { 7251 break 7252 } 7253 c := v_1.AuxInt 7254 if !(umagicOK(64, c) && config.RegSize == 8 && umagic(64, c).m&1 == 0) { 7255 break 7256 } 7257 v.reset(OpRsh64Ux64) 7258 v.Type = typ.UInt64 7259 v0 := b.NewValue0(v.Pos, OpHmul64u, typ.UInt64) 7260 v1 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 7261 v1.AuxInt = int64(1<<63 + umagic(64, c).m/2) 7262 v0.AddArg(v1) 7263 v0.AddArg(x) 7264 v.AddArg(v0) 7265 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 7266 v2.AuxInt = umagic(64, c).s - 1 7267 v.AddArg(v2) 7268 return true 7269 } 7270 // match: (Div64u x (Const64 [c])) 7271 // cond: umagicOK(64, c) && config.RegSize == 8 && c&1 == 0 7272 // result: (Rsh64Ux64 <typ.UInt64> (Hmul64u <typ.UInt64> (Const64 <typ.UInt64> [int64(1<<63+(umagic(64,c).m+1)/2)]) (Rsh64Ux64 <typ.UInt64> x (Const64 <typ.UInt64> [1]))) (Const64 <typ.UInt64> [umagic(64,c).s-2])) 7273 for { 7274 _ = v.Args[1] 7275 x := v.Args[0] 7276 v_1 := v.Args[1] 7277 if v_1.Op != OpConst64 { 7278 break 7279 } 7280 c := v_1.AuxInt 7281 if !(umagicOK(64, c) && config.RegSize == 8 && c&1 == 0) { 7282 break 7283 } 7284 v.reset(OpRsh64Ux64) 7285 v.Type = typ.UInt64 7286 v0 := b.NewValue0(v.Pos, OpHmul64u, typ.UInt64) 7287 v1 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 7288 v1.AuxInt = int64(1<<63 + (umagic(64, c).m+1)/2) 7289 v0.AddArg(v1) 7290 v2 := b.NewValue0(v.Pos, OpRsh64Ux64, typ.UInt64) 7291 v2.AddArg(x) 7292 v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 7293 v3.AuxInt = 1 7294 v2.AddArg(v3) 7295 v0.AddArg(v2) 7296 v.AddArg(v0) 7297 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 7298 v4.AuxInt = umagic(64, c).s - 2 7299 v.AddArg(v4) 7300 return true 7301 } 7302 // match: (Div64u x (Const64 [c])) 7303 // cond: umagicOK(64, c) && config.RegSize == 8 7304 // result: (Rsh64Ux64 <typ.UInt64> (Avg64u x (Hmul64u <typ.UInt64> (Const64 <typ.UInt64> [int64(umagic(64,c).m)]) x)) (Const64 <typ.UInt64> [umagic(64,c).s-1])) 7305 for { 7306 _ = v.Args[1] 7307 x := v.Args[0] 7308 v_1 := v.Args[1] 7309 if v_1.Op != OpConst64 { 7310 break 7311 } 7312 c := v_1.AuxInt 7313 if !(umagicOK(64, c) && config.RegSize == 8) { 7314 break 7315 } 7316 v.reset(OpRsh64Ux64) 7317 v.Type = typ.UInt64 7318 v0 := b.NewValue0(v.Pos, OpAvg64u, typ.UInt64) 7319 v0.AddArg(x) 7320 v1 := b.NewValue0(v.Pos, OpHmul64u, typ.UInt64) 7321 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 7322 v2.AuxInt = int64(umagic(64, c).m) 7323 v1.AddArg(v2) 7324 v1.AddArg(x) 7325 v0.AddArg(v1) 7326 v.AddArg(v0) 7327 v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 7328 v3.AuxInt = umagic(64, c).s - 1 7329 v.AddArg(v3) 7330 return true 7331 } 7332 return false 7333 } 7334 func rewriteValuegeneric_OpDiv8_0(v *Value) bool { 7335 b := v.Block 7336 _ = b 7337 typ := &b.Func.Config.Types 7338 _ = typ 7339 // match: (Div8 (Const8 [c]) (Const8 [d])) 7340 // cond: d != 0 7341 // result: (Const8 [int64(int8(c)/int8(d))]) 7342 for { 7343 _ = v.Args[1] 7344 v_0 := v.Args[0] 7345 if v_0.Op != OpConst8 { 7346 break 7347 } 7348 c := v_0.AuxInt 7349 v_1 := v.Args[1] 7350 if v_1.Op 
!= OpConst8 { 7351 break 7352 } 7353 d := v_1.AuxInt 7354 if !(d != 0) { 7355 break 7356 } 7357 v.reset(OpConst8) 7358 v.AuxInt = int64(int8(c) / int8(d)) 7359 return true 7360 } 7361 // match: (Div8 <t> n (Const8 [c])) 7362 // cond: c < 0 && c != -1<<7 7363 // result: (Neg8 (Div8 <t> n (Const8 <t> [-c]))) 7364 for { 7365 t := v.Type 7366 _ = v.Args[1] 7367 n := v.Args[0] 7368 v_1 := v.Args[1] 7369 if v_1.Op != OpConst8 { 7370 break 7371 } 7372 c := v_1.AuxInt 7373 if !(c < 0 && c != -1<<7) { 7374 break 7375 } 7376 v.reset(OpNeg8) 7377 v0 := b.NewValue0(v.Pos, OpDiv8, t) 7378 v0.AddArg(n) 7379 v1 := b.NewValue0(v.Pos, OpConst8, t) 7380 v1.AuxInt = -c 7381 v0.AddArg(v1) 7382 v.AddArg(v0) 7383 return true 7384 } 7385 // match: (Div8 <t> x (Const8 [-1<<7 ])) 7386 // cond: 7387 // result: (Rsh8Ux64 (And8 <t> x (Neg8 <t> x)) (Const64 <typ.UInt64> [7 ])) 7388 for { 7389 t := v.Type 7390 _ = v.Args[1] 7391 x := v.Args[0] 7392 v_1 := v.Args[1] 7393 if v_1.Op != OpConst8 { 7394 break 7395 } 7396 if v_1.AuxInt != -1<<7 { 7397 break 7398 } 7399 v.reset(OpRsh8Ux64) 7400 v0 := b.NewValue0(v.Pos, OpAnd8, t) 7401 v0.AddArg(x) 7402 v1 := b.NewValue0(v.Pos, OpNeg8, t) 7403 v1.AddArg(x) 7404 v0.AddArg(v1) 7405 v.AddArg(v0) 7406 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 7407 v2.AuxInt = 7 7408 v.AddArg(v2) 7409 return true 7410 } 7411 // match: (Div8 <t> n (Const8 [c])) 7412 // cond: isPowerOfTwo(c) 7413 // result: (Rsh8x64 (Add8 <t> n (Rsh8Ux64 <t> (Rsh8x64 <t> n (Const64 <typ.UInt64> [ 7])) (Const64 <typ.UInt64> [ 8-log2(c)]))) (Const64 <typ.UInt64> [log2(c)])) 7414 for { 7415 t := v.Type 7416 _ = v.Args[1] 7417 n := v.Args[0] 7418 v_1 := v.Args[1] 7419 if v_1.Op != OpConst8 { 7420 break 7421 } 7422 c := v_1.AuxInt 7423 if !(isPowerOfTwo(c)) { 7424 break 7425 } 7426 v.reset(OpRsh8x64) 7427 v0 := b.NewValue0(v.Pos, OpAdd8, t) 7428 v0.AddArg(n) 7429 v1 := b.NewValue0(v.Pos, OpRsh8Ux64, t) 7430 v2 := b.NewValue0(v.Pos, OpRsh8x64, t) 7431 v2.AddArg(n) 7432 v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 7433 v3.AuxInt = 7 7434 v2.AddArg(v3) 7435 v1.AddArg(v2) 7436 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 7437 v4.AuxInt = 8 - log2(c) 7438 v1.AddArg(v4) 7439 v0.AddArg(v1) 7440 v.AddArg(v0) 7441 v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 7442 v5.AuxInt = log2(c) 7443 v.AddArg(v5) 7444 return true 7445 } 7446 // match: (Div8 <t> x (Const8 [c])) 7447 // cond: smagicOK(8,c) 7448 // result: (Sub8 <t> (Rsh32x64 <t> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(smagic(8,c).m)]) (SignExt8to32 x)) (Const64 <typ.UInt64> [8+smagic(8,c).s])) (Rsh32x64 <t> (SignExt8to32 x) (Const64 <typ.UInt64> [31]))) 7449 for { 7450 t := v.Type 7451 _ = v.Args[1] 7452 x := v.Args[0] 7453 v_1 := v.Args[1] 7454 if v_1.Op != OpConst8 { 7455 break 7456 } 7457 c := v_1.AuxInt 7458 if !(smagicOK(8, c)) { 7459 break 7460 } 7461 v.reset(OpSub8) 7462 v.Type = t 7463 v0 := b.NewValue0(v.Pos, OpRsh32x64, t) 7464 v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32) 7465 v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32) 7466 v2.AuxInt = int64(smagic(8, c).m) 7467 v1.AddArg(v2) 7468 v3 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 7469 v3.AddArg(x) 7470 v1.AddArg(v3) 7471 v0.AddArg(v1) 7472 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 7473 v4.AuxInt = 8 + smagic(8, c).s 7474 v0.AddArg(v4) 7475 v.AddArg(v0) 7476 v5 := b.NewValue0(v.Pos, OpRsh32x64, t) 7477 v6 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 7478 v6.AddArg(x) 7479 v5.AddArg(v6) 7480 v7 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 7481 v7.AuxInt = 31 7482 v5.AddArg(v7) 
7483 v.AddArg(v5) 7484 return true 7485 } 7486 return false 7487 } 7488 func rewriteValuegeneric_OpDiv8u_0(v *Value) bool { 7489 b := v.Block 7490 _ = b 7491 typ := &b.Func.Config.Types 7492 _ = typ 7493 // match: (Div8u (Const8 [c]) (Const8 [d])) 7494 // cond: d != 0 7495 // result: (Const8 [int64(int8(uint8(c)/uint8(d)))]) 7496 for { 7497 _ = v.Args[1] 7498 v_0 := v.Args[0] 7499 if v_0.Op != OpConst8 { 7500 break 7501 } 7502 c := v_0.AuxInt 7503 v_1 := v.Args[1] 7504 if v_1.Op != OpConst8 { 7505 break 7506 } 7507 d := v_1.AuxInt 7508 if !(d != 0) { 7509 break 7510 } 7511 v.reset(OpConst8) 7512 v.AuxInt = int64(int8(uint8(c) / uint8(d))) 7513 return true 7514 } 7515 // match: (Div8u n (Const8 [c])) 7516 // cond: isPowerOfTwo(c&0xff) 7517 // result: (Rsh8Ux64 n (Const64 <typ.UInt64> [log2(c&0xff)])) 7518 for { 7519 _ = v.Args[1] 7520 n := v.Args[0] 7521 v_1 := v.Args[1] 7522 if v_1.Op != OpConst8 { 7523 break 7524 } 7525 c := v_1.AuxInt 7526 if !(isPowerOfTwo(c & 0xff)) { 7527 break 7528 } 7529 v.reset(OpRsh8Ux64) 7530 v.AddArg(n) 7531 v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 7532 v0.AuxInt = log2(c & 0xff) 7533 v.AddArg(v0) 7534 return true 7535 } 7536 // match: (Div8u x (Const8 [c])) 7537 // cond: umagicOK(8, c) 7538 // result: (Trunc32to8 (Rsh32Ux64 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(1<<8+umagic(8,c).m)]) (ZeroExt8to32 x)) (Const64 <typ.UInt64> [8+umagic(8,c).s]))) 7539 for { 7540 _ = v.Args[1] 7541 x := v.Args[0] 7542 v_1 := v.Args[1] 7543 if v_1.Op != OpConst8 { 7544 break 7545 } 7546 c := v_1.AuxInt 7547 if !(umagicOK(8, c)) { 7548 break 7549 } 7550 v.reset(OpTrunc32to8) 7551 v0 := b.NewValue0(v.Pos, OpRsh32Ux64, typ.UInt32) 7552 v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32) 7553 v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32) 7554 v2.AuxInt = int64(1<<8 + umagic(8, c).m) 7555 v1.AddArg(v2) 7556 v3 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 7557 v3.AddArg(x) 7558 v1.AddArg(v3) 7559 v0.AddArg(v1) 7560 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 7561 v4.AuxInt = 8 + umagic(8, c).s 7562 v0.AddArg(v4) 7563 v.AddArg(v0) 7564 return true 7565 } 7566 return false 7567 } 7568 func rewriteValuegeneric_OpEq16_0(v *Value) bool { 7569 b := v.Block 7570 _ = b 7571 // match: (Eq16 x x) 7572 // cond: 7573 // result: (ConstBool [1]) 7574 for { 7575 _ = v.Args[1] 7576 x := v.Args[0] 7577 if x != v.Args[1] { 7578 break 7579 } 7580 v.reset(OpConstBool) 7581 v.AuxInt = 1 7582 return true 7583 } 7584 // match: (Eq16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x)) 7585 // cond: 7586 // result: (Eq16 (Const16 <t> [int64(int16(c-d))]) x) 7587 for { 7588 _ = v.Args[1] 7589 v_0 := v.Args[0] 7590 if v_0.Op != OpConst16 { 7591 break 7592 } 7593 t := v_0.Type 7594 c := v_0.AuxInt 7595 v_1 := v.Args[1] 7596 if v_1.Op != OpAdd16 { 7597 break 7598 } 7599 _ = v_1.Args[1] 7600 v_1_0 := v_1.Args[0] 7601 if v_1_0.Op != OpConst16 { 7602 break 7603 } 7604 if v_1_0.Type != t { 7605 break 7606 } 7607 d := v_1_0.AuxInt 7608 x := v_1.Args[1] 7609 v.reset(OpEq16) 7610 v0 := b.NewValue0(v.Pos, OpConst16, t) 7611 v0.AuxInt = int64(int16(c - d)) 7612 v.AddArg(v0) 7613 v.AddArg(x) 7614 return true 7615 } 7616 // match: (Eq16 (Const16 <t> [c]) (Add16 x (Const16 <t> [d]))) 7617 // cond: 7618 // result: (Eq16 (Const16 <t> [int64(int16(c-d))]) x) 7619 for { 7620 _ = v.Args[1] 7621 v_0 := v.Args[0] 7622 if v_0.Op != OpConst16 { 7623 break 7624 } 7625 t := v_0.Type 7626 c := v_0.AuxInt 7627 v_1 := v.Args[1] 7628 if v_1.Op != OpAdd16 { 7629 break 7630 } 7631 _ = v_1.Args[1] 7632 x := 
v_1.Args[0] 7633 v_1_1 := v_1.Args[1] 7634 if v_1_1.Op != OpConst16 { 7635 break 7636 } 7637 if v_1_1.Type != t { 7638 break 7639 } 7640 d := v_1_1.AuxInt 7641 v.reset(OpEq16) 7642 v0 := b.NewValue0(v.Pos, OpConst16, t) 7643 v0.AuxInt = int64(int16(c - d)) 7644 v.AddArg(v0) 7645 v.AddArg(x) 7646 return true 7647 } 7648 // match: (Eq16 (Add16 (Const16 <t> [d]) x) (Const16 <t> [c])) 7649 // cond: 7650 // result: (Eq16 (Const16 <t> [int64(int16(c-d))]) x) 7651 for { 7652 _ = v.Args[1] 7653 v_0 := v.Args[0] 7654 if v_0.Op != OpAdd16 { 7655 break 7656 } 7657 _ = v_0.Args[1] 7658 v_0_0 := v_0.Args[0] 7659 if v_0_0.Op != OpConst16 { 7660 break 7661 } 7662 t := v_0_0.Type 7663 d := v_0_0.AuxInt 7664 x := v_0.Args[1] 7665 v_1 := v.Args[1] 7666 if v_1.Op != OpConst16 { 7667 break 7668 } 7669 if v_1.Type != t { 7670 break 7671 } 7672 c := v_1.AuxInt 7673 v.reset(OpEq16) 7674 v0 := b.NewValue0(v.Pos, OpConst16, t) 7675 v0.AuxInt = int64(int16(c - d)) 7676 v.AddArg(v0) 7677 v.AddArg(x) 7678 return true 7679 } 7680 // match: (Eq16 (Add16 x (Const16 <t> [d])) (Const16 <t> [c])) 7681 // cond: 7682 // result: (Eq16 (Const16 <t> [int64(int16(c-d))]) x) 7683 for { 7684 _ = v.Args[1] 7685 v_0 := v.Args[0] 7686 if v_0.Op != OpAdd16 { 7687 break 7688 } 7689 _ = v_0.Args[1] 7690 x := v_0.Args[0] 7691 v_0_1 := v_0.Args[1] 7692 if v_0_1.Op != OpConst16 { 7693 break 7694 } 7695 t := v_0_1.Type 7696 d := v_0_1.AuxInt 7697 v_1 := v.Args[1] 7698 if v_1.Op != OpConst16 { 7699 break 7700 } 7701 if v_1.Type != t { 7702 break 7703 } 7704 c := v_1.AuxInt 7705 v.reset(OpEq16) 7706 v0 := b.NewValue0(v.Pos, OpConst16, t) 7707 v0.AuxInt = int64(int16(c - d)) 7708 v.AddArg(v0) 7709 v.AddArg(x) 7710 return true 7711 } 7712 // match: (Eq16 (Const16 [c]) (Const16 [d])) 7713 // cond: 7714 // result: (ConstBool [b2i(c == d)]) 7715 for { 7716 _ = v.Args[1] 7717 v_0 := v.Args[0] 7718 if v_0.Op != OpConst16 { 7719 break 7720 } 7721 c := v_0.AuxInt 7722 v_1 := v.Args[1] 7723 if v_1.Op != OpConst16 { 7724 break 7725 } 7726 d := v_1.AuxInt 7727 v.reset(OpConstBool) 7728 v.AuxInt = b2i(c == d) 7729 return true 7730 } 7731 // match: (Eq16 (Const16 [d]) (Const16 [c])) 7732 // cond: 7733 // result: (ConstBool [b2i(c == d)]) 7734 for { 7735 _ = v.Args[1] 7736 v_0 := v.Args[0] 7737 if v_0.Op != OpConst16 { 7738 break 7739 } 7740 d := v_0.AuxInt 7741 v_1 := v.Args[1] 7742 if v_1.Op != OpConst16 { 7743 break 7744 } 7745 c := v_1.AuxInt 7746 v.reset(OpConstBool) 7747 v.AuxInt = b2i(c == d) 7748 return true 7749 } 7750 return false 7751 } 7752 func rewriteValuegeneric_OpEq32_0(v *Value) bool { 7753 b := v.Block 7754 _ = b 7755 // match: (Eq32 x x) 7756 // cond: 7757 // result: (ConstBool [1]) 7758 for { 7759 _ = v.Args[1] 7760 x := v.Args[0] 7761 if x != v.Args[1] { 7762 break 7763 } 7764 v.reset(OpConstBool) 7765 v.AuxInt = 1 7766 return true 7767 } 7768 // match: (Eq32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x)) 7769 // cond: 7770 // result: (Eq32 (Const32 <t> [int64(int32(c-d))]) x) 7771 for { 7772 _ = v.Args[1] 7773 v_0 := v.Args[0] 7774 if v_0.Op != OpConst32 { 7775 break 7776 } 7777 t := v_0.Type 7778 c := v_0.AuxInt 7779 v_1 := v.Args[1] 7780 if v_1.Op != OpAdd32 { 7781 break 7782 } 7783 _ = v_1.Args[1] 7784 v_1_0 := v_1.Args[0] 7785 if v_1_0.Op != OpConst32 { 7786 break 7787 } 7788 if v_1_0.Type != t { 7789 break 7790 } 7791 d := v_1_0.AuxInt 7792 x := v_1.Args[1] 7793 v.reset(OpEq32) 7794 v0 := b.NewValue0(v.Pos, OpConst32, t) 7795 v0.AuxInt = int64(int32(c - d)) 7796 v.AddArg(v0) 7797 v.AddArg(x) 7798 return true 7799 } 7800 // match: 
(Eq32 (Const32 <t> [c]) (Add32 x (Const32 <t> [d]))) 7801 // cond: 7802 // result: (Eq32 (Const32 <t> [int64(int32(c-d))]) x) 7803 for { 7804 _ = v.Args[1] 7805 v_0 := v.Args[0] 7806 if v_0.Op != OpConst32 { 7807 break 7808 } 7809 t := v_0.Type 7810 c := v_0.AuxInt 7811 v_1 := v.Args[1] 7812 if v_1.Op != OpAdd32 { 7813 break 7814 } 7815 _ = v_1.Args[1] 7816 x := v_1.Args[0] 7817 v_1_1 := v_1.Args[1] 7818 if v_1_1.Op != OpConst32 { 7819 break 7820 } 7821 if v_1_1.Type != t { 7822 break 7823 } 7824 d := v_1_1.AuxInt 7825 v.reset(OpEq32) 7826 v0 := b.NewValue0(v.Pos, OpConst32, t) 7827 v0.AuxInt = int64(int32(c - d)) 7828 v.AddArg(v0) 7829 v.AddArg(x) 7830 return true 7831 } 7832 // match: (Eq32 (Add32 (Const32 <t> [d]) x) (Const32 <t> [c])) 7833 // cond: 7834 // result: (Eq32 (Const32 <t> [int64(int32(c-d))]) x) 7835 for { 7836 _ = v.Args[1] 7837 v_0 := v.Args[0] 7838 if v_0.Op != OpAdd32 { 7839 break 7840 } 7841 _ = v_0.Args[1] 7842 v_0_0 := v_0.Args[0] 7843 if v_0_0.Op != OpConst32 { 7844 break 7845 } 7846 t := v_0_0.Type 7847 d := v_0_0.AuxInt 7848 x := v_0.Args[1] 7849 v_1 := v.Args[1] 7850 if v_1.Op != OpConst32 { 7851 break 7852 } 7853 if v_1.Type != t { 7854 break 7855 } 7856 c := v_1.AuxInt 7857 v.reset(OpEq32) 7858 v0 := b.NewValue0(v.Pos, OpConst32, t) 7859 v0.AuxInt = int64(int32(c - d)) 7860 v.AddArg(v0) 7861 v.AddArg(x) 7862 return true 7863 } 7864 // match: (Eq32 (Add32 x (Const32 <t> [d])) (Const32 <t> [c])) 7865 // cond: 7866 // result: (Eq32 (Const32 <t> [int64(int32(c-d))]) x) 7867 for { 7868 _ = v.Args[1] 7869 v_0 := v.Args[0] 7870 if v_0.Op != OpAdd32 { 7871 break 7872 } 7873 _ = v_0.Args[1] 7874 x := v_0.Args[0] 7875 v_0_1 := v_0.Args[1] 7876 if v_0_1.Op != OpConst32 { 7877 break 7878 } 7879 t := v_0_1.Type 7880 d := v_0_1.AuxInt 7881 v_1 := v.Args[1] 7882 if v_1.Op != OpConst32 { 7883 break 7884 } 7885 if v_1.Type != t { 7886 break 7887 } 7888 c := v_1.AuxInt 7889 v.reset(OpEq32) 7890 v0 := b.NewValue0(v.Pos, OpConst32, t) 7891 v0.AuxInt = int64(int32(c - d)) 7892 v.AddArg(v0) 7893 v.AddArg(x) 7894 return true 7895 } 7896 // match: (Eq32 (Const32 [c]) (Const32 [d])) 7897 // cond: 7898 // result: (ConstBool [b2i(c == d)]) 7899 for { 7900 _ = v.Args[1] 7901 v_0 := v.Args[0] 7902 if v_0.Op != OpConst32 { 7903 break 7904 } 7905 c := v_0.AuxInt 7906 v_1 := v.Args[1] 7907 if v_1.Op != OpConst32 { 7908 break 7909 } 7910 d := v_1.AuxInt 7911 v.reset(OpConstBool) 7912 v.AuxInt = b2i(c == d) 7913 return true 7914 } 7915 // match: (Eq32 (Const32 [d]) (Const32 [c])) 7916 // cond: 7917 // result: (ConstBool [b2i(c == d)]) 7918 for { 7919 _ = v.Args[1] 7920 v_0 := v.Args[0] 7921 if v_0.Op != OpConst32 { 7922 break 7923 } 7924 d := v_0.AuxInt 7925 v_1 := v.Args[1] 7926 if v_1.Op != OpConst32 { 7927 break 7928 } 7929 c := v_1.AuxInt 7930 v.reset(OpConstBool) 7931 v.AuxInt = b2i(c == d) 7932 return true 7933 } 7934 return false 7935 } 7936 func rewriteValuegeneric_OpEq64_0(v *Value) bool { 7937 b := v.Block 7938 _ = b 7939 // match: (Eq64 x x) 7940 // cond: 7941 // result: (ConstBool [1]) 7942 for { 7943 _ = v.Args[1] 7944 x := v.Args[0] 7945 if x != v.Args[1] { 7946 break 7947 } 7948 v.reset(OpConstBool) 7949 v.AuxInt = 1 7950 return true 7951 } 7952 // match: (Eq64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x)) 7953 // cond: 7954 // result: (Eq64 (Const64 <t> [c-d]) x) 7955 for { 7956 _ = v.Args[1] 7957 v_0 := v.Args[0] 7958 if v_0.Op != OpConst64 { 7959 break 7960 } 7961 t := v_0.Type 7962 c := v_0.AuxInt 7963 v_1 := v.Args[1] 7964 if v_1.Op != OpAdd64 { 7965 break 7966 } 7967 _ = 
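// Illustrative note (not part of the generated rules): the Eq16/Eq32/Eq64 rules in this
// region move a constant across an addition, rewriting c == x+d into c-d == x so that
// the two constants fold into one and the comparison may simplify further. This is
// valid for every fixed-width integer because addition and subtraction wrap modulo 2^n.
// eqViaAdd and eqViaFoldedConst are made-up names for the two sides of the rewrite.
func eqViaAdd(x, c, d int32) bool         { return c == x+d }
func eqViaFoldedConst(x, c, d int32) bool { return c-d == x }

// The two functions agree for all inputs, including ones where x+d overflows and wraps,
// which is why the rewrite carries no extra condition.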
v_1.Args[1] 7968 v_1_0 := v_1.Args[0] 7969 if v_1_0.Op != OpConst64 { 7970 break 7971 } 7972 if v_1_0.Type != t { 7973 break 7974 } 7975 d := v_1_0.AuxInt 7976 x := v_1.Args[1] 7977 v.reset(OpEq64) 7978 v0 := b.NewValue0(v.Pos, OpConst64, t) 7979 v0.AuxInt = c - d 7980 v.AddArg(v0) 7981 v.AddArg(x) 7982 return true 7983 } 7984 // match: (Eq64 (Const64 <t> [c]) (Add64 x (Const64 <t> [d]))) 7985 // cond: 7986 // result: (Eq64 (Const64 <t> [c-d]) x) 7987 for { 7988 _ = v.Args[1] 7989 v_0 := v.Args[0] 7990 if v_0.Op != OpConst64 { 7991 break 7992 } 7993 t := v_0.Type 7994 c := v_0.AuxInt 7995 v_1 := v.Args[1] 7996 if v_1.Op != OpAdd64 { 7997 break 7998 } 7999 _ = v_1.Args[1] 8000 x := v_1.Args[0] 8001 v_1_1 := v_1.Args[1] 8002 if v_1_1.Op != OpConst64 { 8003 break 8004 } 8005 if v_1_1.Type != t { 8006 break 8007 } 8008 d := v_1_1.AuxInt 8009 v.reset(OpEq64) 8010 v0 := b.NewValue0(v.Pos, OpConst64, t) 8011 v0.AuxInt = c - d 8012 v.AddArg(v0) 8013 v.AddArg(x) 8014 return true 8015 } 8016 // match: (Eq64 (Add64 (Const64 <t> [d]) x) (Const64 <t> [c])) 8017 // cond: 8018 // result: (Eq64 (Const64 <t> [c-d]) x) 8019 for { 8020 _ = v.Args[1] 8021 v_0 := v.Args[0] 8022 if v_0.Op != OpAdd64 { 8023 break 8024 } 8025 _ = v_0.Args[1] 8026 v_0_0 := v_0.Args[0] 8027 if v_0_0.Op != OpConst64 { 8028 break 8029 } 8030 t := v_0_0.Type 8031 d := v_0_0.AuxInt 8032 x := v_0.Args[1] 8033 v_1 := v.Args[1] 8034 if v_1.Op != OpConst64 { 8035 break 8036 } 8037 if v_1.Type != t { 8038 break 8039 } 8040 c := v_1.AuxInt 8041 v.reset(OpEq64) 8042 v0 := b.NewValue0(v.Pos, OpConst64, t) 8043 v0.AuxInt = c - d 8044 v.AddArg(v0) 8045 v.AddArg(x) 8046 return true 8047 } 8048 // match: (Eq64 (Add64 x (Const64 <t> [d])) (Const64 <t> [c])) 8049 // cond: 8050 // result: (Eq64 (Const64 <t> [c-d]) x) 8051 for { 8052 _ = v.Args[1] 8053 v_0 := v.Args[0] 8054 if v_0.Op != OpAdd64 { 8055 break 8056 } 8057 _ = v_0.Args[1] 8058 x := v_0.Args[0] 8059 v_0_1 := v_0.Args[1] 8060 if v_0_1.Op != OpConst64 { 8061 break 8062 } 8063 t := v_0_1.Type 8064 d := v_0_1.AuxInt 8065 v_1 := v.Args[1] 8066 if v_1.Op != OpConst64 { 8067 break 8068 } 8069 if v_1.Type != t { 8070 break 8071 } 8072 c := v_1.AuxInt 8073 v.reset(OpEq64) 8074 v0 := b.NewValue0(v.Pos, OpConst64, t) 8075 v0.AuxInt = c - d 8076 v.AddArg(v0) 8077 v.AddArg(x) 8078 return true 8079 } 8080 // match: (Eq64 (Const64 [c]) (Const64 [d])) 8081 // cond: 8082 // result: (ConstBool [b2i(c == d)]) 8083 for { 8084 _ = v.Args[1] 8085 v_0 := v.Args[0] 8086 if v_0.Op != OpConst64 { 8087 break 8088 } 8089 c := v_0.AuxInt 8090 v_1 := v.Args[1] 8091 if v_1.Op != OpConst64 { 8092 break 8093 } 8094 d := v_1.AuxInt 8095 v.reset(OpConstBool) 8096 v.AuxInt = b2i(c == d) 8097 return true 8098 } 8099 // match: (Eq64 (Const64 [d]) (Const64 [c])) 8100 // cond: 8101 // result: (ConstBool [b2i(c == d)]) 8102 for { 8103 _ = v.Args[1] 8104 v_0 := v.Args[0] 8105 if v_0.Op != OpConst64 { 8106 break 8107 } 8108 d := v_0.AuxInt 8109 v_1 := v.Args[1] 8110 if v_1.Op != OpConst64 { 8111 break 8112 } 8113 c := v_1.AuxInt 8114 v.reset(OpConstBool) 8115 v.AuxInt = b2i(c == d) 8116 return true 8117 } 8118 return false 8119 } 8120 func rewriteValuegeneric_OpEq8_0(v *Value) bool { 8121 b := v.Block 8122 _ = b 8123 // match: (Eq8 x x) 8124 // cond: 8125 // result: (ConstBool [1]) 8126 for { 8127 _ = v.Args[1] 8128 x := v.Args[0] 8129 if x != v.Args[1] { 8130 break 8131 } 8132 v.reset(OpConstBool) 8133 v.AuxInt = 1 8134 return true 8135 } 8136 // match: (Eq8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x)) 8137 // cond: 8138 // result: 
(Eq8 (Const8 <t> [int64(int8(c-d))]) x) 8139 for { 8140 _ = v.Args[1] 8141 v_0 := v.Args[0] 8142 if v_0.Op != OpConst8 { 8143 break 8144 } 8145 t := v_0.Type 8146 c := v_0.AuxInt 8147 v_1 := v.Args[1] 8148 if v_1.Op != OpAdd8 { 8149 break 8150 } 8151 _ = v_1.Args[1] 8152 v_1_0 := v_1.Args[0] 8153 if v_1_0.Op != OpConst8 { 8154 break 8155 } 8156 if v_1_0.Type != t { 8157 break 8158 } 8159 d := v_1_0.AuxInt 8160 x := v_1.Args[1] 8161 v.reset(OpEq8) 8162 v0 := b.NewValue0(v.Pos, OpConst8, t) 8163 v0.AuxInt = int64(int8(c - d)) 8164 v.AddArg(v0) 8165 v.AddArg(x) 8166 return true 8167 } 8168 // match: (Eq8 (Const8 <t> [c]) (Add8 x (Const8 <t> [d]))) 8169 // cond: 8170 // result: (Eq8 (Const8 <t> [int64(int8(c-d))]) x) 8171 for { 8172 _ = v.Args[1] 8173 v_0 := v.Args[0] 8174 if v_0.Op != OpConst8 { 8175 break 8176 } 8177 t := v_0.Type 8178 c := v_0.AuxInt 8179 v_1 := v.Args[1] 8180 if v_1.Op != OpAdd8 { 8181 break 8182 } 8183 _ = v_1.Args[1] 8184 x := v_1.Args[0] 8185 v_1_1 := v_1.Args[1] 8186 if v_1_1.Op != OpConst8 { 8187 break 8188 } 8189 if v_1_1.Type != t { 8190 break 8191 } 8192 d := v_1_1.AuxInt 8193 v.reset(OpEq8) 8194 v0 := b.NewValue0(v.Pos, OpConst8, t) 8195 v0.AuxInt = int64(int8(c - d)) 8196 v.AddArg(v0) 8197 v.AddArg(x) 8198 return true 8199 } 8200 // match: (Eq8 (Add8 (Const8 <t> [d]) x) (Const8 <t> [c])) 8201 // cond: 8202 // result: (Eq8 (Const8 <t> [int64(int8(c-d))]) x) 8203 for { 8204 _ = v.Args[1] 8205 v_0 := v.Args[0] 8206 if v_0.Op != OpAdd8 { 8207 break 8208 } 8209 _ = v_0.Args[1] 8210 v_0_0 := v_0.Args[0] 8211 if v_0_0.Op != OpConst8 { 8212 break 8213 } 8214 t := v_0_0.Type 8215 d := v_0_0.AuxInt 8216 x := v_0.Args[1] 8217 v_1 := v.Args[1] 8218 if v_1.Op != OpConst8 { 8219 break 8220 } 8221 if v_1.Type != t { 8222 break 8223 } 8224 c := v_1.AuxInt 8225 v.reset(OpEq8) 8226 v0 := b.NewValue0(v.Pos, OpConst8, t) 8227 v0.AuxInt = int64(int8(c - d)) 8228 v.AddArg(v0) 8229 v.AddArg(x) 8230 return true 8231 } 8232 // match: (Eq8 (Add8 x (Const8 <t> [d])) (Const8 <t> [c])) 8233 // cond: 8234 // result: (Eq8 (Const8 <t> [int64(int8(c-d))]) x) 8235 for { 8236 _ = v.Args[1] 8237 v_0 := v.Args[0] 8238 if v_0.Op != OpAdd8 { 8239 break 8240 } 8241 _ = v_0.Args[1] 8242 x := v_0.Args[0] 8243 v_0_1 := v_0.Args[1] 8244 if v_0_1.Op != OpConst8 { 8245 break 8246 } 8247 t := v_0_1.Type 8248 d := v_0_1.AuxInt 8249 v_1 := v.Args[1] 8250 if v_1.Op != OpConst8 { 8251 break 8252 } 8253 if v_1.Type != t { 8254 break 8255 } 8256 c := v_1.AuxInt 8257 v.reset(OpEq8) 8258 v0 := b.NewValue0(v.Pos, OpConst8, t) 8259 v0.AuxInt = int64(int8(c - d)) 8260 v.AddArg(v0) 8261 v.AddArg(x) 8262 return true 8263 } 8264 // match: (Eq8 (Const8 [c]) (Const8 [d])) 8265 // cond: 8266 // result: (ConstBool [b2i(c == d)]) 8267 for { 8268 _ = v.Args[1] 8269 v_0 := v.Args[0] 8270 if v_0.Op != OpConst8 { 8271 break 8272 } 8273 c := v_0.AuxInt 8274 v_1 := v.Args[1] 8275 if v_1.Op != OpConst8 { 8276 break 8277 } 8278 d := v_1.AuxInt 8279 v.reset(OpConstBool) 8280 v.AuxInt = b2i(c == d) 8281 return true 8282 } 8283 // match: (Eq8 (Const8 [d]) (Const8 [c])) 8284 // cond: 8285 // result: (ConstBool [b2i(c == d)]) 8286 for { 8287 _ = v.Args[1] 8288 v_0 := v.Args[0] 8289 if v_0.Op != OpConst8 { 8290 break 8291 } 8292 d := v_0.AuxInt 8293 v_1 := v.Args[1] 8294 if v_1.Op != OpConst8 { 8295 break 8296 } 8297 c := v_1.AuxInt 8298 v.reset(OpConstBool) 8299 v.AuxInt = b2i(c == d) 8300 return true 8301 } 8302 return false 8303 } 8304 func rewriteValuegeneric_OpEqB_0(v *Value) bool { 8305 // match: (EqB (ConstBool [c]) (ConstBool [d])) 
8306 // cond: 8307 // result: (ConstBool [b2i(c == d)]) 8308 for { 8309 _ = v.Args[1] 8310 v_0 := v.Args[0] 8311 if v_0.Op != OpConstBool { 8312 break 8313 } 8314 c := v_0.AuxInt 8315 v_1 := v.Args[1] 8316 if v_1.Op != OpConstBool { 8317 break 8318 } 8319 d := v_1.AuxInt 8320 v.reset(OpConstBool) 8321 v.AuxInt = b2i(c == d) 8322 return true 8323 } 8324 // match: (EqB (ConstBool [d]) (ConstBool [c])) 8325 // cond: 8326 // result: (ConstBool [b2i(c == d)]) 8327 for { 8328 _ = v.Args[1] 8329 v_0 := v.Args[0] 8330 if v_0.Op != OpConstBool { 8331 break 8332 } 8333 d := v_0.AuxInt 8334 v_1 := v.Args[1] 8335 if v_1.Op != OpConstBool { 8336 break 8337 } 8338 c := v_1.AuxInt 8339 v.reset(OpConstBool) 8340 v.AuxInt = b2i(c == d) 8341 return true 8342 } 8343 // match: (EqB (ConstBool [0]) x) 8344 // cond: 8345 // result: (Not x) 8346 for { 8347 _ = v.Args[1] 8348 v_0 := v.Args[0] 8349 if v_0.Op != OpConstBool { 8350 break 8351 } 8352 if v_0.AuxInt != 0 { 8353 break 8354 } 8355 x := v.Args[1] 8356 v.reset(OpNot) 8357 v.AddArg(x) 8358 return true 8359 } 8360 // match: (EqB x (ConstBool [0])) 8361 // cond: 8362 // result: (Not x) 8363 for { 8364 _ = v.Args[1] 8365 x := v.Args[0] 8366 v_1 := v.Args[1] 8367 if v_1.Op != OpConstBool { 8368 break 8369 } 8370 if v_1.AuxInt != 0 { 8371 break 8372 } 8373 v.reset(OpNot) 8374 v.AddArg(x) 8375 return true 8376 } 8377 // match: (EqB (ConstBool [1]) x) 8378 // cond: 8379 // result: x 8380 for { 8381 _ = v.Args[1] 8382 v_0 := v.Args[0] 8383 if v_0.Op != OpConstBool { 8384 break 8385 } 8386 if v_0.AuxInt != 1 { 8387 break 8388 } 8389 x := v.Args[1] 8390 v.reset(OpCopy) 8391 v.Type = x.Type 8392 v.AddArg(x) 8393 return true 8394 } 8395 // match: (EqB x (ConstBool [1])) 8396 // cond: 8397 // result: x 8398 for { 8399 _ = v.Args[1] 8400 x := v.Args[0] 8401 v_1 := v.Args[1] 8402 if v_1.Op != OpConstBool { 8403 break 8404 } 8405 if v_1.AuxInt != 1 { 8406 break 8407 } 8408 v.reset(OpCopy) 8409 v.Type = x.Type 8410 v.AddArg(x) 8411 return true 8412 } 8413 return false 8414 } 8415 func rewriteValuegeneric_OpEqInter_0(v *Value) bool { 8416 b := v.Block 8417 _ = b 8418 typ := &b.Func.Config.Types 8419 _ = typ 8420 // match: (EqInter x y) 8421 // cond: 8422 // result: (EqPtr (ITab x) (ITab y)) 8423 for { 8424 _ = v.Args[1] 8425 x := v.Args[0] 8426 y := v.Args[1] 8427 v.reset(OpEqPtr) 8428 v0 := b.NewValue0(v.Pos, OpITab, typ.BytePtr) 8429 v0.AddArg(x) 8430 v.AddArg(v0) 8431 v1 := b.NewValue0(v.Pos, OpITab, typ.BytePtr) 8432 v1.AddArg(y) 8433 v.AddArg(v1) 8434 return true 8435 } 8436 } 8437 func rewriteValuegeneric_OpEqPtr_0(v *Value) bool { 8438 b := v.Block 8439 _ = b 8440 typ := &b.Func.Config.Types 8441 _ = typ 8442 // match: (EqPtr p (ConstNil)) 8443 // cond: 8444 // result: (Not (IsNonNil p)) 8445 for { 8446 _ = v.Args[1] 8447 p := v.Args[0] 8448 v_1 := v.Args[1] 8449 if v_1.Op != OpConstNil { 8450 break 8451 } 8452 v.reset(OpNot) 8453 v0 := b.NewValue0(v.Pos, OpIsNonNil, typ.Bool) 8454 v0.AddArg(p) 8455 v.AddArg(v0) 8456 return true 8457 } 8458 // match: (EqPtr (ConstNil) p) 8459 // cond: 8460 // result: (Not (IsNonNil p)) 8461 for { 8462 _ = v.Args[1] 8463 v_0 := v.Args[0] 8464 if v_0.Op != OpConstNil { 8465 break 8466 } 8467 p := v.Args[1] 8468 v.reset(OpNot) 8469 v0 := b.NewValue0(v.Pos, OpIsNonNil, typ.Bool) 8470 v0.AddArg(p) 8471 v.AddArg(v0) 8472 return true 8473 } 8474 // match: (EqPtr x x) 8475 // cond: 8476 // result: (ConstBool [1]) 8477 for { 8478 _ = v.Args[1] 8479 x := v.Args[0] 8480 if x != v.Args[1] { 8481 break 8482 } 8483 v.reset(OpConstBool) 8484 
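// Illustrative note (not part of the generated rules): every rule of the form
// (Eq*/EqB/Geq*/Greater*/Leq*/Less* (Const [c]) (Const [d])) in this file folds a
// comparison of two constants into a ConstBool whose AuxInt is 0 or 1. b2i is the
// package helper used for that bool-to-0/1 step; boolToAuxInt below is a hypothetical
// stand-in with the same behavior, shown only to make the encoding explicit, so that,
// for example, (Eq16 (Const16 [3]) (Const16 [3])) becomes (ConstBool [1]).
func boolToAuxInt(b bool) int64 {
	if b {
		return 1
	}
	return 0
}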
v.AuxInt = 1 8485 return true 8486 } 8487 // match: (EqPtr (Addr {a} x) (Addr {b} x)) 8488 // cond: 8489 // result: (ConstBool [b2i(a == b)]) 8490 for { 8491 _ = v.Args[1] 8492 v_0 := v.Args[0] 8493 if v_0.Op != OpAddr { 8494 break 8495 } 8496 a := v_0.Aux 8497 x := v_0.Args[0] 8498 v_1 := v.Args[1] 8499 if v_1.Op != OpAddr { 8500 break 8501 } 8502 b := v_1.Aux 8503 if x != v_1.Args[0] { 8504 break 8505 } 8506 v.reset(OpConstBool) 8507 v.AuxInt = b2i(a == b) 8508 return true 8509 } 8510 // match: (EqPtr (Addr {b} x) (Addr {a} x)) 8511 // cond: 8512 // result: (ConstBool [b2i(a == b)]) 8513 for { 8514 _ = v.Args[1] 8515 v_0 := v.Args[0] 8516 if v_0.Op != OpAddr { 8517 break 8518 } 8519 b := v_0.Aux 8520 x := v_0.Args[0] 8521 v_1 := v.Args[1] 8522 if v_1.Op != OpAddr { 8523 break 8524 } 8525 a := v_1.Aux 8526 if x != v_1.Args[0] { 8527 break 8528 } 8529 v.reset(OpConstBool) 8530 v.AuxInt = b2i(a == b) 8531 return true 8532 } 8533 return false 8534 } 8535 func rewriteValuegeneric_OpEqSlice_0(v *Value) bool { 8536 b := v.Block 8537 _ = b 8538 typ := &b.Func.Config.Types 8539 _ = typ 8540 // match: (EqSlice x y) 8541 // cond: 8542 // result: (EqPtr (SlicePtr x) (SlicePtr y)) 8543 for { 8544 _ = v.Args[1] 8545 x := v.Args[0] 8546 y := v.Args[1] 8547 v.reset(OpEqPtr) 8548 v0 := b.NewValue0(v.Pos, OpSlicePtr, typ.BytePtr) 8549 v0.AddArg(x) 8550 v.AddArg(v0) 8551 v1 := b.NewValue0(v.Pos, OpSlicePtr, typ.BytePtr) 8552 v1.AddArg(y) 8553 v.AddArg(v1) 8554 return true 8555 } 8556 } 8557 func rewriteValuegeneric_OpGeq16_0(v *Value) bool { 8558 // match: (Geq16 (Const16 [c]) (Const16 [d])) 8559 // cond: 8560 // result: (ConstBool [b2i(c >= d)]) 8561 for { 8562 _ = v.Args[1] 8563 v_0 := v.Args[0] 8564 if v_0.Op != OpConst16 { 8565 break 8566 } 8567 c := v_0.AuxInt 8568 v_1 := v.Args[1] 8569 if v_1.Op != OpConst16 { 8570 break 8571 } 8572 d := v_1.AuxInt 8573 v.reset(OpConstBool) 8574 v.AuxInt = b2i(c >= d) 8575 return true 8576 } 8577 return false 8578 } 8579 func rewriteValuegeneric_OpGeq16U_0(v *Value) bool { 8580 // match: (Geq16U (Const16 [c]) (Const16 [d])) 8581 // cond: 8582 // result: (ConstBool [b2i(uint16(c) >= uint16(d))]) 8583 for { 8584 _ = v.Args[1] 8585 v_0 := v.Args[0] 8586 if v_0.Op != OpConst16 { 8587 break 8588 } 8589 c := v_0.AuxInt 8590 v_1 := v.Args[1] 8591 if v_1.Op != OpConst16 { 8592 break 8593 } 8594 d := v_1.AuxInt 8595 v.reset(OpConstBool) 8596 v.AuxInt = b2i(uint16(c) >= uint16(d)) 8597 return true 8598 } 8599 return false 8600 } 8601 func rewriteValuegeneric_OpGeq32_0(v *Value) bool { 8602 // match: (Geq32 (Const32 [c]) (Const32 [d])) 8603 // cond: 8604 // result: (ConstBool [b2i(c >= d)]) 8605 for { 8606 _ = v.Args[1] 8607 v_0 := v.Args[0] 8608 if v_0.Op != OpConst32 { 8609 break 8610 } 8611 c := v_0.AuxInt 8612 v_1 := v.Args[1] 8613 if v_1.Op != OpConst32 { 8614 break 8615 } 8616 d := v_1.AuxInt 8617 v.reset(OpConstBool) 8618 v.AuxInt = b2i(c >= d) 8619 return true 8620 } 8621 return false 8622 } 8623 func rewriteValuegeneric_OpGeq32U_0(v *Value) bool { 8624 // match: (Geq32U (Const32 [c]) (Const32 [d])) 8625 // cond: 8626 // result: (ConstBool [b2i(uint32(c) >= uint32(d))]) 8627 for { 8628 _ = v.Args[1] 8629 v_0 := v.Args[0] 8630 if v_0.Op != OpConst32 { 8631 break 8632 } 8633 c := v_0.AuxInt 8634 v_1 := v.Args[1] 8635 if v_1.Op != OpConst32 { 8636 break 8637 } 8638 d := v_1.AuxInt 8639 v.reset(OpConstBool) 8640 v.AuxInt = b2i(uint32(c) >= uint32(d)) 8641 return true 8642 } 8643 return false 8644 } 8645 func rewriteValuegeneric_OpGeq64_0(v *Value) bool { 8646 // match: 
(Geq64 (Const64 [c]) (Const64 [d])) 8647 // cond: 8648 // result: (ConstBool [b2i(c >= d)]) 8649 for { 8650 _ = v.Args[1] 8651 v_0 := v.Args[0] 8652 if v_0.Op != OpConst64 { 8653 break 8654 } 8655 c := v_0.AuxInt 8656 v_1 := v.Args[1] 8657 if v_1.Op != OpConst64 { 8658 break 8659 } 8660 d := v_1.AuxInt 8661 v.reset(OpConstBool) 8662 v.AuxInt = b2i(c >= d) 8663 return true 8664 } 8665 return false 8666 } 8667 func rewriteValuegeneric_OpGeq64U_0(v *Value) bool { 8668 // match: (Geq64U (Const64 [c]) (Const64 [d])) 8669 // cond: 8670 // result: (ConstBool [b2i(uint64(c) >= uint64(d))]) 8671 for { 8672 _ = v.Args[1] 8673 v_0 := v.Args[0] 8674 if v_0.Op != OpConst64 { 8675 break 8676 } 8677 c := v_0.AuxInt 8678 v_1 := v.Args[1] 8679 if v_1.Op != OpConst64 { 8680 break 8681 } 8682 d := v_1.AuxInt 8683 v.reset(OpConstBool) 8684 v.AuxInt = b2i(uint64(c) >= uint64(d)) 8685 return true 8686 } 8687 return false 8688 } 8689 func rewriteValuegeneric_OpGeq8_0(v *Value) bool { 8690 // match: (Geq8 (Const8 [c]) (Const8 [d])) 8691 // cond: 8692 // result: (ConstBool [b2i(c >= d)]) 8693 for { 8694 _ = v.Args[1] 8695 v_0 := v.Args[0] 8696 if v_0.Op != OpConst8 { 8697 break 8698 } 8699 c := v_0.AuxInt 8700 v_1 := v.Args[1] 8701 if v_1.Op != OpConst8 { 8702 break 8703 } 8704 d := v_1.AuxInt 8705 v.reset(OpConstBool) 8706 v.AuxInt = b2i(c >= d) 8707 return true 8708 } 8709 return false 8710 } 8711 func rewriteValuegeneric_OpGeq8U_0(v *Value) bool { 8712 // match: (Geq8U (Const8 [c]) (Const8 [d])) 8713 // cond: 8714 // result: (ConstBool [b2i(uint8(c) >= uint8(d))]) 8715 for { 8716 _ = v.Args[1] 8717 v_0 := v.Args[0] 8718 if v_0.Op != OpConst8 { 8719 break 8720 } 8721 c := v_0.AuxInt 8722 v_1 := v.Args[1] 8723 if v_1.Op != OpConst8 { 8724 break 8725 } 8726 d := v_1.AuxInt 8727 v.reset(OpConstBool) 8728 v.AuxInt = b2i(uint8(c) >= uint8(d)) 8729 return true 8730 } 8731 return false 8732 } 8733 func rewriteValuegeneric_OpGreater16_0(v *Value) bool { 8734 // match: (Greater16 (Const16 [c]) (Const16 [d])) 8735 // cond: 8736 // result: (ConstBool [b2i(c > d)]) 8737 for { 8738 _ = v.Args[1] 8739 v_0 := v.Args[0] 8740 if v_0.Op != OpConst16 { 8741 break 8742 } 8743 c := v_0.AuxInt 8744 v_1 := v.Args[1] 8745 if v_1.Op != OpConst16 { 8746 break 8747 } 8748 d := v_1.AuxInt 8749 v.reset(OpConstBool) 8750 v.AuxInt = b2i(c > d) 8751 return true 8752 } 8753 return false 8754 } 8755 func rewriteValuegeneric_OpGreater16U_0(v *Value) bool { 8756 // match: (Greater16U (Const16 [c]) (Const16 [d])) 8757 // cond: 8758 // result: (ConstBool [b2i(uint16(c) > uint16(d))]) 8759 for { 8760 _ = v.Args[1] 8761 v_0 := v.Args[0] 8762 if v_0.Op != OpConst16 { 8763 break 8764 } 8765 c := v_0.AuxInt 8766 v_1 := v.Args[1] 8767 if v_1.Op != OpConst16 { 8768 break 8769 } 8770 d := v_1.AuxInt 8771 v.reset(OpConstBool) 8772 v.AuxInt = b2i(uint16(c) > uint16(d)) 8773 return true 8774 } 8775 return false 8776 } 8777 func rewriteValuegeneric_OpGreater32_0(v *Value) bool { 8778 // match: (Greater32 (Const32 [c]) (Const32 [d])) 8779 // cond: 8780 // result: (ConstBool [b2i(c > d)]) 8781 for { 8782 _ = v.Args[1] 8783 v_0 := v.Args[0] 8784 if v_0.Op != OpConst32 { 8785 break 8786 } 8787 c := v_0.AuxInt 8788 v_1 := v.Args[1] 8789 if v_1.Op != OpConst32 { 8790 break 8791 } 8792 d := v_1.AuxInt 8793 v.reset(OpConstBool) 8794 v.AuxInt = b2i(c > d) 8795 return true 8796 } 8797 return false 8798 } 8799 func rewriteValuegeneric_OpGreater32U_0(v *Value) bool { 8800 // match: (Greater32U (Const32 [c]) (Const32 [d])) 8801 // cond: 8802 // result: (ConstBool 
[b2i(uint32(c) > uint32(d))]) 8803 for { 8804 _ = v.Args[1] 8805 v_0 := v.Args[0] 8806 if v_0.Op != OpConst32 { 8807 break 8808 } 8809 c := v_0.AuxInt 8810 v_1 := v.Args[1] 8811 if v_1.Op != OpConst32 { 8812 break 8813 } 8814 d := v_1.AuxInt 8815 v.reset(OpConstBool) 8816 v.AuxInt = b2i(uint32(c) > uint32(d)) 8817 return true 8818 } 8819 return false 8820 } 8821 func rewriteValuegeneric_OpGreater64_0(v *Value) bool { 8822 // match: (Greater64 (Const64 [c]) (Const64 [d])) 8823 // cond: 8824 // result: (ConstBool [b2i(c > d)]) 8825 for { 8826 _ = v.Args[1] 8827 v_0 := v.Args[0] 8828 if v_0.Op != OpConst64 { 8829 break 8830 } 8831 c := v_0.AuxInt 8832 v_1 := v.Args[1] 8833 if v_1.Op != OpConst64 { 8834 break 8835 } 8836 d := v_1.AuxInt 8837 v.reset(OpConstBool) 8838 v.AuxInt = b2i(c > d) 8839 return true 8840 } 8841 return false 8842 } 8843 func rewriteValuegeneric_OpGreater64U_0(v *Value) bool { 8844 // match: (Greater64U (Const64 [c]) (Const64 [d])) 8845 // cond: 8846 // result: (ConstBool [b2i(uint64(c) > uint64(d))]) 8847 for { 8848 _ = v.Args[1] 8849 v_0 := v.Args[0] 8850 if v_0.Op != OpConst64 { 8851 break 8852 } 8853 c := v_0.AuxInt 8854 v_1 := v.Args[1] 8855 if v_1.Op != OpConst64 { 8856 break 8857 } 8858 d := v_1.AuxInt 8859 v.reset(OpConstBool) 8860 v.AuxInt = b2i(uint64(c) > uint64(d)) 8861 return true 8862 } 8863 return false 8864 } 8865 func rewriteValuegeneric_OpGreater8_0(v *Value) bool { 8866 // match: (Greater8 (Const8 [c]) (Const8 [d])) 8867 // cond: 8868 // result: (ConstBool [b2i(c > d)]) 8869 for { 8870 _ = v.Args[1] 8871 v_0 := v.Args[0] 8872 if v_0.Op != OpConst8 { 8873 break 8874 } 8875 c := v_0.AuxInt 8876 v_1 := v.Args[1] 8877 if v_1.Op != OpConst8 { 8878 break 8879 } 8880 d := v_1.AuxInt 8881 v.reset(OpConstBool) 8882 v.AuxInt = b2i(c > d) 8883 return true 8884 } 8885 return false 8886 } 8887 func rewriteValuegeneric_OpGreater8U_0(v *Value) bool { 8888 // match: (Greater8U (Const8 [c]) (Const8 [d])) 8889 // cond: 8890 // result: (ConstBool [b2i(uint8(c) > uint8(d))]) 8891 for { 8892 _ = v.Args[1] 8893 v_0 := v.Args[0] 8894 if v_0.Op != OpConst8 { 8895 break 8896 } 8897 c := v_0.AuxInt 8898 v_1 := v.Args[1] 8899 if v_1.Op != OpConst8 { 8900 break 8901 } 8902 d := v_1.AuxInt 8903 v.reset(OpConstBool) 8904 v.AuxInt = b2i(uint8(c) > uint8(d)) 8905 return true 8906 } 8907 return false 8908 } 8909 func rewriteValuegeneric_OpIMake_0(v *Value) bool { 8910 // match: (IMake typ (StructMake1 val)) 8911 // cond: 8912 // result: (IMake typ val) 8913 for { 8914 _ = v.Args[1] 8915 typ := v.Args[0] 8916 v_1 := v.Args[1] 8917 if v_1.Op != OpStructMake1 { 8918 break 8919 } 8920 val := v_1.Args[0] 8921 v.reset(OpIMake) 8922 v.AddArg(typ) 8923 v.AddArg(val) 8924 return true 8925 } 8926 // match: (IMake typ (ArrayMake1 val)) 8927 // cond: 8928 // result: (IMake typ val) 8929 for { 8930 _ = v.Args[1] 8931 typ := v.Args[0] 8932 v_1 := v.Args[1] 8933 if v_1.Op != OpArrayMake1 { 8934 break 8935 } 8936 val := v_1.Args[0] 8937 v.reset(OpIMake) 8938 v.AddArg(typ) 8939 v.AddArg(val) 8940 return true 8941 } 8942 return false 8943 } 8944 func rewriteValuegeneric_OpInterCall_0(v *Value) bool { 8945 // match: (InterCall [argsize] (Load (OffPtr [off] (ITab (IMake (Addr {itab} (SB)) _))) _) mem) 8946 // cond: devirt(v, itab, off) != nil 8947 // result: (StaticCall [argsize] {devirt(v, itab, off)} mem) 8948 for { 8949 argsize := v.AuxInt 8950 _ = v.Args[1] 8951 v_0 := v.Args[0] 8952 if v_0.Op != OpLoad { 8953 break 8954 } 8955 _ = v_0.Args[1] 8956 v_0_0 := v_0.Args[0] 8957 if v_0_0.Op != OpOffPtr { 
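// Illustrative note (not part of the generated rules): constants are carried in the
// AuxInt field as sign-extended int64 values, so the unsigned comparison rules above
// (Geq16U, Greater32U, and so on) cannot compare raw AuxInts directly. They first
// reinterpret each value at its true width with uintN(c) and compare those. The helper
// below is a hypothetical illustration of that step for 16 bits.
func geq16UFold(c, d int64) bool {
	// c and d are AuxInt encodings of 16-bit constants; e.g. the constant 0xFFFF is
	// stored as -1, and uint16(-1) == 0xFFFF restores the unsigned view before comparing.
	return uint16(c) >= uint16(d)
}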
8958 break 8959 } 8960 off := v_0_0.AuxInt 8961 v_0_0_0 := v_0_0.Args[0] 8962 if v_0_0_0.Op != OpITab { 8963 break 8964 } 8965 v_0_0_0_0 := v_0_0_0.Args[0] 8966 if v_0_0_0_0.Op != OpIMake { 8967 break 8968 } 8969 _ = v_0_0_0_0.Args[1] 8970 v_0_0_0_0_0 := v_0_0_0_0.Args[0] 8971 if v_0_0_0_0_0.Op != OpAddr { 8972 break 8973 } 8974 itab := v_0_0_0_0_0.Aux 8975 v_0_0_0_0_0_0 := v_0_0_0_0_0.Args[0] 8976 if v_0_0_0_0_0_0.Op != OpSB { 8977 break 8978 } 8979 mem := v.Args[1] 8980 if !(devirt(v, itab, off) != nil) { 8981 break 8982 } 8983 v.reset(OpStaticCall) 8984 v.AuxInt = argsize 8985 v.Aux = devirt(v, itab, off) 8986 v.AddArg(mem) 8987 return true 8988 } 8989 return false 8990 } 8991 func rewriteValuegeneric_OpIsInBounds_0(v *Value) bool { 8992 // match: (IsInBounds (ZeroExt8to32 _) (Const32 [c])) 8993 // cond: (1 << 8) <= c 8994 // result: (ConstBool [1]) 8995 for { 8996 _ = v.Args[1] 8997 v_0 := v.Args[0] 8998 if v_0.Op != OpZeroExt8to32 { 8999 break 9000 } 9001 v_1 := v.Args[1] 9002 if v_1.Op != OpConst32 { 9003 break 9004 } 9005 c := v_1.AuxInt 9006 if !((1 << 8) <= c) { 9007 break 9008 } 9009 v.reset(OpConstBool) 9010 v.AuxInt = 1 9011 return true 9012 } 9013 // match: (IsInBounds (ZeroExt8to64 _) (Const64 [c])) 9014 // cond: (1 << 8) <= c 9015 // result: (ConstBool [1]) 9016 for { 9017 _ = v.Args[1] 9018 v_0 := v.Args[0] 9019 if v_0.Op != OpZeroExt8to64 { 9020 break 9021 } 9022 v_1 := v.Args[1] 9023 if v_1.Op != OpConst64 { 9024 break 9025 } 9026 c := v_1.AuxInt 9027 if !((1 << 8) <= c) { 9028 break 9029 } 9030 v.reset(OpConstBool) 9031 v.AuxInt = 1 9032 return true 9033 } 9034 // match: (IsInBounds (ZeroExt16to32 _) (Const32 [c])) 9035 // cond: (1 << 16) <= c 9036 // result: (ConstBool [1]) 9037 for { 9038 _ = v.Args[1] 9039 v_0 := v.Args[0] 9040 if v_0.Op != OpZeroExt16to32 { 9041 break 9042 } 9043 v_1 := v.Args[1] 9044 if v_1.Op != OpConst32 { 9045 break 9046 } 9047 c := v_1.AuxInt 9048 if !((1 << 16) <= c) { 9049 break 9050 } 9051 v.reset(OpConstBool) 9052 v.AuxInt = 1 9053 return true 9054 } 9055 // match: (IsInBounds (ZeroExt16to64 _) (Const64 [c])) 9056 // cond: (1 << 16) <= c 9057 // result: (ConstBool [1]) 9058 for { 9059 _ = v.Args[1] 9060 v_0 := v.Args[0] 9061 if v_0.Op != OpZeroExt16to64 { 9062 break 9063 } 9064 v_1 := v.Args[1] 9065 if v_1.Op != OpConst64 { 9066 break 9067 } 9068 c := v_1.AuxInt 9069 if !((1 << 16) <= c) { 9070 break 9071 } 9072 v.reset(OpConstBool) 9073 v.AuxInt = 1 9074 return true 9075 } 9076 // match: (IsInBounds x x) 9077 // cond: 9078 // result: (ConstBool [0]) 9079 for { 9080 _ = v.Args[1] 9081 x := v.Args[0] 9082 if x != v.Args[1] { 9083 break 9084 } 9085 v.reset(OpConstBool) 9086 v.AuxInt = 0 9087 return true 9088 } 9089 // match: (IsInBounds (And8 (Const8 [c]) _) (Const8 [d])) 9090 // cond: 0 <= c && c < d 9091 // result: (ConstBool [1]) 9092 for { 9093 _ = v.Args[1] 9094 v_0 := v.Args[0] 9095 if v_0.Op != OpAnd8 { 9096 break 9097 } 9098 _ = v_0.Args[1] 9099 v_0_0 := v_0.Args[0] 9100 if v_0_0.Op != OpConst8 { 9101 break 9102 } 9103 c := v_0_0.AuxInt 9104 v_1 := v.Args[1] 9105 if v_1.Op != OpConst8 { 9106 break 9107 } 9108 d := v_1.AuxInt 9109 if !(0 <= c && c < d) { 9110 break 9111 } 9112 v.reset(OpConstBool) 9113 v.AuxInt = 1 9114 return true 9115 } 9116 // match: (IsInBounds (And8 _ (Const8 [c])) (Const8 [d])) 9117 // cond: 0 <= c && c < d 9118 // result: (ConstBool [1]) 9119 for { 9120 _ = v.Args[1] 9121 v_0 := v.Args[0] 9122 if v_0.Op != OpAnd8 { 9123 break 9124 } 9125 _ = v_0.Args[1] 9126 v_0_1 := v_0.Args[1] 9127 if v_0_1.Op != OpConst8 { 
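// Illustrative note (not part of the generated rules): the InterCall rule above performs
// devirtualization. When the method table of an interface value is a statically known
// symbol, because the interface was built from a concrete type right before the call,
// the indirect call through the itab slot can be replaced by a StaticCall to the concrete
// method. A source-level shape that can become a candidate for this rewrite (assuming no
// other pass transforms it first) looks roughly like the made-up example below.
type greeter interface{ hello() string }

type english struct{}

func (english) hello() string { return "hi" }

func callHello() string {
	var g greeter = english{} // itab is a compile-time-known symbol here
	return g.hello()          // candidate for InterCall -> StaticCall
}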
9128 break 9129 } 9130 c := v_0_1.AuxInt 9131 v_1 := v.Args[1] 9132 if v_1.Op != OpConst8 { 9133 break 9134 } 9135 d := v_1.AuxInt 9136 if !(0 <= c && c < d) { 9137 break 9138 } 9139 v.reset(OpConstBool) 9140 v.AuxInt = 1 9141 return true 9142 } 9143 // match: (IsInBounds (ZeroExt8to16 (And8 (Const8 [c]) _)) (Const16 [d])) 9144 // cond: 0 <= c && c < d 9145 // result: (ConstBool [1]) 9146 for { 9147 _ = v.Args[1] 9148 v_0 := v.Args[0] 9149 if v_0.Op != OpZeroExt8to16 { 9150 break 9151 } 9152 v_0_0 := v_0.Args[0] 9153 if v_0_0.Op != OpAnd8 { 9154 break 9155 } 9156 _ = v_0_0.Args[1] 9157 v_0_0_0 := v_0_0.Args[0] 9158 if v_0_0_0.Op != OpConst8 { 9159 break 9160 } 9161 c := v_0_0_0.AuxInt 9162 v_1 := v.Args[1] 9163 if v_1.Op != OpConst16 { 9164 break 9165 } 9166 d := v_1.AuxInt 9167 if !(0 <= c && c < d) { 9168 break 9169 } 9170 v.reset(OpConstBool) 9171 v.AuxInt = 1 9172 return true 9173 } 9174 // match: (IsInBounds (ZeroExt8to16 (And8 _ (Const8 [c]))) (Const16 [d])) 9175 // cond: 0 <= c && c < d 9176 // result: (ConstBool [1]) 9177 for { 9178 _ = v.Args[1] 9179 v_0 := v.Args[0] 9180 if v_0.Op != OpZeroExt8to16 { 9181 break 9182 } 9183 v_0_0 := v_0.Args[0] 9184 if v_0_0.Op != OpAnd8 { 9185 break 9186 } 9187 _ = v_0_0.Args[1] 9188 v_0_0_1 := v_0_0.Args[1] 9189 if v_0_0_1.Op != OpConst8 { 9190 break 9191 } 9192 c := v_0_0_1.AuxInt 9193 v_1 := v.Args[1] 9194 if v_1.Op != OpConst16 { 9195 break 9196 } 9197 d := v_1.AuxInt 9198 if !(0 <= c && c < d) { 9199 break 9200 } 9201 v.reset(OpConstBool) 9202 v.AuxInt = 1 9203 return true 9204 } 9205 // match: (IsInBounds (ZeroExt8to32 (And8 (Const8 [c]) _)) (Const32 [d])) 9206 // cond: 0 <= c && c < d 9207 // result: (ConstBool [1]) 9208 for { 9209 _ = v.Args[1] 9210 v_0 := v.Args[0] 9211 if v_0.Op != OpZeroExt8to32 { 9212 break 9213 } 9214 v_0_0 := v_0.Args[0] 9215 if v_0_0.Op != OpAnd8 { 9216 break 9217 } 9218 _ = v_0_0.Args[1] 9219 v_0_0_0 := v_0_0.Args[0] 9220 if v_0_0_0.Op != OpConst8 { 9221 break 9222 } 9223 c := v_0_0_0.AuxInt 9224 v_1 := v.Args[1] 9225 if v_1.Op != OpConst32 { 9226 break 9227 } 9228 d := v_1.AuxInt 9229 if !(0 <= c && c < d) { 9230 break 9231 } 9232 v.reset(OpConstBool) 9233 v.AuxInt = 1 9234 return true 9235 } 9236 return false 9237 } 9238 func rewriteValuegeneric_OpIsInBounds_10(v *Value) bool { 9239 // match: (IsInBounds (ZeroExt8to32 (And8 _ (Const8 [c]))) (Const32 [d])) 9240 // cond: 0 <= c && c < d 9241 // result: (ConstBool [1]) 9242 for { 9243 _ = v.Args[1] 9244 v_0 := v.Args[0] 9245 if v_0.Op != OpZeroExt8to32 { 9246 break 9247 } 9248 v_0_0 := v_0.Args[0] 9249 if v_0_0.Op != OpAnd8 { 9250 break 9251 } 9252 _ = v_0_0.Args[1] 9253 v_0_0_1 := v_0_0.Args[1] 9254 if v_0_0_1.Op != OpConst8 { 9255 break 9256 } 9257 c := v_0_0_1.AuxInt 9258 v_1 := v.Args[1] 9259 if v_1.Op != OpConst32 { 9260 break 9261 } 9262 d := v_1.AuxInt 9263 if !(0 <= c && c < d) { 9264 break 9265 } 9266 v.reset(OpConstBool) 9267 v.AuxInt = 1 9268 return true 9269 } 9270 // match: (IsInBounds (ZeroExt8to64 (And8 (Const8 [c]) _)) (Const64 [d])) 9271 // cond: 0 <= c && c < d 9272 // result: (ConstBool [1]) 9273 for { 9274 _ = v.Args[1] 9275 v_0 := v.Args[0] 9276 if v_0.Op != OpZeroExt8to64 { 9277 break 9278 } 9279 v_0_0 := v_0.Args[0] 9280 if v_0_0.Op != OpAnd8 { 9281 break 9282 } 9283 _ = v_0_0.Args[1] 9284 v_0_0_0 := v_0_0.Args[0] 9285 if v_0_0_0.Op != OpConst8 { 9286 break 9287 } 9288 c := v_0_0_0.AuxInt 9289 v_1 := v.Args[1] 9290 if v_1.Op != OpConst64 { 9291 break 9292 } 9293 d := v_1.AuxInt 9294 if !(0 <= c && c < d) { 9295 break 9296 } 9297 
v.reset(OpConstBool) 9298 v.AuxInt = 1 9299 return true 9300 } 9301 // match: (IsInBounds (ZeroExt8to64 (And8 _ (Const8 [c]))) (Const64 [d])) 9302 // cond: 0 <= c && c < d 9303 // result: (ConstBool [1]) 9304 for { 9305 _ = v.Args[1] 9306 v_0 := v.Args[0] 9307 if v_0.Op != OpZeroExt8to64 { 9308 break 9309 } 9310 v_0_0 := v_0.Args[0] 9311 if v_0_0.Op != OpAnd8 { 9312 break 9313 } 9314 _ = v_0_0.Args[1] 9315 v_0_0_1 := v_0_0.Args[1] 9316 if v_0_0_1.Op != OpConst8 { 9317 break 9318 } 9319 c := v_0_0_1.AuxInt 9320 v_1 := v.Args[1] 9321 if v_1.Op != OpConst64 { 9322 break 9323 } 9324 d := v_1.AuxInt 9325 if !(0 <= c && c < d) { 9326 break 9327 } 9328 v.reset(OpConstBool) 9329 v.AuxInt = 1 9330 return true 9331 } 9332 // match: (IsInBounds (And16 (Const16 [c]) _) (Const16 [d])) 9333 // cond: 0 <= c && c < d 9334 // result: (ConstBool [1]) 9335 for { 9336 _ = v.Args[1] 9337 v_0 := v.Args[0] 9338 if v_0.Op != OpAnd16 { 9339 break 9340 } 9341 _ = v_0.Args[1] 9342 v_0_0 := v_0.Args[0] 9343 if v_0_0.Op != OpConst16 { 9344 break 9345 } 9346 c := v_0_0.AuxInt 9347 v_1 := v.Args[1] 9348 if v_1.Op != OpConst16 { 9349 break 9350 } 9351 d := v_1.AuxInt 9352 if !(0 <= c && c < d) { 9353 break 9354 } 9355 v.reset(OpConstBool) 9356 v.AuxInt = 1 9357 return true 9358 } 9359 // match: (IsInBounds (And16 _ (Const16 [c])) (Const16 [d])) 9360 // cond: 0 <= c && c < d 9361 // result: (ConstBool [1]) 9362 for { 9363 _ = v.Args[1] 9364 v_0 := v.Args[0] 9365 if v_0.Op != OpAnd16 { 9366 break 9367 } 9368 _ = v_0.Args[1] 9369 v_0_1 := v_0.Args[1] 9370 if v_0_1.Op != OpConst16 { 9371 break 9372 } 9373 c := v_0_1.AuxInt 9374 v_1 := v.Args[1] 9375 if v_1.Op != OpConst16 { 9376 break 9377 } 9378 d := v_1.AuxInt 9379 if !(0 <= c && c < d) { 9380 break 9381 } 9382 v.reset(OpConstBool) 9383 v.AuxInt = 1 9384 return true 9385 } 9386 // match: (IsInBounds (ZeroExt16to32 (And16 (Const16 [c]) _)) (Const32 [d])) 9387 // cond: 0 <= c && c < d 9388 // result: (ConstBool [1]) 9389 for { 9390 _ = v.Args[1] 9391 v_0 := v.Args[0] 9392 if v_0.Op != OpZeroExt16to32 { 9393 break 9394 } 9395 v_0_0 := v_0.Args[0] 9396 if v_0_0.Op != OpAnd16 { 9397 break 9398 } 9399 _ = v_0_0.Args[1] 9400 v_0_0_0 := v_0_0.Args[0] 9401 if v_0_0_0.Op != OpConst16 { 9402 break 9403 } 9404 c := v_0_0_0.AuxInt 9405 v_1 := v.Args[1] 9406 if v_1.Op != OpConst32 { 9407 break 9408 } 9409 d := v_1.AuxInt 9410 if !(0 <= c && c < d) { 9411 break 9412 } 9413 v.reset(OpConstBool) 9414 v.AuxInt = 1 9415 return true 9416 } 9417 // match: (IsInBounds (ZeroExt16to32 (And16 _ (Const16 [c]))) (Const32 [d])) 9418 // cond: 0 <= c && c < d 9419 // result: (ConstBool [1]) 9420 for { 9421 _ = v.Args[1] 9422 v_0 := v.Args[0] 9423 if v_0.Op != OpZeroExt16to32 { 9424 break 9425 } 9426 v_0_0 := v_0.Args[0] 9427 if v_0_0.Op != OpAnd16 { 9428 break 9429 } 9430 _ = v_0_0.Args[1] 9431 v_0_0_1 := v_0_0.Args[1] 9432 if v_0_0_1.Op != OpConst16 { 9433 break 9434 } 9435 c := v_0_0_1.AuxInt 9436 v_1 := v.Args[1] 9437 if v_1.Op != OpConst32 { 9438 break 9439 } 9440 d := v_1.AuxInt 9441 if !(0 <= c && c < d) { 9442 break 9443 } 9444 v.reset(OpConstBool) 9445 v.AuxInt = 1 9446 return true 9447 } 9448 // match: (IsInBounds (ZeroExt16to64 (And16 (Const16 [c]) _)) (Const64 [d])) 9449 // cond: 0 <= c && c < d 9450 // result: (ConstBool [1]) 9451 for { 9452 _ = v.Args[1] 9453 v_0 := v.Args[0] 9454 if v_0.Op != OpZeroExt16to64 { 9455 break 9456 } 9457 v_0_0 := v_0.Args[0] 9458 if v_0_0.Op != OpAnd16 { 9459 break 9460 } 9461 _ = v_0_0.Args[1] 9462 v_0_0_0 := v_0_0.Args[0] 9463 if v_0_0_0.Op != OpConst16 { 
9464 break 9465 } 9466 c := v_0_0_0.AuxInt 9467 v_1 := v.Args[1] 9468 if v_1.Op != OpConst64 { 9469 break 9470 } 9471 d := v_1.AuxInt 9472 if !(0 <= c && c < d) { 9473 break 9474 } 9475 v.reset(OpConstBool) 9476 v.AuxInt = 1 9477 return true 9478 } 9479 // match: (IsInBounds (ZeroExt16to64 (And16 _ (Const16 [c]))) (Const64 [d])) 9480 // cond: 0 <= c && c < d 9481 // result: (ConstBool [1]) 9482 for { 9483 _ = v.Args[1] 9484 v_0 := v.Args[0] 9485 if v_0.Op != OpZeroExt16to64 { 9486 break 9487 } 9488 v_0_0 := v_0.Args[0] 9489 if v_0_0.Op != OpAnd16 { 9490 break 9491 } 9492 _ = v_0_0.Args[1] 9493 v_0_0_1 := v_0_0.Args[1] 9494 if v_0_0_1.Op != OpConst16 { 9495 break 9496 } 9497 c := v_0_0_1.AuxInt 9498 v_1 := v.Args[1] 9499 if v_1.Op != OpConst64 { 9500 break 9501 } 9502 d := v_1.AuxInt 9503 if !(0 <= c && c < d) { 9504 break 9505 } 9506 v.reset(OpConstBool) 9507 v.AuxInt = 1 9508 return true 9509 } 9510 // match: (IsInBounds (And32 (Const32 [c]) _) (Const32 [d])) 9511 // cond: 0 <= c && c < d 9512 // result: (ConstBool [1]) 9513 for { 9514 _ = v.Args[1] 9515 v_0 := v.Args[0] 9516 if v_0.Op != OpAnd32 { 9517 break 9518 } 9519 _ = v_0.Args[1] 9520 v_0_0 := v_0.Args[0] 9521 if v_0_0.Op != OpConst32 { 9522 break 9523 } 9524 c := v_0_0.AuxInt 9525 v_1 := v.Args[1] 9526 if v_1.Op != OpConst32 { 9527 break 9528 } 9529 d := v_1.AuxInt 9530 if !(0 <= c && c < d) { 9531 break 9532 } 9533 v.reset(OpConstBool) 9534 v.AuxInt = 1 9535 return true 9536 } 9537 return false 9538 } 9539 func rewriteValuegeneric_OpIsInBounds_20(v *Value) bool { 9540 // match: (IsInBounds (And32 _ (Const32 [c])) (Const32 [d])) 9541 // cond: 0 <= c && c < d 9542 // result: (ConstBool [1]) 9543 for { 9544 _ = v.Args[1] 9545 v_0 := v.Args[0] 9546 if v_0.Op != OpAnd32 { 9547 break 9548 } 9549 _ = v_0.Args[1] 9550 v_0_1 := v_0.Args[1] 9551 if v_0_1.Op != OpConst32 { 9552 break 9553 } 9554 c := v_0_1.AuxInt 9555 v_1 := v.Args[1] 9556 if v_1.Op != OpConst32 { 9557 break 9558 } 9559 d := v_1.AuxInt 9560 if !(0 <= c && c < d) { 9561 break 9562 } 9563 v.reset(OpConstBool) 9564 v.AuxInt = 1 9565 return true 9566 } 9567 // match: (IsInBounds (ZeroExt32to64 (And32 (Const32 [c]) _)) (Const64 [d])) 9568 // cond: 0 <= c && c < d 9569 // result: (ConstBool [1]) 9570 for { 9571 _ = v.Args[1] 9572 v_0 := v.Args[0] 9573 if v_0.Op != OpZeroExt32to64 { 9574 break 9575 } 9576 v_0_0 := v_0.Args[0] 9577 if v_0_0.Op != OpAnd32 { 9578 break 9579 } 9580 _ = v_0_0.Args[1] 9581 v_0_0_0 := v_0_0.Args[0] 9582 if v_0_0_0.Op != OpConst32 { 9583 break 9584 } 9585 c := v_0_0_0.AuxInt 9586 v_1 := v.Args[1] 9587 if v_1.Op != OpConst64 { 9588 break 9589 } 9590 d := v_1.AuxInt 9591 if !(0 <= c && c < d) { 9592 break 9593 } 9594 v.reset(OpConstBool) 9595 v.AuxInt = 1 9596 return true 9597 } 9598 // match: (IsInBounds (ZeroExt32to64 (And32 _ (Const32 [c]))) (Const64 [d])) 9599 // cond: 0 <= c && c < d 9600 // result: (ConstBool [1]) 9601 for { 9602 _ = v.Args[1] 9603 v_0 := v.Args[0] 9604 if v_0.Op != OpZeroExt32to64 { 9605 break 9606 } 9607 v_0_0 := v_0.Args[0] 9608 if v_0_0.Op != OpAnd32 { 9609 break 9610 } 9611 _ = v_0_0.Args[1] 9612 v_0_0_1 := v_0_0.Args[1] 9613 if v_0_0_1.Op != OpConst32 { 9614 break 9615 } 9616 c := v_0_0_1.AuxInt 9617 v_1 := v.Args[1] 9618 if v_1.Op != OpConst64 { 9619 break 9620 } 9621 d := v_1.AuxInt 9622 if !(0 <= c && c < d) { 9623 break 9624 } 9625 v.reset(OpConstBool) 9626 v.AuxInt = 1 9627 return true 9628 } 9629 // match: (IsInBounds (And64 (Const64 [c]) _) (Const64 [d])) 9630 // cond: 0 <= c && c < d 9631 // result: (ConstBool [1]) 9632 
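// Illustrative note (not part of the generated rules): the And/ZeroExt IsInBounds rules
// in this region prove bounds checks statically. If an index is masked with a constant
// c and the length is a constant d with 0 <= c < d, the index can never be out of range,
// so IsInBounds folds to (ConstBool [1]) and the check can be dropped. maskedIndex is a
// made-up example of source code that benefits from the And64 variant of the rule.
func maskedIndex(table *[16]byte, i int) byte {
	return table[i&15] // i&15 is always in [0,15], so the bounds check folds away
}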
for { 9633 _ = v.Args[1] 9634 v_0 := v.Args[0] 9635 if v_0.Op != OpAnd64 { 9636 break 9637 } 9638 _ = v_0.Args[1] 9639 v_0_0 := v_0.Args[0] 9640 if v_0_0.Op != OpConst64 { 9641 break 9642 } 9643 c := v_0_0.AuxInt 9644 v_1 := v.Args[1] 9645 if v_1.Op != OpConst64 { 9646 break 9647 } 9648 d := v_1.AuxInt 9649 if !(0 <= c && c < d) { 9650 break 9651 } 9652 v.reset(OpConstBool) 9653 v.AuxInt = 1 9654 return true 9655 } 9656 // match: (IsInBounds (And64 _ (Const64 [c])) (Const64 [d])) 9657 // cond: 0 <= c && c < d 9658 // result: (ConstBool [1]) 9659 for { 9660 _ = v.Args[1] 9661 v_0 := v.Args[0] 9662 if v_0.Op != OpAnd64 { 9663 break 9664 } 9665 _ = v_0.Args[1] 9666 v_0_1 := v_0.Args[1] 9667 if v_0_1.Op != OpConst64 { 9668 break 9669 } 9670 c := v_0_1.AuxInt 9671 v_1 := v.Args[1] 9672 if v_1.Op != OpConst64 { 9673 break 9674 } 9675 d := v_1.AuxInt 9676 if !(0 <= c && c < d) { 9677 break 9678 } 9679 v.reset(OpConstBool) 9680 v.AuxInt = 1 9681 return true 9682 } 9683 // match: (IsInBounds (Const32 [c]) (Const32 [d])) 9684 // cond: 9685 // result: (ConstBool [b2i(0 <= c && c < d)]) 9686 for { 9687 _ = v.Args[1] 9688 v_0 := v.Args[0] 9689 if v_0.Op != OpConst32 { 9690 break 9691 } 9692 c := v_0.AuxInt 9693 v_1 := v.Args[1] 9694 if v_1.Op != OpConst32 { 9695 break 9696 } 9697 d := v_1.AuxInt 9698 v.reset(OpConstBool) 9699 v.AuxInt = b2i(0 <= c && c < d) 9700 return true 9701 } 9702 // match: (IsInBounds (Const64 [c]) (Const64 [d])) 9703 // cond: 9704 // result: (ConstBool [b2i(0 <= c && c < d)]) 9705 for { 9706 _ = v.Args[1] 9707 v_0 := v.Args[0] 9708 if v_0.Op != OpConst64 { 9709 break 9710 } 9711 c := v_0.AuxInt 9712 v_1 := v.Args[1] 9713 if v_1.Op != OpConst64 { 9714 break 9715 } 9716 d := v_1.AuxInt 9717 v.reset(OpConstBool) 9718 v.AuxInt = b2i(0 <= c && c < d) 9719 return true 9720 } 9721 // match: (IsInBounds (Mod32u _ y) y) 9722 // cond: 9723 // result: (ConstBool [1]) 9724 for { 9725 _ = v.Args[1] 9726 v_0 := v.Args[0] 9727 if v_0.Op != OpMod32u { 9728 break 9729 } 9730 _ = v_0.Args[1] 9731 y := v_0.Args[1] 9732 if y != v.Args[1] { 9733 break 9734 } 9735 v.reset(OpConstBool) 9736 v.AuxInt = 1 9737 return true 9738 } 9739 // match: (IsInBounds (Mod64u _ y) y) 9740 // cond: 9741 // result: (ConstBool [1]) 9742 for { 9743 _ = v.Args[1] 9744 v_0 := v.Args[0] 9745 if v_0.Op != OpMod64u { 9746 break 9747 } 9748 _ = v_0.Args[1] 9749 y := v_0.Args[1] 9750 if y != v.Args[1] { 9751 break 9752 } 9753 v.reset(OpConstBool) 9754 v.AuxInt = 1 9755 return true 9756 } 9757 return false 9758 } 9759 func rewriteValuegeneric_OpIsNonNil_0(v *Value) bool { 9760 // match: (IsNonNil (ConstNil)) 9761 // cond: 9762 // result: (ConstBool [0]) 9763 for { 9764 v_0 := v.Args[0] 9765 if v_0.Op != OpConstNil { 9766 break 9767 } 9768 v.reset(OpConstBool) 9769 v.AuxInt = 0 9770 return true 9771 } 9772 return false 9773 } 9774 func rewriteValuegeneric_OpIsSliceInBounds_0(v *Value) bool { 9775 // match: (IsSliceInBounds x x) 9776 // cond: 9777 // result: (ConstBool [1]) 9778 for { 9779 _ = v.Args[1] 9780 x := v.Args[0] 9781 if x != v.Args[1] { 9782 break 9783 } 9784 v.reset(OpConstBool) 9785 v.AuxInt = 1 9786 return true 9787 } 9788 // match: (IsSliceInBounds (And32 (Const32 [c]) _) (Const32 [d])) 9789 // cond: 0 <= c && c <= d 9790 // result: (ConstBool [1]) 9791 for { 9792 _ = v.Args[1] 9793 v_0 := v.Args[0] 9794 if v_0.Op != OpAnd32 { 9795 break 9796 } 9797 _ = v_0.Args[1] 9798 v_0_0 := v_0.Args[0] 9799 if v_0_0.Op != OpConst32 { 9800 break 9801 } 9802 c := v_0_0.AuxInt 9803 v_1 := v.Args[1] 9804 if v_1.Op != OpConst32 { 
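// Illustrative note (not part of the generated rules): the Mod32u/Mod64u IsInBounds rules
// above rely on the fact that an unsigned remainder is always smaller than its divisor,
// so indexing with i % n, where n is the slice length, needs no bounds check. pickByHash
// is a made-up example; the modulo itself panics first if the slice is empty, so the rule
// never has to reason about a zero length.
func pickByHash(buckets []int, hash uint) int {
	// the remainder is < len(buckets), so the bounds check can be folded away
	// once both sides resolve to the same length value in SSA form.
	return buckets[hash%uint(len(buckets))]
}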
9805 break 9806 } 9807 d := v_1.AuxInt 9808 if !(0 <= c && c <= d) { 9809 break 9810 } 9811 v.reset(OpConstBool) 9812 v.AuxInt = 1 9813 return true 9814 } 9815 // match: (IsSliceInBounds (And32 _ (Const32 [c])) (Const32 [d])) 9816 // cond: 0 <= c && c <= d 9817 // result: (ConstBool [1]) 9818 for { 9819 _ = v.Args[1] 9820 v_0 := v.Args[0] 9821 if v_0.Op != OpAnd32 { 9822 break 9823 } 9824 _ = v_0.Args[1] 9825 v_0_1 := v_0.Args[1] 9826 if v_0_1.Op != OpConst32 { 9827 break 9828 } 9829 c := v_0_1.AuxInt 9830 v_1 := v.Args[1] 9831 if v_1.Op != OpConst32 { 9832 break 9833 } 9834 d := v_1.AuxInt 9835 if !(0 <= c && c <= d) { 9836 break 9837 } 9838 v.reset(OpConstBool) 9839 v.AuxInt = 1 9840 return true 9841 } 9842 // match: (IsSliceInBounds (And64 (Const64 [c]) _) (Const64 [d])) 9843 // cond: 0 <= c && c <= d 9844 // result: (ConstBool [1]) 9845 for { 9846 _ = v.Args[1] 9847 v_0 := v.Args[0] 9848 if v_0.Op != OpAnd64 { 9849 break 9850 } 9851 _ = v_0.Args[1] 9852 v_0_0 := v_0.Args[0] 9853 if v_0_0.Op != OpConst64 { 9854 break 9855 } 9856 c := v_0_0.AuxInt 9857 v_1 := v.Args[1] 9858 if v_1.Op != OpConst64 { 9859 break 9860 } 9861 d := v_1.AuxInt 9862 if !(0 <= c && c <= d) { 9863 break 9864 } 9865 v.reset(OpConstBool) 9866 v.AuxInt = 1 9867 return true 9868 } 9869 // match: (IsSliceInBounds (And64 _ (Const64 [c])) (Const64 [d])) 9870 // cond: 0 <= c && c <= d 9871 // result: (ConstBool [1]) 9872 for { 9873 _ = v.Args[1] 9874 v_0 := v.Args[0] 9875 if v_0.Op != OpAnd64 { 9876 break 9877 } 9878 _ = v_0.Args[1] 9879 v_0_1 := v_0.Args[1] 9880 if v_0_1.Op != OpConst64 { 9881 break 9882 } 9883 c := v_0_1.AuxInt 9884 v_1 := v.Args[1] 9885 if v_1.Op != OpConst64 { 9886 break 9887 } 9888 d := v_1.AuxInt 9889 if !(0 <= c && c <= d) { 9890 break 9891 } 9892 v.reset(OpConstBool) 9893 v.AuxInt = 1 9894 return true 9895 } 9896 // match: (IsSliceInBounds (Const32 [0]) _) 9897 // cond: 9898 // result: (ConstBool [1]) 9899 for { 9900 _ = v.Args[1] 9901 v_0 := v.Args[0] 9902 if v_0.Op != OpConst32 { 9903 break 9904 } 9905 if v_0.AuxInt != 0 { 9906 break 9907 } 9908 v.reset(OpConstBool) 9909 v.AuxInt = 1 9910 return true 9911 } 9912 // match: (IsSliceInBounds (Const64 [0]) _) 9913 // cond: 9914 // result: (ConstBool [1]) 9915 for { 9916 _ = v.Args[1] 9917 v_0 := v.Args[0] 9918 if v_0.Op != OpConst64 { 9919 break 9920 } 9921 if v_0.AuxInt != 0 { 9922 break 9923 } 9924 v.reset(OpConstBool) 9925 v.AuxInt = 1 9926 return true 9927 } 9928 // match: (IsSliceInBounds (Const32 [c]) (Const32 [d])) 9929 // cond: 9930 // result: (ConstBool [b2i(0 <= c && c <= d)]) 9931 for { 9932 _ = v.Args[1] 9933 v_0 := v.Args[0] 9934 if v_0.Op != OpConst32 { 9935 break 9936 } 9937 c := v_0.AuxInt 9938 v_1 := v.Args[1] 9939 if v_1.Op != OpConst32 { 9940 break 9941 } 9942 d := v_1.AuxInt 9943 v.reset(OpConstBool) 9944 v.AuxInt = b2i(0 <= c && c <= d) 9945 return true 9946 } 9947 // match: (IsSliceInBounds (Const64 [c]) (Const64 [d])) 9948 // cond: 9949 // result: (ConstBool [b2i(0 <= c && c <= d)]) 9950 for { 9951 _ = v.Args[1] 9952 v_0 := v.Args[0] 9953 if v_0.Op != OpConst64 { 9954 break 9955 } 9956 c := v_0.AuxInt 9957 v_1 := v.Args[1] 9958 if v_1.Op != OpConst64 { 9959 break 9960 } 9961 d := v_1.AuxInt 9962 v.reset(OpConstBool) 9963 v.AuxInt = b2i(0 <= c && c <= d) 9964 return true 9965 } 9966 // match: (IsSliceInBounds (SliceLen x) (SliceCap x)) 9967 // cond: 9968 // result: (ConstBool [1]) 9969 for { 9970 _ = v.Args[1] 9971 v_0 := v.Args[0] 9972 if v_0.Op != OpSliceLen { 9973 break 9974 } 9975 x := v_0.Args[0] 9976 v_1 := v.Args[1] 9977 
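// Illustrative note (not part of the generated rules): slice bounds checks also fold when
// the relation is guaranteed by the slice header itself. len(s) <= cap(s) always holds,
// so a slicing operation whose bound is len(s) checked against cap(s) satisfies
// IsSliceInBounds unconditionally, which is what the (SliceLen x) (SliceCap x) rule being
// matched here encodes. fullReslice is a made-up example of such a slicing expression.
func fullReslice(s []byte) []byte {
	return s[:len(s)] // len(s) <= cap(s) by construction, so the check can fold to true
}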
if v_1.Op != OpSliceCap { 9978 break 9979 } 9980 if x != v_1.Args[0] { 9981 break 9982 } 9983 v.reset(OpConstBool) 9984 v.AuxInt = 1 9985 return true 9986 } 9987 return false 9988 } 9989 func rewriteValuegeneric_OpLeq16_0(v *Value) bool { 9990 // match: (Leq16 (Const16 [c]) (Const16 [d])) 9991 // cond: 9992 // result: (ConstBool [b2i(c <= d)]) 9993 for { 9994 _ = v.Args[1] 9995 v_0 := v.Args[0] 9996 if v_0.Op != OpConst16 { 9997 break 9998 } 9999 c := v_0.AuxInt 10000 v_1 := v.Args[1] 10001 if v_1.Op != OpConst16 { 10002 break 10003 } 10004 d := v_1.AuxInt 10005 v.reset(OpConstBool) 10006 v.AuxInt = b2i(c <= d) 10007 return true 10008 } 10009 return false 10010 } 10011 func rewriteValuegeneric_OpLeq16U_0(v *Value) bool { 10012 // match: (Leq16U (Const16 [c]) (Const16 [d])) 10013 // cond: 10014 // result: (ConstBool [b2i(uint16(c) <= uint16(d))]) 10015 for { 10016 _ = v.Args[1] 10017 v_0 := v.Args[0] 10018 if v_0.Op != OpConst16 { 10019 break 10020 } 10021 c := v_0.AuxInt 10022 v_1 := v.Args[1] 10023 if v_1.Op != OpConst16 { 10024 break 10025 } 10026 d := v_1.AuxInt 10027 v.reset(OpConstBool) 10028 v.AuxInt = b2i(uint16(c) <= uint16(d)) 10029 return true 10030 } 10031 return false 10032 } 10033 func rewriteValuegeneric_OpLeq32_0(v *Value) bool { 10034 // match: (Leq32 (Const32 [c]) (Const32 [d])) 10035 // cond: 10036 // result: (ConstBool [b2i(c <= d)]) 10037 for { 10038 _ = v.Args[1] 10039 v_0 := v.Args[0] 10040 if v_0.Op != OpConst32 { 10041 break 10042 } 10043 c := v_0.AuxInt 10044 v_1 := v.Args[1] 10045 if v_1.Op != OpConst32 { 10046 break 10047 } 10048 d := v_1.AuxInt 10049 v.reset(OpConstBool) 10050 v.AuxInt = b2i(c <= d) 10051 return true 10052 } 10053 return false 10054 } 10055 func rewriteValuegeneric_OpLeq32U_0(v *Value) bool { 10056 // match: (Leq32U (Const32 [c]) (Const32 [d])) 10057 // cond: 10058 // result: (ConstBool [b2i(uint32(c) <= uint32(d))]) 10059 for { 10060 _ = v.Args[1] 10061 v_0 := v.Args[0] 10062 if v_0.Op != OpConst32 { 10063 break 10064 } 10065 c := v_0.AuxInt 10066 v_1 := v.Args[1] 10067 if v_1.Op != OpConst32 { 10068 break 10069 } 10070 d := v_1.AuxInt 10071 v.reset(OpConstBool) 10072 v.AuxInt = b2i(uint32(c) <= uint32(d)) 10073 return true 10074 } 10075 return false 10076 } 10077 func rewriteValuegeneric_OpLeq64_0(v *Value) bool { 10078 // match: (Leq64 (Const64 [c]) (Const64 [d])) 10079 // cond: 10080 // result: (ConstBool [b2i(c <= d)]) 10081 for { 10082 _ = v.Args[1] 10083 v_0 := v.Args[0] 10084 if v_0.Op != OpConst64 { 10085 break 10086 } 10087 c := v_0.AuxInt 10088 v_1 := v.Args[1] 10089 if v_1.Op != OpConst64 { 10090 break 10091 } 10092 d := v_1.AuxInt 10093 v.reset(OpConstBool) 10094 v.AuxInt = b2i(c <= d) 10095 return true 10096 } 10097 return false 10098 } 10099 func rewriteValuegeneric_OpLeq64U_0(v *Value) bool { 10100 // match: (Leq64U (Const64 [c]) (Const64 [d])) 10101 // cond: 10102 // result: (ConstBool [b2i(uint64(c) <= uint64(d))]) 10103 for { 10104 _ = v.Args[1] 10105 v_0 := v.Args[0] 10106 if v_0.Op != OpConst64 { 10107 break 10108 } 10109 c := v_0.AuxInt 10110 v_1 := v.Args[1] 10111 if v_1.Op != OpConst64 { 10112 break 10113 } 10114 d := v_1.AuxInt 10115 v.reset(OpConstBool) 10116 v.AuxInt = b2i(uint64(c) <= uint64(d)) 10117 return true 10118 } 10119 return false 10120 } 10121 func rewriteValuegeneric_OpLeq8_0(v *Value) bool { 10122 // match: (Leq8 (Const8 [c]) (Const8 [d])) 10123 // cond: 10124 // result: (ConstBool [b2i(c <= d)]) 10125 for { 10126 _ = v.Args[1] 10127 v_0 := v.Args[0] 10128 if v_0.Op != OpConst8 { 10129 break 10130 } 10131 
c := v_0.AuxInt 10132 v_1 := v.Args[1] 10133 if v_1.Op != OpConst8 { 10134 break 10135 } 10136 d := v_1.AuxInt 10137 v.reset(OpConstBool) 10138 v.AuxInt = b2i(c <= d) 10139 return true 10140 } 10141 return false 10142 } 10143 func rewriteValuegeneric_OpLeq8U_0(v *Value) bool { 10144 // match: (Leq8U (Const8 [c]) (Const8 [d])) 10145 // cond: 10146 // result: (ConstBool [b2i(uint8(c) <= uint8(d))]) 10147 for { 10148 _ = v.Args[1] 10149 v_0 := v.Args[0] 10150 if v_0.Op != OpConst8 { 10151 break 10152 } 10153 c := v_0.AuxInt 10154 v_1 := v.Args[1] 10155 if v_1.Op != OpConst8 { 10156 break 10157 } 10158 d := v_1.AuxInt 10159 v.reset(OpConstBool) 10160 v.AuxInt = b2i(uint8(c) <= uint8(d)) 10161 return true 10162 } 10163 return false 10164 } 10165 func rewriteValuegeneric_OpLess16_0(v *Value) bool { 10166 // match: (Less16 (Const16 [c]) (Const16 [d])) 10167 // cond: 10168 // result: (ConstBool [b2i(c < d)]) 10169 for { 10170 _ = v.Args[1] 10171 v_0 := v.Args[0] 10172 if v_0.Op != OpConst16 { 10173 break 10174 } 10175 c := v_0.AuxInt 10176 v_1 := v.Args[1] 10177 if v_1.Op != OpConst16 { 10178 break 10179 } 10180 d := v_1.AuxInt 10181 v.reset(OpConstBool) 10182 v.AuxInt = b2i(c < d) 10183 return true 10184 } 10185 return false 10186 } 10187 func rewriteValuegeneric_OpLess16U_0(v *Value) bool { 10188 // match: (Less16U (Const16 [c]) (Const16 [d])) 10189 // cond: 10190 // result: (ConstBool [b2i(uint16(c) < uint16(d))]) 10191 for { 10192 _ = v.Args[1] 10193 v_0 := v.Args[0] 10194 if v_0.Op != OpConst16 { 10195 break 10196 } 10197 c := v_0.AuxInt 10198 v_1 := v.Args[1] 10199 if v_1.Op != OpConst16 { 10200 break 10201 } 10202 d := v_1.AuxInt 10203 v.reset(OpConstBool) 10204 v.AuxInt = b2i(uint16(c) < uint16(d)) 10205 return true 10206 } 10207 return false 10208 } 10209 func rewriteValuegeneric_OpLess32_0(v *Value) bool { 10210 // match: (Less32 (Const32 [c]) (Const32 [d])) 10211 // cond: 10212 // result: (ConstBool [b2i(c < d)]) 10213 for { 10214 _ = v.Args[1] 10215 v_0 := v.Args[0] 10216 if v_0.Op != OpConst32 { 10217 break 10218 } 10219 c := v_0.AuxInt 10220 v_1 := v.Args[1] 10221 if v_1.Op != OpConst32 { 10222 break 10223 } 10224 d := v_1.AuxInt 10225 v.reset(OpConstBool) 10226 v.AuxInt = b2i(c < d) 10227 return true 10228 } 10229 return false 10230 } 10231 func rewriteValuegeneric_OpLess32U_0(v *Value) bool { 10232 // match: (Less32U (Const32 [c]) (Const32 [d])) 10233 // cond: 10234 // result: (ConstBool [b2i(uint32(c) < uint32(d))]) 10235 for { 10236 _ = v.Args[1] 10237 v_0 := v.Args[0] 10238 if v_0.Op != OpConst32 { 10239 break 10240 } 10241 c := v_0.AuxInt 10242 v_1 := v.Args[1] 10243 if v_1.Op != OpConst32 { 10244 break 10245 } 10246 d := v_1.AuxInt 10247 v.reset(OpConstBool) 10248 v.AuxInt = b2i(uint32(c) < uint32(d)) 10249 return true 10250 } 10251 return false 10252 } 10253 func rewriteValuegeneric_OpLess64_0(v *Value) bool { 10254 // match: (Less64 (Const64 [c]) (Const64 [d])) 10255 // cond: 10256 // result: (ConstBool [b2i(c < d)]) 10257 for { 10258 _ = v.Args[1] 10259 v_0 := v.Args[0] 10260 if v_0.Op != OpConst64 { 10261 break 10262 } 10263 c := v_0.AuxInt 10264 v_1 := v.Args[1] 10265 if v_1.Op != OpConst64 { 10266 break 10267 } 10268 d := v_1.AuxInt 10269 v.reset(OpConstBool) 10270 v.AuxInt = b2i(c < d) 10271 return true 10272 } 10273 return false 10274 } 10275 func rewriteValuegeneric_OpLess64U_0(v *Value) bool { 10276 // match: (Less64U (Const64 [c]) (Const64 [d])) 10277 // cond: 10278 // result: (ConstBool [b2i(uint64(c) < uint64(d))]) 10279 for { 10280 _ = v.Args[1] 10281 v_0 := 
v.Args[0] 10282 if v_0.Op != OpConst64 { 10283 break 10284 } 10285 c := v_0.AuxInt 10286 v_1 := v.Args[1] 10287 if v_1.Op != OpConst64 { 10288 break 10289 } 10290 d := v_1.AuxInt 10291 v.reset(OpConstBool) 10292 v.AuxInt = b2i(uint64(c) < uint64(d)) 10293 return true 10294 } 10295 return false 10296 } 10297 func rewriteValuegeneric_OpLess8_0(v *Value) bool { 10298 // match: (Less8 (Const8 [c]) (Const8 [d])) 10299 // cond: 10300 // result: (ConstBool [b2i(c < d)]) 10301 for { 10302 _ = v.Args[1] 10303 v_0 := v.Args[0] 10304 if v_0.Op != OpConst8 { 10305 break 10306 } 10307 c := v_0.AuxInt 10308 v_1 := v.Args[1] 10309 if v_1.Op != OpConst8 { 10310 break 10311 } 10312 d := v_1.AuxInt 10313 v.reset(OpConstBool) 10314 v.AuxInt = b2i(c < d) 10315 return true 10316 } 10317 return false 10318 } 10319 func rewriteValuegeneric_OpLess8U_0(v *Value) bool { 10320 // match: (Less8U (Const8 [c]) (Const8 [d])) 10321 // cond: 10322 // result: (ConstBool [b2i(uint8(c) < uint8(d))]) 10323 for { 10324 _ = v.Args[1] 10325 v_0 := v.Args[0] 10326 if v_0.Op != OpConst8 { 10327 break 10328 } 10329 c := v_0.AuxInt 10330 v_1 := v.Args[1] 10331 if v_1.Op != OpConst8 { 10332 break 10333 } 10334 d := v_1.AuxInt 10335 v.reset(OpConstBool) 10336 v.AuxInt = b2i(uint8(c) < uint8(d)) 10337 return true 10338 } 10339 return false 10340 } 10341 func rewriteValuegeneric_OpLoad_0(v *Value) bool { 10342 b := v.Block 10343 _ = b 10344 fe := b.Func.fe 10345 _ = fe 10346 // match: (Load <t1> p1 (Store {t2} p2 x _)) 10347 // cond: isSamePtr(p1,p2) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == t2.(*types.Type).Size() 10348 // result: x 10349 for { 10350 t1 := v.Type 10351 _ = v.Args[1] 10352 p1 := v.Args[0] 10353 v_1 := v.Args[1] 10354 if v_1.Op != OpStore { 10355 break 10356 } 10357 t2 := v_1.Aux 10358 _ = v_1.Args[2] 10359 p2 := v_1.Args[0] 10360 x := v_1.Args[1] 10361 if !(isSamePtr(p1, p2) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == t2.(*types.Type).Size()) { 10362 break 10363 } 10364 v.reset(OpCopy) 10365 v.Type = x.Type 10366 v.AddArg(x) 10367 return true 10368 } 10369 // match: (Load <t> _ _) 10370 // cond: t.IsStruct() && t.NumFields() == 0 && fe.CanSSA(t) 10371 // result: (StructMake0) 10372 for { 10373 t := v.Type 10374 _ = v.Args[1] 10375 if !(t.IsStruct() && t.NumFields() == 0 && fe.CanSSA(t)) { 10376 break 10377 } 10378 v.reset(OpStructMake0) 10379 return true 10380 } 10381 // match: (Load <t> ptr mem) 10382 // cond: t.IsStruct() && t.NumFields() == 1 && fe.CanSSA(t) 10383 // result: (StructMake1 (Load <t.FieldType(0)> (OffPtr <t.FieldType(0).PtrTo()> [0] ptr) mem)) 10384 for { 10385 t := v.Type 10386 _ = v.Args[1] 10387 ptr := v.Args[0] 10388 mem := v.Args[1] 10389 if !(t.IsStruct() && t.NumFields() == 1 && fe.CanSSA(t)) { 10390 break 10391 } 10392 v.reset(OpStructMake1) 10393 v0 := b.NewValue0(v.Pos, OpLoad, t.FieldType(0)) 10394 v1 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo()) 10395 v1.AuxInt = 0 10396 v1.AddArg(ptr) 10397 v0.AddArg(v1) 10398 v0.AddArg(mem) 10399 v.AddArg(v0) 10400 return true 10401 } 10402 // match: (Load <t> ptr mem) 10403 // cond: t.IsStruct() && t.NumFields() == 2 && fe.CanSSA(t) 10404 // result: (StructMake2 (Load <t.FieldType(0)> (OffPtr <t.FieldType(0).PtrTo()> [0] ptr) mem) (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem)) 10405 for { 10406 t := v.Type 10407 _ = v.Args[1] 10408 ptr := v.Args[0] 10409 mem := v.Args[1] 10410 if !(t.IsStruct() && t.NumFields() == 2 && fe.CanSSA(t)) { 10411 break 10412 } 10413 v.reset(OpStructMake2) 10414 
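// Illustrative note (not part of the generated rules): the struct and array Load rules in
// this function decompose a load of a small SSA-able aggregate into one load per field at
// its offset (OffPtr [t.FieldOff(i)]), reassembled with StructMakeN, so each field can be
// tracked and register-allocated independently. point and loadPoint are made-up names;
// the rewrite makes the load below behave as if it had been written field by field.
type point struct{ x, y int64 }

func loadPoint(p *point) point {
	// (Load <point> p mem) becomes, in effect:
	//   StructMake2 (Load <int64> (OffPtr [0] p) mem)
	//               (Load <int64> (OffPtr [8] p) mem)
	return point{x: p.x, y: p.y}
}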
v0 := b.NewValue0(v.Pos, OpLoad, t.FieldType(0)) 10415 v1 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo()) 10416 v1.AuxInt = 0 10417 v1.AddArg(ptr) 10418 v0.AddArg(v1) 10419 v0.AddArg(mem) 10420 v.AddArg(v0) 10421 v2 := b.NewValue0(v.Pos, OpLoad, t.FieldType(1)) 10422 v3 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo()) 10423 v3.AuxInt = t.FieldOff(1) 10424 v3.AddArg(ptr) 10425 v2.AddArg(v3) 10426 v2.AddArg(mem) 10427 v.AddArg(v2) 10428 return true 10429 } 10430 // match: (Load <t> ptr mem) 10431 // cond: t.IsStruct() && t.NumFields() == 3 && fe.CanSSA(t) 10432 // result: (StructMake3 (Load <t.FieldType(0)> (OffPtr <t.FieldType(0).PtrTo()> [0] ptr) mem) (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem) (Load <t.FieldType(2)> (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] ptr) mem)) 10433 for { 10434 t := v.Type 10435 _ = v.Args[1] 10436 ptr := v.Args[0] 10437 mem := v.Args[1] 10438 if !(t.IsStruct() && t.NumFields() == 3 && fe.CanSSA(t)) { 10439 break 10440 } 10441 v.reset(OpStructMake3) 10442 v0 := b.NewValue0(v.Pos, OpLoad, t.FieldType(0)) 10443 v1 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo()) 10444 v1.AuxInt = 0 10445 v1.AddArg(ptr) 10446 v0.AddArg(v1) 10447 v0.AddArg(mem) 10448 v.AddArg(v0) 10449 v2 := b.NewValue0(v.Pos, OpLoad, t.FieldType(1)) 10450 v3 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo()) 10451 v3.AuxInt = t.FieldOff(1) 10452 v3.AddArg(ptr) 10453 v2.AddArg(v3) 10454 v2.AddArg(mem) 10455 v.AddArg(v2) 10456 v4 := b.NewValue0(v.Pos, OpLoad, t.FieldType(2)) 10457 v5 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(2).PtrTo()) 10458 v5.AuxInt = t.FieldOff(2) 10459 v5.AddArg(ptr) 10460 v4.AddArg(v5) 10461 v4.AddArg(mem) 10462 v.AddArg(v4) 10463 return true 10464 } 10465 // match: (Load <t> ptr mem) 10466 // cond: t.IsStruct() && t.NumFields() == 4 && fe.CanSSA(t) 10467 // result: (StructMake4 (Load <t.FieldType(0)> (OffPtr <t.FieldType(0).PtrTo()> [0] ptr) mem) (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem) (Load <t.FieldType(2)> (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] ptr) mem) (Load <t.FieldType(3)> (OffPtr <t.FieldType(3).PtrTo()> [t.FieldOff(3)] ptr) mem)) 10468 for { 10469 t := v.Type 10470 _ = v.Args[1] 10471 ptr := v.Args[0] 10472 mem := v.Args[1] 10473 if !(t.IsStruct() && t.NumFields() == 4 && fe.CanSSA(t)) { 10474 break 10475 } 10476 v.reset(OpStructMake4) 10477 v0 := b.NewValue0(v.Pos, OpLoad, t.FieldType(0)) 10478 v1 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo()) 10479 v1.AuxInt = 0 10480 v1.AddArg(ptr) 10481 v0.AddArg(v1) 10482 v0.AddArg(mem) 10483 v.AddArg(v0) 10484 v2 := b.NewValue0(v.Pos, OpLoad, t.FieldType(1)) 10485 v3 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo()) 10486 v3.AuxInt = t.FieldOff(1) 10487 v3.AddArg(ptr) 10488 v2.AddArg(v3) 10489 v2.AddArg(mem) 10490 v.AddArg(v2) 10491 v4 := b.NewValue0(v.Pos, OpLoad, t.FieldType(2)) 10492 v5 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(2).PtrTo()) 10493 v5.AuxInt = t.FieldOff(2) 10494 v5.AddArg(ptr) 10495 v4.AddArg(v5) 10496 v4.AddArg(mem) 10497 v.AddArg(v4) 10498 v6 := b.NewValue0(v.Pos, OpLoad, t.FieldType(3)) 10499 v7 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(3).PtrTo()) 10500 v7.AuxInt = t.FieldOff(3) 10501 v7.AddArg(ptr) 10502 v6.AddArg(v7) 10503 v6.AddArg(mem) 10504 v.AddArg(v6) 10505 return true 10506 } 10507 // match: (Load <t> _ _) 10508 // cond: t.IsArray() && t.NumElem() == 0 10509 // result: (ArrayMake0) 10510 for { 10511 t := v.Type 10512 _ = v.Args[1] 10513 if !(t.IsArray() && 
t.NumElem() == 0) { 10514 break 10515 } 10516 v.reset(OpArrayMake0) 10517 return true 10518 } 10519 // match: (Load <t> ptr mem) 10520 // cond: t.IsArray() && t.NumElem() == 1 && fe.CanSSA(t) 10521 // result: (ArrayMake1 (Load <t.ElemType()> ptr mem)) 10522 for { 10523 t := v.Type 10524 _ = v.Args[1] 10525 ptr := v.Args[0] 10526 mem := v.Args[1] 10527 if !(t.IsArray() && t.NumElem() == 1 && fe.CanSSA(t)) { 10528 break 10529 } 10530 v.reset(OpArrayMake1) 10531 v0 := b.NewValue0(v.Pos, OpLoad, t.ElemType()) 10532 v0.AddArg(ptr) 10533 v0.AddArg(mem) 10534 v.AddArg(v0) 10535 return true 10536 } 10537 return false 10538 } 10539 func rewriteValuegeneric_OpLsh16x16_0(v *Value) bool { 10540 b := v.Block 10541 _ = b 10542 // match: (Lsh16x16 <t> x (Const16 [c])) 10543 // cond: 10544 // result: (Lsh16x64 x (Const64 <t> [int64(uint16(c))])) 10545 for { 10546 t := v.Type 10547 _ = v.Args[1] 10548 x := v.Args[0] 10549 v_1 := v.Args[1] 10550 if v_1.Op != OpConst16 { 10551 break 10552 } 10553 c := v_1.AuxInt 10554 v.reset(OpLsh16x64) 10555 v.AddArg(x) 10556 v0 := b.NewValue0(v.Pos, OpConst64, t) 10557 v0.AuxInt = int64(uint16(c)) 10558 v.AddArg(v0) 10559 return true 10560 } 10561 // match: (Lsh16x16 (Const16 [0]) _) 10562 // cond: 10563 // result: (Const16 [0]) 10564 for { 10565 _ = v.Args[1] 10566 v_0 := v.Args[0] 10567 if v_0.Op != OpConst16 { 10568 break 10569 } 10570 if v_0.AuxInt != 0 { 10571 break 10572 } 10573 v.reset(OpConst16) 10574 v.AuxInt = 0 10575 return true 10576 } 10577 return false 10578 } 10579 func rewriteValuegeneric_OpLsh16x32_0(v *Value) bool { 10580 b := v.Block 10581 _ = b 10582 // match: (Lsh16x32 <t> x (Const32 [c])) 10583 // cond: 10584 // result: (Lsh16x64 x (Const64 <t> [int64(uint32(c))])) 10585 for { 10586 t := v.Type 10587 _ = v.Args[1] 10588 x := v.Args[0] 10589 v_1 := v.Args[1] 10590 if v_1.Op != OpConst32 { 10591 break 10592 } 10593 c := v_1.AuxInt 10594 v.reset(OpLsh16x64) 10595 v.AddArg(x) 10596 v0 := b.NewValue0(v.Pos, OpConst64, t) 10597 v0.AuxInt = int64(uint32(c)) 10598 v.AddArg(v0) 10599 return true 10600 } 10601 // match: (Lsh16x32 (Const16 [0]) _) 10602 // cond: 10603 // result: (Const16 [0]) 10604 for { 10605 _ = v.Args[1] 10606 v_0 := v.Args[0] 10607 if v_0.Op != OpConst16 { 10608 break 10609 } 10610 if v_0.AuxInt != 0 { 10611 break 10612 } 10613 v.reset(OpConst16) 10614 v.AuxInt = 0 10615 return true 10616 } 10617 return false 10618 } 10619 func rewriteValuegeneric_OpLsh16x64_0(v *Value) bool { 10620 b := v.Block 10621 _ = b 10622 typ := &b.Func.Config.Types 10623 _ = typ 10624 // match: (Lsh16x64 (Const16 [c]) (Const64 [d])) 10625 // cond: 10626 // result: (Const16 [int64(int16(c) << uint64(d))]) 10627 for { 10628 _ = v.Args[1] 10629 v_0 := v.Args[0] 10630 if v_0.Op != OpConst16 { 10631 break 10632 } 10633 c := v_0.AuxInt 10634 v_1 := v.Args[1] 10635 if v_1.Op != OpConst64 { 10636 break 10637 } 10638 d := v_1.AuxInt 10639 v.reset(OpConst16) 10640 v.AuxInt = int64(int16(c) << uint64(d)) 10641 return true 10642 } 10643 // match: (Lsh16x64 x (Const64 [0])) 10644 // cond: 10645 // result: x 10646 for { 10647 _ = v.Args[1] 10648 x := v.Args[0] 10649 v_1 := v.Args[1] 10650 if v_1.Op != OpConst64 { 10651 break 10652 } 10653 if v_1.AuxInt != 0 { 10654 break 10655 } 10656 v.reset(OpCopy) 10657 v.Type = x.Type 10658 v.AddArg(x) 10659 return true 10660 } 10661 // match: (Lsh16x64 (Const16 [0]) _) 10662 // cond: 10663 // result: (Const16 [0]) 10664 for { 10665 _ = v.Args[1] 10666 v_0 := v.Args[0] 10667 if v_0.Op != OpConst16 { 10668 break 10669 } 10670 if v_0.AuxInt != 
0 { 10671 break 10672 } 10673 v.reset(OpConst16) 10674 v.AuxInt = 0 10675 return true 10676 } 10677 // match: (Lsh16x64 _ (Const64 [c])) 10678 // cond: uint64(c) >= 16 10679 // result: (Const16 [0]) 10680 for { 10681 _ = v.Args[1] 10682 v_1 := v.Args[1] 10683 if v_1.Op != OpConst64 { 10684 break 10685 } 10686 c := v_1.AuxInt 10687 if !(uint64(c) >= 16) { 10688 break 10689 } 10690 v.reset(OpConst16) 10691 v.AuxInt = 0 10692 return true 10693 } 10694 // match: (Lsh16x64 <t> (Lsh16x64 x (Const64 [c])) (Const64 [d])) 10695 // cond: !uaddOvf(c,d) 10696 // result: (Lsh16x64 x (Const64 <t> [c+d])) 10697 for { 10698 t := v.Type 10699 _ = v.Args[1] 10700 v_0 := v.Args[0] 10701 if v_0.Op != OpLsh16x64 { 10702 break 10703 } 10704 _ = v_0.Args[1] 10705 x := v_0.Args[0] 10706 v_0_1 := v_0.Args[1] 10707 if v_0_1.Op != OpConst64 { 10708 break 10709 } 10710 c := v_0_1.AuxInt 10711 v_1 := v.Args[1] 10712 if v_1.Op != OpConst64 { 10713 break 10714 } 10715 d := v_1.AuxInt 10716 if !(!uaddOvf(c, d)) { 10717 break 10718 } 10719 v.reset(OpLsh16x64) 10720 v.AddArg(x) 10721 v0 := b.NewValue0(v.Pos, OpConst64, t) 10722 v0.AuxInt = c + d 10723 v.AddArg(v0) 10724 return true 10725 } 10726 // match: (Lsh16x64 (Rsh16Ux64 (Lsh16x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) 10727 // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3) 10728 // result: (Lsh16x64 x (Const64 <typ.UInt64> [c1-c2+c3])) 10729 for { 10730 _ = v.Args[1] 10731 v_0 := v.Args[0] 10732 if v_0.Op != OpRsh16Ux64 { 10733 break 10734 } 10735 _ = v_0.Args[1] 10736 v_0_0 := v_0.Args[0] 10737 if v_0_0.Op != OpLsh16x64 { 10738 break 10739 } 10740 _ = v_0_0.Args[1] 10741 x := v_0_0.Args[0] 10742 v_0_0_1 := v_0_0.Args[1] 10743 if v_0_0_1.Op != OpConst64 { 10744 break 10745 } 10746 c1 := v_0_0_1.AuxInt 10747 v_0_1 := v_0.Args[1] 10748 if v_0_1.Op != OpConst64 { 10749 break 10750 } 10751 c2 := v_0_1.AuxInt 10752 v_1 := v.Args[1] 10753 if v_1.Op != OpConst64 { 10754 break 10755 } 10756 c3 := v_1.AuxInt 10757 if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { 10758 break 10759 } 10760 v.reset(OpLsh16x64) 10761 v.AddArg(x) 10762 v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 10763 v0.AuxInt = c1 - c2 + c3 10764 v.AddArg(v0) 10765 return true 10766 } 10767 return false 10768 } 10769 func rewriteValuegeneric_OpLsh16x8_0(v *Value) bool { 10770 b := v.Block 10771 _ = b 10772 // match: (Lsh16x8 <t> x (Const8 [c])) 10773 // cond: 10774 // result: (Lsh16x64 x (Const64 <t> [int64(uint8(c))])) 10775 for { 10776 t := v.Type 10777 _ = v.Args[1] 10778 x := v.Args[0] 10779 v_1 := v.Args[1] 10780 if v_1.Op != OpConst8 { 10781 break 10782 } 10783 c := v_1.AuxInt 10784 v.reset(OpLsh16x64) 10785 v.AddArg(x) 10786 v0 := b.NewValue0(v.Pos, OpConst64, t) 10787 v0.AuxInt = int64(uint8(c)) 10788 v.AddArg(v0) 10789 return true 10790 } 10791 // match: (Lsh16x8 (Const16 [0]) _) 10792 // cond: 10793 // result: (Const16 [0]) 10794 for { 10795 _ = v.Args[1] 10796 v_0 := v.Args[0] 10797 if v_0.Op != OpConst16 { 10798 break 10799 } 10800 if v_0.AuxInt != 0 { 10801 break 10802 } 10803 v.reset(OpConst16) 10804 v.AuxInt = 0 10805 return true 10806 } 10807 return false 10808 } 10809 func rewriteValuegeneric_OpLsh32x16_0(v *Value) bool { 10810 b := v.Block 10811 _ = b 10812 // match: (Lsh32x16 <t> x (Const16 [c])) 10813 // cond: 10814 // result: (Lsh32x64 x (Const64 <t> [int64(uint16(c))])) 10815 for { 10816 t := v.Type 10817 _ = v.Args[1] 10818 x := v.Args[0] 10819 v_1 := v.Args[1] 10820 if v_1.Op != OpConst16 { 10821 break 
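// Illustrative sketch (not part of the generated file): the Lsh16x* rules
// above first normalize 8/16/32-bit shift counts to a zero-extended Const64,
// and then (Lsh16x64 (Lsh16x64 x [c]) [d]) is collapsed into a single shift
// by c+d, guarded by !uaddOvf(c,d). The guard only needs to rule out the
// unsigned sum wrapping around; the hypothetical helpers below just restate
// that property.
func uaddWraps(c, d int64) bool {
	// True when the unsigned addition c+d overflows, which is the case the
	// !uaddOvf(c,d) condition excludes before merging the two shift counts.
	return uint64(c)+uint64(d) < uint64(c)
}

func doubleShift16Equal(x uint16, c, d uint64) bool {
	// With no wrap-around in c+d, shifting twice equals shifting once by the
	// sum; counts of 16 or more flush a 16-bit value to zero on both sides.
	return x<<c<<d == x<<(c+d)
}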
10822 } 10823 c := v_1.AuxInt 10824 v.reset(OpLsh32x64) 10825 v.AddArg(x) 10826 v0 := b.NewValue0(v.Pos, OpConst64, t) 10827 v0.AuxInt = int64(uint16(c)) 10828 v.AddArg(v0) 10829 return true 10830 } 10831 // match: (Lsh32x16 (Const32 [0]) _) 10832 // cond: 10833 // result: (Const32 [0]) 10834 for { 10835 _ = v.Args[1] 10836 v_0 := v.Args[0] 10837 if v_0.Op != OpConst32 { 10838 break 10839 } 10840 if v_0.AuxInt != 0 { 10841 break 10842 } 10843 v.reset(OpConst32) 10844 v.AuxInt = 0 10845 return true 10846 } 10847 return false 10848 } 10849 func rewriteValuegeneric_OpLsh32x32_0(v *Value) bool { 10850 b := v.Block 10851 _ = b 10852 // match: (Lsh32x32 <t> x (Const32 [c])) 10853 // cond: 10854 // result: (Lsh32x64 x (Const64 <t> [int64(uint32(c))])) 10855 for { 10856 t := v.Type 10857 _ = v.Args[1] 10858 x := v.Args[0] 10859 v_1 := v.Args[1] 10860 if v_1.Op != OpConst32 { 10861 break 10862 } 10863 c := v_1.AuxInt 10864 v.reset(OpLsh32x64) 10865 v.AddArg(x) 10866 v0 := b.NewValue0(v.Pos, OpConst64, t) 10867 v0.AuxInt = int64(uint32(c)) 10868 v.AddArg(v0) 10869 return true 10870 } 10871 // match: (Lsh32x32 (Const32 [0]) _) 10872 // cond: 10873 // result: (Const32 [0]) 10874 for { 10875 _ = v.Args[1] 10876 v_0 := v.Args[0] 10877 if v_0.Op != OpConst32 { 10878 break 10879 } 10880 if v_0.AuxInt != 0 { 10881 break 10882 } 10883 v.reset(OpConst32) 10884 v.AuxInt = 0 10885 return true 10886 } 10887 return false 10888 } 10889 func rewriteValuegeneric_OpLsh32x64_0(v *Value) bool { 10890 b := v.Block 10891 _ = b 10892 typ := &b.Func.Config.Types 10893 _ = typ 10894 // match: (Lsh32x64 (Const32 [c]) (Const64 [d])) 10895 // cond: 10896 // result: (Const32 [int64(int32(c) << uint64(d))]) 10897 for { 10898 _ = v.Args[1] 10899 v_0 := v.Args[0] 10900 if v_0.Op != OpConst32 { 10901 break 10902 } 10903 c := v_0.AuxInt 10904 v_1 := v.Args[1] 10905 if v_1.Op != OpConst64 { 10906 break 10907 } 10908 d := v_1.AuxInt 10909 v.reset(OpConst32) 10910 v.AuxInt = int64(int32(c) << uint64(d)) 10911 return true 10912 } 10913 // match: (Lsh32x64 x (Const64 [0])) 10914 // cond: 10915 // result: x 10916 for { 10917 _ = v.Args[1] 10918 x := v.Args[0] 10919 v_1 := v.Args[1] 10920 if v_1.Op != OpConst64 { 10921 break 10922 } 10923 if v_1.AuxInt != 0 { 10924 break 10925 } 10926 v.reset(OpCopy) 10927 v.Type = x.Type 10928 v.AddArg(x) 10929 return true 10930 } 10931 // match: (Lsh32x64 (Const32 [0]) _) 10932 // cond: 10933 // result: (Const32 [0]) 10934 for { 10935 _ = v.Args[1] 10936 v_0 := v.Args[0] 10937 if v_0.Op != OpConst32 { 10938 break 10939 } 10940 if v_0.AuxInt != 0 { 10941 break 10942 } 10943 v.reset(OpConst32) 10944 v.AuxInt = 0 10945 return true 10946 } 10947 // match: (Lsh32x64 _ (Const64 [c])) 10948 // cond: uint64(c) >= 32 10949 // result: (Const32 [0]) 10950 for { 10951 _ = v.Args[1] 10952 v_1 := v.Args[1] 10953 if v_1.Op != OpConst64 { 10954 break 10955 } 10956 c := v_1.AuxInt 10957 if !(uint64(c) >= 32) { 10958 break 10959 } 10960 v.reset(OpConst32) 10961 v.AuxInt = 0 10962 return true 10963 } 10964 // match: (Lsh32x64 <t> (Lsh32x64 x (Const64 [c])) (Const64 [d])) 10965 // cond: !uaddOvf(c,d) 10966 // result: (Lsh32x64 x (Const64 <t> [c+d])) 10967 for { 10968 t := v.Type 10969 _ = v.Args[1] 10970 v_0 := v.Args[0] 10971 if v_0.Op != OpLsh32x64 { 10972 break 10973 } 10974 _ = v_0.Args[1] 10975 x := v_0.Args[0] 10976 v_0_1 := v_0.Args[1] 10977 if v_0_1.Op != OpConst64 { 10978 break 10979 } 10980 c := v_0_1.AuxInt 10981 v_1 := v.Args[1] 10982 if v_1.Op != OpConst64 { 10983 break 10984 } 10985 d := v_1.AuxInt 10986 if 
!(!uaddOvf(c, d)) { 10987 break 10988 } 10989 v.reset(OpLsh32x64) 10990 v.AddArg(x) 10991 v0 := b.NewValue0(v.Pos, OpConst64, t) 10992 v0.AuxInt = c + d 10993 v.AddArg(v0) 10994 return true 10995 } 10996 // match: (Lsh32x64 (Rsh32Ux64 (Lsh32x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) 10997 // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3) 10998 // result: (Lsh32x64 x (Const64 <typ.UInt64> [c1-c2+c3])) 10999 for { 11000 _ = v.Args[1] 11001 v_0 := v.Args[0] 11002 if v_0.Op != OpRsh32Ux64 { 11003 break 11004 } 11005 _ = v_0.Args[1] 11006 v_0_0 := v_0.Args[0] 11007 if v_0_0.Op != OpLsh32x64 { 11008 break 11009 } 11010 _ = v_0_0.Args[1] 11011 x := v_0_0.Args[0] 11012 v_0_0_1 := v_0_0.Args[1] 11013 if v_0_0_1.Op != OpConst64 { 11014 break 11015 } 11016 c1 := v_0_0_1.AuxInt 11017 v_0_1 := v_0.Args[1] 11018 if v_0_1.Op != OpConst64 { 11019 break 11020 } 11021 c2 := v_0_1.AuxInt 11022 v_1 := v.Args[1] 11023 if v_1.Op != OpConst64 { 11024 break 11025 } 11026 c3 := v_1.AuxInt 11027 if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { 11028 break 11029 } 11030 v.reset(OpLsh32x64) 11031 v.AddArg(x) 11032 v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 11033 v0.AuxInt = c1 - c2 + c3 11034 v.AddArg(v0) 11035 return true 11036 } 11037 return false 11038 } 11039 func rewriteValuegeneric_OpLsh32x8_0(v *Value) bool { 11040 b := v.Block 11041 _ = b 11042 // match: (Lsh32x8 <t> x (Const8 [c])) 11043 // cond: 11044 // result: (Lsh32x64 x (Const64 <t> [int64(uint8(c))])) 11045 for { 11046 t := v.Type 11047 _ = v.Args[1] 11048 x := v.Args[0] 11049 v_1 := v.Args[1] 11050 if v_1.Op != OpConst8 { 11051 break 11052 } 11053 c := v_1.AuxInt 11054 v.reset(OpLsh32x64) 11055 v.AddArg(x) 11056 v0 := b.NewValue0(v.Pos, OpConst64, t) 11057 v0.AuxInt = int64(uint8(c)) 11058 v.AddArg(v0) 11059 return true 11060 } 11061 // match: (Lsh32x8 (Const32 [0]) _) 11062 // cond: 11063 // result: (Const32 [0]) 11064 for { 11065 _ = v.Args[1] 11066 v_0 := v.Args[0] 11067 if v_0.Op != OpConst32 { 11068 break 11069 } 11070 if v_0.AuxInt != 0 { 11071 break 11072 } 11073 v.reset(OpConst32) 11074 v.AuxInt = 0 11075 return true 11076 } 11077 return false 11078 } 11079 func rewriteValuegeneric_OpLsh64x16_0(v *Value) bool { 11080 b := v.Block 11081 _ = b 11082 // match: (Lsh64x16 <t> x (Const16 [c])) 11083 // cond: 11084 // result: (Lsh64x64 x (Const64 <t> [int64(uint16(c))])) 11085 for { 11086 t := v.Type 11087 _ = v.Args[1] 11088 x := v.Args[0] 11089 v_1 := v.Args[1] 11090 if v_1.Op != OpConst16 { 11091 break 11092 } 11093 c := v_1.AuxInt 11094 v.reset(OpLsh64x64) 11095 v.AddArg(x) 11096 v0 := b.NewValue0(v.Pos, OpConst64, t) 11097 v0.AuxInt = int64(uint16(c)) 11098 v.AddArg(v0) 11099 return true 11100 } 11101 // match: (Lsh64x16 (Const64 [0]) _) 11102 // cond: 11103 // result: (Const64 [0]) 11104 for { 11105 _ = v.Args[1] 11106 v_0 := v.Args[0] 11107 if v_0.Op != OpConst64 { 11108 break 11109 } 11110 if v_0.AuxInt != 0 { 11111 break 11112 } 11113 v.reset(OpConst64) 11114 v.AuxInt = 0 11115 return true 11116 } 11117 return false 11118 } 11119 func rewriteValuegeneric_OpLsh64x32_0(v *Value) bool { 11120 b := v.Block 11121 _ = b 11122 // match: (Lsh64x32 <t> x (Const32 [c])) 11123 // cond: 11124 // result: (Lsh64x64 x (Const64 <t> [int64(uint32(c))])) 11125 for { 11126 t := v.Type 11127 _ = v.Args[1] 11128 x := v.Args[0] 11129 v_1 := v.Args[1] 11130 if v_1.Op != OpConst32 { 11131 break 11132 } 11133 c := v_1.AuxInt 11134 v.reset(OpLsh64x64) 11135 v.AddArg(x) 11136 v0 := 
b.NewValue0(v.Pos, OpConst64, t) 11137 v0.AuxInt = int64(uint32(c)) 11138 v.AddArg(v0) 11139 return true 11140 } 11141 // match: (Lsh64x32 (Const64 [0]) _) 11142 // cond: 11143 // result: (Const64 [0]) 11144 for { 11145 _ = v.Args[1] 11146 v_0 := v.Args[0] 11147 if v_0.Op != OpConst64 { 11148 break 11149 } 11150 if v_0.AuxInt != 0 { 11151 break 11152 } 11153 v.reset(OpConst64) 11154 v.AuxInt = 0 11155 return true 11156 } 11157 return false 11158 } 11159 func rewriteValuegeneric_OpLsh64x64_0(v *Value) bool { 11160 b := v.Block 11161 _ = b 11162 typ := &b.Func.Config.Types 11163 _ = typ 11164 // match: (Lsh64x64 (Const64 [c]) (Const64 [d])) 11165 // cond: 11166 // result: (Const64 [c << uint64(d)]) 11167 for { 11168 _ = v.Args[1] 11169 v_0 := v.Args[0] 11170 if v_0.Op != OpConst64 { 11171 break 11172 } 11173 c := v_0.AuxInt 11174 v_1 := v.Args[1] 11175 if v_1.Op != OpConst64 { 11176 break 11177 } 11178 d := v_1.AuxInt 11179 v.reset(OpConst64) 11180 v.AuxInt = c << uint64(d) 11181 return true 11182 } 11183 // match: (Lsh64x64 x (Const64 [0])) 11184 // cond: 11185 // result: x 11186 for { 11187 _ = v.Args[1] 11188 x := v.Args[0] 11189 v_1 := v.Args[1] 11190 if v_1.Op != OpConst64 { 11191 break 11192 } 11193 if v_1.AuxInt != 0 { 11194 break 11195 } 11196 v.reset(OpCopy) 11197 v.Type = x.Type 11198 v.AddArg(x) 11199 return true 11200 } 11201 // match: (Lsh64x64 (Const64 [0]) _) 11202 // cond: 11203 // result: (Const64 [0]) 11204 for { 11205 _ = v.Args[1] 11206 v_0 := v.Args[0] 11207 if v_0.Op != OpConst64 { 11208 break 11209 } 11210 if v_0.AuxInt != 0 { 11211 break 11212 } 11213 v.reset(OpConst64) 11214 v.AuxInt = 0 11215 return true 11216 } 11217 // match: (Lsh64x64 _ (Const64 [c])) 11218 // cond: uint64(c) >= 64 11219 // result: (Const64 [0]) 11220 for { 11221 _ = v.Args[1] 11222 v_1 := v.Args[1] 11223 if v_1.Op != OpConst64 { 11224 break 11225 } 11226 c := v_1.AuxInt 11227 if !(uint64(c) >= 64) { 11228 break 11229 } 11230 v.reset(OpConst64) 11231 v.AuxInt = 0 11232 return true 11233 } 11234 // match: (Lsh64x64 <t> (Lsh64x64 x (Const64 [c])) (Const64 [d])) 11235 // cond: !uaddOvf(c,d) 11236 // result: (Lsh64x64 x (Const64 <t> [c+d])) 11237 for { 11238 t := v.Type 11239 _ = v.Args[1] 11240 v_0 := v.Args[0] 11241 if v_0.Op != OpLsh64x64 { 11242 break 11243 } 11244 _ = v_0.Args[1] 11245 x := v_0.Args[0] 11246 v_0_1 := v_0.Args[1] 11247 if v_0_1.Op != OpConst64 { 11248 break 11249 } 11250 c := v_0_1.AuxInt 11251 v_1 := v.Args[1] 11252 if v_1.Op != OpConst64 { 11253 break 11254 } 11255 d := v_1.AuxInt 11256 if !(!uaddOvf(c, d)) { 11257 break 11258 } 11259 v.reset(OpLsh64x64) 11260 v.AddArg(x) 11261 v0 := b.NewValue0(v.Pos, OpConst64, t) 11262 v0.AuxInt = c + d 11263 v.AddArg(v0) 11264 return true 11265 } 11266 // match: (Lsh64x64 (Rsh64Ux64 (Lsh64x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) 11267 // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3) 11268 // result: (Lsh64x64 x (Const64 <typ.UInt64> [c1-c2+c3])) 11269 for { 11270 _ = v.Args[1] 11271 v_0 := v.Args[0] 11272 if v_0.Op != OpRsh64Ux64 { 11273 break 11274 } 11275 _ = v_0.Args[1] 11276 v_0_0 := v_0.Args[0] 11277 if v_0_0.Op != OpLsh64x64 { 11278 break 11279 } 11280 _ = v_0_0.Args[1] 11281 x := v_0_0.Args[0] 11282 v_0_0_1 := v_0_0.Args[1] 11283 if v_0_0_1.Op != OpConst64 { 11284 break 11285 } 11286 c1 := v_0_0_1.AuxInt 11287 v_0_1 := v_0.Args[1] 11288 if v_0_1.Op != OpConst64 { 11289 break 11290 } 11291 c2 := v_0_1.AuxInt 11292 v_1 := v.Args[1] 11293 if v_1.Op != OpConst64 { 11294 break 11295 } 11296 
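// Illustrative sketch (not part of the generated file): the Lsh64x64 constant
// rules above match Go's own shift semantics, so folding is straightforward:
// a zero count is the identity (rewritten to a Copy of x), a count of 64 or
// more yields zero, and otherwise the shift is evaluated directly on the
// AuxInt. The hypothetical helper below restates that case split.
func foldLsh64Const(c, d int64) int64 {
	if uint64(d) >= 64 {
		return 0 // the count>=64 rule; c << uint64(d) evaluates to 0 here too
	}
	return c << uint64(d) // the plain constant-folding rule
}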
c3 := v_1.AuxInt 11297 if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { 11298 break 11299 } 11300 v.reset(OpLsh64x64) 11301 v.AddArg(x) 11302 v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 11303 v0.AuxInt = c1 - c2 + c3 11304 v.AddArg(v0) 11305 return true 11306 } 11307 return false 11308 } 11309 func rewriteValuegeneric_OpLsh64x8_0(v *Value) bool { 11310 b := v.Block 11311 _ = b 11312 // match: (Lsh64x8 <t> x (Const8 [c])) 11313 // cond: 11314 // result: (Lsh64x64 x (Const64 <t> [int64(uint8(c))])) 11315 for { 11316 t := v.Type 11317 _ = v.Args[1] 11318 x := v.Args[0] 11319 v_1 := v.Args[1] 11320 if v_1.Op != OpConst8 { 11321 break 11322 } 11323 c := v_1.AuxInt 11324 v.reset(OpLsh64x64) 11325 v.AddArg(x) 11326 v0 := b.NewValue0(v.Pos, OpConst64, t) 11327 v0.AuxInt = int64(uint8(c)) 11328 v.AddArg(v0) 11329 return true 11330 } 11331 // match: (Lsh64x8 (Const64 [0]) _) 11332 // cond: 11333 // result: (Const64 [0]) 11334 for { 11335 _ = v.Args[1] 11336 v_0 := v.Args[0] 11337 if v_0.Op != OpConst64 { 11338 break 11339 } 11340 if v_0.AuxInt != 0 { 11341 break 11342 } 11343 v.reset(OpConst64) 11344 v.AuxInt = 0 11345 return true 11346 } 11347 return false 11348 } 11349 func rewriteValuegeneric_OpLsh8x16_0(v *Value) bool { 11350 b := v.Block 11351 _ = b 11352 // match: (Lsh8x16 <t> x (Const16 [c])) 11353 // cond: 11354 // result: (Lsh8x64 x (Const64 <t> [int64(uint16(c))])) 11355 for { 11356 t := v.Type 11357 _ = v.Args[1] 11358 x := v.Args[0] 11359 v_1 := v.Args[1] 11360 if v_1.Op != OpConst16 { 11361 break 11362 } 11363 c := v_1.AuxInt 11364 v.reset(OpLsh8x64) 11365 v.AddArg(x) 11366 v0 := b.NewValue0(v.Pos, OpConst64, t) 11367 v0.AuxInt = int64(uint16(c)) 11368 v.AddArg(v0) 11369 return true 11370 } 11371 // match: (Lsh8x16 (Const8 [0]) _) 11372 // cond: 11373 // result: (Const8 [0]) 11374 for { 11375 _ = v.Args[1] 11376 v_0 := v.Args[0] 11377 if v_0.Op != OpConst8 { 11378 break 11379 } 11380 if v_0.AuxInt != 0 { 11381 break 11382 } 11383 v.reset(OpConst8) 11384 v.AuxInt = 0 11385 return true 11386 } 11387 return false 11388 } 11389 func rewriteValuegeneric_OpLsh8x32_0(v *Value) bool { 11390 b := v.Block 11391 _ = b 11392 // match: (Lsh8x32 <t> x (Const32 [c])) 11393 // cond: 11394 // result: (Lsh8x64 x (Const64 <t> [int64(uint32(c))])) 11395 for { 11396 t := v.Type 11397 _ = v.Args[1] 11398 x := v.Args[0] 11399 v_1 := v.Args[1] 11400 if v_1.Op != OpConst32 { 11401 break 11402 } 11403 c := v_1.AuxInt 11404 v.reset(OpLsh8x64) 11405 v.AddArg(x) 11406 v0 := b.NewValue0(v.Pos, OpConst64, t) 11407 v0.AuxInt = int64(uint32(c)) 11408 v.AddArg(v0) 11409 return true 11410 } 11411 // match: (Lsh8x32 (Const8 [0]) _) 11412 // cond: 11413 // result: (Const8 [0]) 11414 for { 11415 _ = v.Args[1] 11416 v_0 := v.Args[0] 11417 if v_0.Op != OpConst8 { 11418 break 11419 } 11420 if v_0.AuxInt != 0 { 11421 break 11422 } 11423 v.reset(OpConst8) 11424 v.AuxInt = 0 11425 return true 11426 } 11427 return false 11428 } 11429 func rewriteValuegeneric_OpLsh8x64_0(v *Value) bool { 11430 b := v.Block 11431 _ = b 11432 typ := &b.Func.Config.Types 11433 _ = typ 11434 // match: (Lsh8x64 (Const8 [c]) (Const64 [d])) 11435 // cond: 11436 // result: (Const8 [int64(int8(c) << uint64(d))]) 11437 for { 11438 _ = v.Args[1] 11439 v_0 := v.Args[0] 11440 if v_0.Op != OpConst8 { 11441 break 11442 } 11443 c := v_0.AuxInt 11444 v_1 := v.Args[1] 11445 if v_1.Op != OpConst64 { 11446 break 11447 } 11448 d := v_1.AuxInt 11449 v.reset(OpConst8) 11450 v.AuxInt = int64(int8(c) << uint64(d)) 11451 return true 
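// Illustrative sketch (not part of the generated file): the
// (Lsh64x64 (Rsh64Ux64 (Lsh64x64 x [c1]) [c2]) [c3]) rule above collapses a
// shift-up / logical-shift-down / shift-up chain into one left shift by
// c1-c2+c3. It is safe because, with c1 >= c2, the right shift only removes
// zero bits that the first left shift introduced; c3 >= c2 and the uaddOvf
// guard keep the combined count meaningful. The identity on uint64:
func shiftChainEqual(x, c1, c2, c3 uint64) bool {
	// Precondition (mirroring the rule): c1 >= c2 and c3 >= c2, with
	// c1-c2+c3 not wrapping around.
	return ((x<<c1)>>c2)<<c3 == x<<(c1-c2+c3)
}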
11452 } 11453 // match: (Lsh8x64 x (Const64 [0])) 11454 // cond: 11455 // result: x 11456 for { 11457 _ = v.Args[1] 11458 x := v.Args[0] 11459 v_1 := v.Args[1] 11460 if v_1.Op != OpConst64 { 11461 break 11462 } 11463 if v_1.AuxInt != 0 { 11464 break 11465 } 11466 v.reset(OpCopy) 11467 v.Type = x.Type 11468 v.AddArg(x) 11469 return true 11470 } 11471 // match: (Lsh8x64 (Const8 [0]) _) 11472 // cond: 11473 // result: (Const8 [0]) 11474 for { 11475 _ = v.Args[1] 11476 v_0 := v.Args[0] 11477 if v_0.Op != OpConst8 { 11478 break 11479 } 11480 if v_0.AuxInt != 0 { 11481 break 11482 } 11483 v.reset(OpConst8) 11484 v.AuxInt = 0 11485 return true 11486 } 11487 // match: (Lsh8x64 _ (Const64 [c])) 11488 // cond: uint64(c) >= 8 11489 // result: (Const8 [0]) 11490 for { 11491 _ = v.Args[1] 11492 v_1 := v.Args[1] 11493 if v_1.Op != OpConst64 { 11494 break 11495 } 11496 c := v_1.AuxInt 11497 if !(uint64(c) >= 8) { 11498 break 11499 } 11500 v.reset(OpConst8) 11501 v.AuxInt = 0 11502 return true 11503 } 11504 // match: (Lsh8x64 <t> (Lsh8x64 x (Const64 [c])) (Const64 [d])) 11505 // cond: !uaddOvf(c,d) 11506 // result: (Lsh8x64 x (Const64 <t> [c+d])) 11507 for { 11508 t := v.Type 11509 _ = v.Args[1] 11510 v_0 := v.Args[0] 11511 if v_0.Op != OpLsh8x64 { 11512 break 11513 } 11514 _ = v_0.Args[1] 11515 x := v_0.Args[0] 11516 v_0_1 := v_0.Args[1] 11517 if v_0_1.Op != OpConst64 { 11518 break 11519 } 11520 c := v_0_1.AuxInt 11521 v_1 := v.Args[1] 11522 if v_1.Op != OpConst64 { 11523 break 11524 } 11525 d := v_1.AuxInt 11526 if !(!uaddOvf(c, d)) { 11527 break 11528 } 11529 v.reset(OpLsh8x64) 11530 v.AddArg(x) 11531 v0 := b.NewValue0(v.Pos, OpConst64, t) 11532 v0.AuxInt = c + d 11533 v.AddArg(v0) 11534 return true 11535 } 11536 // match: (Lsh8x64 (Rsh8Ux64 (Lsh8x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) 11537 // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3) 11538 // result: (Lsh8x64 x (Const64 <typ.UInt64> [c1-c2+c3])) 11539 for { 11540 _ = v.Args[1] 11541 v_0 := v.Args[0] 11542 if v_0.Op != OpRsh8Ux64 { 11543 break 11544 } 11545 _ = v_0.Args[1] 11546 v_0_0 := v_0.Args[0] 11547 if v_0_0.Op != OpLsh8x64 { 11548 break 11549 } 11550 _ = v_0_0.Args[1] 11551 x := v_0_0.Args[0] 11552 v_0_0_1 := v_0_0.Args[1] 11553 if v_0_0_1.Op != OpConst64 { 11554 break 11555 } 11556 c1 := v_0_0_1.AuxInt 11557 v_0_1 := v_0.Args[1] 11558 if v_0_1.Op != OpConst64 { 11559 break 11560 } 11561 c2 := v_0_1.AuxInt 11562 v_1 := v.Args[1] 11563 if v_1.Op != OpConst64 { 11564 break 11565 } 11566 c3 := v_1.AuxInt 11567 if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { 11568 break 11569 } 11570 v.reset(OpLsh8x64) 11571 v.AddArg(x) 11572 v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 11573 v0.AuxInt = c1 - c2 + c3 11574 v.AddArg(v0) 11575 return true 11576 } 11577 return false 11578 } 11579 func rewriteValuegeneric_OpLsh8x8_0(v *Value) bool { 11580 b := v.Block 11581 _ = b 11582 // match: (Lsh8x8 <t> x (Const8 [c])) 11583 // cond: 11584 // result: (Lsh8x64 x (Const64 <t> [int64(uint8(c))])) 11585 for { 11586 t := v.Type 11587 _ = v.Args[1] 11588 x := v.Args[0] 11589 v_1 := v.Args[1] 11590 if v_1.Op != OpConst8 { 11591 break 11592 } 11593 c := v_1.AuxInt 11594 v.reset(OpLsh8x64) 11595 v.AddArg(x) 11596 v0 := b.NewValue0(v.Pos, OpConst64, t) 11597 v0.AuxInt = int64(uint8(c)) 11598 v.AddArg(v0) 11599 return true 11600 } 11601 // match: (Lsh8x8 (Const8 [0]) _) 11602 // cond: 11603 // result: (Const8 [0]) 11604 for { 11605 _ = v.Args[1] 11606 v_0 := v.Args[0] 11607 if v_0.Op 
!= OpConst8 { 11608 break 11609 } 11610 if v_0.AuxInt != 0 { 11611 break 11612 } 11613 v.reset(OpConst8) 11614 v.AuxInt = 0 11615 return true 11616 } 11617 return false 11618 } 11619 func rewriteValuegeneric_OpMod16_0(v *Value) bool { 11620 b := v.Block 11621 _ = b 11622 // match: (Mod16 (Const16 [c]) (Const16 [d])) 11623 // cond: d != 0 11624 // result: (Const16 [int64(int16(c % d))]) 11625 for { 11626 _ = v.Args[1] 11627 v_0 := v.Args[0] 11628 if v_0.Op != OpConst16 { 11629 break 11630 } 11631 c := v_0.AuxInt 11632 v_1 := v.Args[1] 11633 if v_1.Op != OpConst16 { 11634 break 11635 } 11636 d := v_1.AuxInt 11637 if !(d != 0) { 11638 break 11639 } 11640 v.reset(OpConst16) 11641 v.AuxInt = int64(int16(c % d)) 11642 return true 11643 } 11644 // match: (Mod16 <t> n (Const16 [c])) 11645 // cond: c < 0 && c != -1<<15 11646 // result: (Mod16 <t> n (Const16 <t> [-c])) 11647 for { 11648 t := v.Type 11649 _ = v.Args[1] 11650 n := v.Args[0] 11651 v_1 := v.Args[1] 11652 if v_1.Op != OpConst16 { 11653 break 11654 } 11655 c := v_1.AuxInt 11656 if !(c < 0 && c != -1<<15) { 11657 break 11658 } 11659 v.reset(OpMod16) 11660 v.Type = t 11661 v.AddArg(n) 11662 v0 := b.NewValue0(v.Pos, OpConst16, t) 11663 v0.AuxInt = -c 11664 v.AddArg(v0) 11665 return true 11666 } 11667 // match: (Mod16 <t> x (Const16 [c])) 11668 // cond: x.Op != OpConst16 && (c > 0 || c == -1<<15) 11669 // result: (Sub16 x (Mul16 <t> (Div16 <t> x (Const16 <t> [c])) (Const16 <t> [c]))) 11670 for { 11671 t := v.Type 11672 _ = v.Args[1] 11673 x := v.Args[0] 11674 v_1 := v.Args[1] 11675 if v_1.Op != OpConst16 { 11676 break 11677 } 11678 c := v_1.AuxInt 11679 if !(x.Op != OpConst16 && (c > 0 || c == -1<<15)) { 11680 break 11681 } 11682 v.reset(OpSub16) 11683 v.AddArg(x) 11684 v0 := b.NewValue0(v.Pos, OpMul16, t) 11685 v1 := b.NewValue0(v.Pos, OpDiv16, t) 11686 v1.AddArg(x) 11687 v2 := b.NewValue0(v.Pos, OpConst16, t) 11688 v2.AuxInt = c 11689 v1.AddArg(v2) 11690 v0.AddArg(v1) 11691 v3 := b.NewValue0(v.Pos, OpConst16, t) 11692 v3.AuxInt = c 11693 v0.AddArg(v3) 11694 v.AddArg(v0) 11695 return true 11696 } 11697 return false 11698 } 11699 func rewriteValuegeneric_OpMod16u_0(v *Value) bool { 11700 b := v.Block 11701 _ = b 11702 // match: (Mod16u (Const16 [c]) (Const16 [d])) 11703 // cond: d != 0 11704 // result: (Const16 [int64(uint16(c) % uint16(d))]) 11705 for { 11706 _ = v.Args[1] 11707 v_0 := v.Args[0] 11708 if v_0.Op != OpConst16 { 11709 break 11710 } 11711 c := v_0.AuxInt 11712 v_1 := v.Args[1] 11713 if v_1.Op != OpConst16 { 11714 break 11715 } 11716 d := v_1.AuxInt 11717 if !(d != 0) { 11718 break 11719 } 11720 v.reset(OpConst16) 11721 v.AuxInt = int64(uint16(c) % uint16(d)) 11722 return true 11723 } 11724 // match: (Mod16u <t> n (Const16 [c])) 11725 // cond: isPowerOfTwo(c&0xffff) 11726 // result: (And16 n (Const16 <t> [(c&0xffff)-1])) 11727 for { 11728 t := v.Type 11729 _ = v.Args[1] 11730 n := v.Args[0] 11731 v_1 := v.Args[1] 11732 if v_1.Op != OpConst16 { 11733 break 11734 } 11735 c := v_1.AuxInt 11736 if !(isPowerOfTwo(c & 0xffff)) { 11737 break 11738 } 11739 v.reset(OpAnd16) 11740 v.AddArg(n) 11741 v0 := b.NewValue0(v.Pos, OpConst16, t) 11742 v0.AuxInt = (c & 0xffff) - 1 11743 v.AddArg(v0) 11744 return true 11745 } 11746 // match: (Mod16u <t> x (Const16 [c])) 11747 // cond: x.Op != OpConst16 && c > 0 && umagicOK(16,c) 11748 // result: (Sub16 x (Mul16 <t> (Div16u <t> x (Const16 <t> [c])) (Const16 <t> [c]))) 11749 for { 11750 t := v.Type 11751 _ = v.Args[1] 11752 x := v.Args[0] 11753 v_1 := v.Args[1] 11754 if v_1.Op != OpConst16 { 11755 
break 11756 } 11757 c := v_1.AuxInt 11758 if !(x.Op != OpConst16 && c > 0 && umagicOK(16, c)) { 11759 break 11760 } 11761 v.reset(OpSub16) 11762 v.AddArg(x) 11763 v0 := b.NewValue0(v.Pos, OpMul16, t) 11764 v1 := b.NewValue0(v.Pos, OpDiv16u, t) 11765 v1.AddArg(x) 11766 v2 := b.NewValue0(v.Pos, OpConst16, t) 11767 v2.AuxInt = c 11768 v1.AddArg(v2) 11769 v0.AddArg(v1) 11770 v3 := b.NewValue0(v.Pos, OpConst16, t) 11771 v3.AuxInt = c 11772 v0.AddArg(v3) 11773 v.AddArg(v0) 11774 return true 11775 } 11776 return false 11777 } 11778 func rewriteValuegeneric_OpMod32_0(v *Value) bool { 11779 b := v.Block 11780 _ = b 11781 // match: (Mod32 (Const32 [c]) (Const32 [d])) 11782 // cond: d != 0 11783 // result: (Const32 [int64(int32(c % d))]) 11784 for { 11785 _ = v.Args[1] 11786 v_0 := v.Args[0] 11787 if v_0.Op != OpConst32 { 11788 break 11789 } 11790 c := v_0.AuxInt 11791 v_1 := v.Args[1] 11792 if v_1.Op != OpConst32 { 11793 break 11794 } 11795 d := v_1.AuxInt 11796 if !(d != 0) { 11797 break 11798 } 11799 v.reset(OpConst32) 11800 v.AuxInt = int64(int32(c % d)) 11801 return true 11802 } 11803 // match: (Mod32 <t> n (Const32 [c])) 11804 // cond: c < 0 && c != -1<<31 11805 // result: (Mod32 <t> n (Const32 <t> [-c])) 11806 for { 11807 t := v.Type 11808 _ = v.Args[1] 11809 n := v.Args[0] 11810 v_1 := v.Args[1] 11811 if v_1.Op != OpConst32 { 11812 break 11813 } 11814 c := v_1.AuxInt 11815 if !(c < 0 && c != -1<<31) { 11816 break 11817 } 11818 v.reset(OpMod32) 11819 v.Type = t 11820 v.AddArg(n) 11821 v0 := b.NewValue0(v.Pos, OpConst32, t) 11822 v0.AuxInt = -c 11823 v.AddArg(v0) 11824 return true 11825 } 11826 // match: (Mod32 <t> x (Const32 [c])) 11827 // cond: x.Op != OpConst32 && (c > 0 || c == -1<<31) 11828 // result: (Sub32 x (Mul32 <t> (Div32 <t> x (Const32 <t> [c])) (Const32 <t> [c]))) 11829 for { 11830 t := v.Type 11831 _ = v.Args[1] 11832 x := v.Args[0] 11833 v_1 := v.Args[1] 11834 if v_1.Op != OpConst32 { 11835 break 11836 } 11837 c := v_1.AuxInt 11838 if !(x.Op != OpConst32 && (c > 0 || c == -1<<31)) { 11839 break 11840 } 11841 v.reset(OpSub32) 11842 v.AddArg(x) 11843 v0 := b.NewValue0(v.Pos, OpMul32, t) 11844 v1 := b.NewValue0(v.Pos, OpDiv32, t) 11845 v1.AddArg(x) 11846 v2 := b.NewValue0(v.Pos, OpConst32, t) 11847 v2.AuxInt = c 11848 v1.AddArg(v2) 11849 v0.AddArg(v1) 11850 v3 := b.NewValue0(v.Pos, OpConst32, t) 11851 v3.AuxInt = c 11852 v0.AddArg(v3) 11853 v.AddArg(v0) 11854 return true 11855 } 11856 return false 11857 } 11858 func rewriteValuegeneric_OpMod32u_0(v *Value) bool { 11859 b := v.Block 11860 _ = b 11861 // match: (Mod32u (Const32 [c]) (Const32 [d])) 11862 // cond: d != 0 11863 // result: (Const32 [int64(uint32(c) % uint32(d))]) 11864 for { 11865 _ = v.Args[1] 11866 v_0 := v.Args[0] 11867 if v_0.Op != OpConst32 { 11868 break 11869 } 11870 c := v_0.AuxInt 11871 v_1 := v.Args[1] 11872 if v_1.Op != OpConst32 { 11873 break 11874 } 11875 d := v_1.AuxInt 11876 if !(d != 0) { 11877 break 11878 } 11879 v.reset(OpConst32) 11880 v.AuxInt = int64(uint32(c) % uint32(d)) 11881 return true 11882 } 11883 // match: (Mod32u <t> n (Const32 [c])) 11884 // cond: isPowerOfTwo(c&0xffffffff) 11885 // result: (And32 n (Const32 <t> [(c&0xffffffff)-1])) 11886 for { 11887 t := v.Type 11888 _ = v.Args[1] 11889 n := v.Args[0] 11890 v_1 := v.Args[1] 11891 if v_1.Op != OpConst32 { 11892 break 11893 } 11894 c := v_1.AuxInt 11895 if !(isPowerOfTwo(c & 0xffffffff)) { 11896 break 11897 } 11898 v.reset(OpAnd32) 11899 v.AddArg(n) 11900 v0 := b.NewValue0(v.Pos, OpConst32, t) 11901 v0.AuxInt = (c & 0xffffffff) - 1 11902 
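// Illustrative sketch (not part of the generated file): for unsigned
// operands, the Mod16u/Mod32u rules above turn "n % c" with c a power of two
// into a bitwise AND with c-1 (for example n % 8 == n & 7). The 16- and
// 32-bit variants mask the constant (c&0xffff, c&0xffffffff) first because
// narrow constants are stored sign-extended in the 64-bit AuxInt.
func modPow2Equal(n, c uint32) bool {
	if c == 0 || c&(c-1) != 0 {
		return true // not a power of two, so the rule would not fire
	}
	return n%c == n&(c-1) // the identity the rewrite relies on
}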
v.AddArg(v0) 11903 return true 11904 } 11905 // match: (Mod32u <t> x (Const32 [c])) 11906 // cond: x.Op != OpConst32 && c > 0 && umagicOK(32,c) 11907 // result: (Sub32 x (Mul32 <t> (Div32u <t> x (Const32 <t> [c])) (Const32 <t> [c]))) 11908 for { 11909 t := v.Type 11910 _ = v.Args[1] 11911 x := v.Args[0] 11912 v_1 := v.Args[1] 11913 if v_1.Op != OpConst32 { 11914 break 11915 } 11916 c := v_1.AuxInt 11917 if !(x.Op != OpConst32 && c > 0 && umagicOK(32, c)) { 11918 break 11919 } 11920 v.reset(OpSub32) 11921 v.AddArg(x) 11922 v0 := b.NewValue0(v.Pos, OpMul32, t) 11923 v1 := b.NewValue0(v.Pos, OpDiv32u, t) 11924 v1.AddArg(x) 11925 v2 := b.NewValue0(v.Pos, OpConst32, t) 11926 v2.AuxInt = c 11927 v1.AddArg(v2) 11928 v0.AddArg(v1) 11929 v3 := b.NewValue0(v.Pos, OpConst32, t) 11930 v3.AuxInt = c 11931 v0.AddArg(v3) 11932 v.AddArg(v0) 11933 return true 11934 } 11935 return false 11936 } 11937 func rewriteValuegeneric_OpMod64_0(v *Value) bool { 11938 b := v.Block 11939 _ = b 11940 // match: (Mod64 (Const64 [c]) (Const64 [d])) 11941 // cond: d != 0 11942 // result: (Const64 [c % d]) 11943 for { 11944 _ = v.Args[1] 11945 v_0 := v.Args[0] 11946 if v_0.Op != OpConst64 { 11947 break 11948 } 11949 c := v_0.AuxInt 11950 v_1 := v.Args[1] 11951 if v_1.Op != OpConst64 { 11952 break 11953 } 11954 d := v_1.AuxInt 11955 if !(d != 0) { 11956 break 11957 } 11958 v.reset(OpConst64) 11959 v.AuxInt = c % d 11960 return true 11961 } 11962 // match: (Mod64 <t> n (Const64 [c])) 11963 // cond: c < 0 && c != -1<<63 11964 // result: (Mod64 <t> n (Const64 <t> [-c])) 11965 for { 11966 t := v.Type 11967 _ = v.Args[1] 11968 n := v.Args[0] 11969 v_1 := v.Args[1] 11970 if v_1.Op != OpConst64 { 11971 break 11972 } 11973 c := v_1.AuxInt 11974 if !(c < 0 && c != -1<<63) { 11975 break 11976 } 11977 v.reset(OpMod64) 11978 v.Type = t 11979 v.AddArg(n) 11980 v0 := b.NewValue0(v.Pos, OpConst64, t) 11981 v0.AuxInt = -c 11982 v.AddArg(v0) 11983 return true 11984 } 11985 // match: (Mod64 <t> x (Const64 [c])) 11986 // cond: x.Op != OpConst64 && (c > 0 || c == -1<<63) 11987 // result: (Sub64 x (Mul64 <t> (Div64 <t> x (Const64 <t> [c])) (Const64 <t> [c]))) 11988 for { 11989 t := v.Type 11990 _ = v.Args[1] 11991 x := v.Args[0] 11992 v_1 := v.Args[1] 11993 if v_1.Op != OpConst64 { 11994 break 11995 } 11996 c := v_1.AuxInt 11997 if !(x.Op != OpConst64 && (c > 0 || c == -1<<63)) { 11998 break 11999 } 12000 v.reset(OpSub64) 12001 v.AddArg(x) 12002 v0 := b.NewValue0(v.Pos, OpMul64, t) 12003 v1 := b.NewValue0(v.Pos, OpDiv64, t) 12004 v1.AddArg(x) 12005 v2 := b.NewValue0(v.Pos, OpConst64, t) 12006 v2.AuxInt = c 12007 v1.AddArg(v2) 12008 v0.AddArg(v1) 12009 v3 := b.NewValue0(v.Pos, OpConst64, t) 12010 v3.AuxInt = c 12011 v0.AddArg(v3) 12012 v.AddArg(v0) 12013 return true 12014 } 12015 return false 12016 } 12017 func rewriteValuegeneric_OpMod64u_0(v *Value) bool { 12018 b := v.Block 12019 _ = b 12020 // match: (Mod64u (Const64 [c]) (Const64 [d])) 12021 // cond: d != 0 12022 // result: (Const64 [int64(uint64(c) % uint64(d))]) 12023 for { 12024 _ = v.Args[1] 12025 v_0 := v.Args[0] 12026 if v_0.Op != OpConst64 { 12027 break 12028 } 12029 c := v_0.AuxInt 12030 v_1 := v.Args[1] 12031 if v_1.Op != OpConst64 { 12032 break 12033 } 12034 d := v_1.AuxInt 12035 if !(d != 0) { 12036 break 12037 } 12038 v.reset(OpConst64) 12039 v.AuxInt = int64(uint64(c) % uint64(d)) 12040 return true 12041 } 12042 // match: (Mod64u <t> n (Const64 [c])) 12043 // cond: isPowerOfTwo(c) 12044 // result: (And64 n (Const64 <t> [c-1])) 12045 for { 12046 t := v.Type 12047 _ = v.Args[1] 
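// Illustrative sketch (not part of the generated file): when the divisor is
// not a power of two, the Mod*u rules above lower "x % c" to "x - (x/c)*c"
// and leave the division to be strength-reduced by the magic-number Div*u
// rules, which is what the umagicOK guard checks for. The identity itself is
// just Euclidean division:
func modViaDivEqual(x, c uint64) bool {
	if c == 0 {
		return true // the rule requires c > 0; avoid dividing by zero here
	}
	return x%c == x-(x/c)*c
}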
12048 n := v.Args[0] 12049 v_1 := v.Args[1] 12050 if v_1.Op != OpConst64 { 12051 break 12052 } 12053 c := v_1.AuxInt 12054 if !(isPowerOfTwo(c)) { 12055 break 12056 } 12057 v.reset(OpAnd64) 12058 v.AddArg(n) 12059 v0 := b.NewValue0(v.Pos, OpConst64, t) 12060 v0.AuxInt = c - 1 12061 v.AddArg(v0) 12062 return true 12063 } 12064 // match: (Mod64u <t> x (Const64 [c])) 12065 // cond: x.Op != OpConst64 && c > 0 && umagicOK(64,c) 12066 // result: (Sub64 x (Mul64 <t> (Div64u <t> x (Const64 <t> [c])) (Const64 <t> [c]))) 12067 for { 12068 t := v.Type 12069 _ = v.Args[1] 12070 x := v.Args[0] 12071 v_1 := v.Args[1] 12072 if v_1.Op != OpConst64 { 12073 break 12074 } 12075 c := v_1.AuxInt 12076 if !(x.Op != OpConst64 && c > 0 && umagicOK(64, c)) { 12077 break 12078 } 12079 v.reset(OpSub64) 12080 v.AddArg(x) 12081 v0 := b.NewValue0(v.Pos, OpMul64, t) 12082 v1 := b.NewValue0(v.Pos, OpDiv64u, t) 12083 v1.AddArg(x) 12084 v2 := b.NewValue0(v.Pos, OpConst64, t) 12085 v2.AuxInt = c 12086 v1.AddArg(v2) 12087 v0.AddArg(v1) 12088 v3 := b.NewValue0(v.Pos, OpConst64, t) 12089 v3.AuxInt = c 12090 v0.AddArg(v3) 12091 v.AddArg(v0) 12092 return true 12093 } 12094 return false 12095 } 12096 func rewriteValuegeneric_OpMod8_0(v *Value) bool { 12097 b := v.Block 12098 _ = b 12099 // match: (Mod8 (Const8 [c]) (Const8 [d])) 12100 // cond: d != 0 12101 // result: (Const8 [int64(int8(c % d))]) 12102 for { 12103 _ = v.Args[1] 12104 v_0 := v.Args[0] 12105 if v_0.Op != OpConst8 { 12106 break 12107 } 12108 c := v_0.AuxInt 12109 v_1 := v.Args[1] 12110 if v_1.Op != OpConst8 { 12111 break 12112 } 12113 d := v_1.AuxInt 12114 if !(d != 0) { 12115 break 12116 } 12117 v.reset(OpConst8) 12118 v.AuxInt = int64(int8(c % d)) 12119 return true 12120 } 12121 // match: (Mod8 <t> n (Const8 [c])) 12122 // cond: c < 0 && c != -1<<7 12123 // result: (Mod8 <t> n (Const8 <t> [-c])) 12124 for { 12125 t := v.Type 12126 _ = v.Args[1] 12127 n := v.Args[0] 12128 v_1 := v.Args[1] 12129 if v_1.Op != OpConst8 { 12130 break 12131 } 12132 c := v_1.AuxInt 12133 if !(c < 0 && c != -1<<7) { 12134 break 12135 } 12136 v.reset(OpMod8) 12137 v.Type = t 12138 v.AddArg(n) 12139 v0 := b.NewValue0(v.Pos, OpConst8, t) 12140 v0.AuxInt = -c 12141 v.AddArg(v0) 12142 return true 12143 } 12144 // match: (Mod8 <t> x (Const8 [c])) 12145 // cond: x.Op != OpConst8 && (c > 0 || c == -1<<7) 12146 // result: (Sub8 x (Mul8 <t> (Div8 <t> x (Const8 <t> [c])) (Const8 <t> [c]))) 12147 for { 12148 t := v.Type 12149 _ = v.Args[1] 12150 x := v.Args[0] 12151 v_1 := v.Args[1] 12152 if v_1.Op != OpConst8 { 12153 break 12154 } 12155 c := v_1.AuxInt 12156 if !(x.Op != OpConst8 && (c > 0 || c == -1<<7)) { 12157 break 12158 } 12159 v.reset(OpSub8) 12160 v.AddArg(x) 12161 v0 := b.NewValue0(v.Pos, OpMul8, t) 12162 v1 := b.NewValue0(v.Pos, OpDiv8, t) 12163 v1.AddArg(x) 12164 v2 := b.NewValue0(v.Pos, OpConst8, t) 12165 v2.AuxInt = c 12166 v1.AddArg(v2) 12167 v0.AddArg(v1) 12168 v3 := b.NewValue0(v.Pos, OpConst8, t) 12169 v3.AuxInt = c 12170 v0.AddArg(v3) 12171 v.AddArg(v0) 12172 return true 12173 } 12174 return false 12175 } 12176 func rewriteValuegeneric_OpMod8u_0(v *Value) bool { 12177 b := v.Block 12178 _ = b 12179 // match: (Mod8u (Const8 [c]) (Const8 [d])) 12180 // cond: d != 0 12181 // result: (Const8 [int64(uint8(c) % uint8(d))]) 12182 for { 12183 _ = v.Args[1] 12184 v_0 := v.Args[0] 12185 if v_0.Op != OpConst8 { 12186 break 12187 } 12188 c := v_0.AuxInt 12189 v_1 := v.Args[1] 12190 if v_1.Op != OpConst8 { 12191 break 12192 } 12193 d := v_1.AuxInt 12194 if !(d != 0) { 12195 break 12196 } 12197 
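// Illustrative sketch (not part of the generated file): the signed Mod*
// rules above replace a negative divisor by its absolute value, since with
// truncated division n % c and n % -c always agree. The one divisor that has
// to be excluded is the minimum value (-1<<7, -1<<15, -1<<31, -1<<63),
// because negating it overflows back to itself.
func modNegDivisorEqual(n, c int64) bool {
	if c >= 0 || c == -1<<63 {
		return true // outside the rule's condition (c < 0 && c != -1<<63)
	}
	return n%c == n%-c
}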
v.reset(OpConst8) 12198 v.AuxInt = int64(uint8(c) % uint8(d)) 12199 return true 12200 } 12201 // match: (Mod8u <t> n (Const8 [c])) 12202 // cond: isPowerOfTwo(c&0xff) 12203 // result: (And8 n (Const8 <t> [(c&0xff)-1])) 12204 for { 12205 t := v.Type 12206 _ = v.Args[1] 12207 n := v.Args[0] 12208 v_1 := v.Args[1] 12209 if v_1.Op != OpConst8 { 12210 break 12211 } 12212 c := v_1.AuxInt 12213 if !(isPowerOfTwo(c & 0xff)) { 12214 break 12215 } 12216 v.reset(OpAnd8) 12217 v.AddArg(n) 12218 v0 := b.NewValue0(v.Pos, OpConst8, t) 12219 v0.AuxInt = (c & 0xff) - 1 12220 v.AddArg(v0) 12221 return true 12222 } 12223 // match: (Mod8u <t> x (Const8 [c])) 12224 // cond: x.Op != OpConst8 && c > 0 && umagicOK(8 ,c) 12225 // result: (Sub8 x (Mul8 <t> (Div8u <t> x (Const8 <t> [c])) (Const8 <t> [c]))) 12226 for { 12227 t := v.Type 12228 _ = v.Args[1] 12229 x := v.Args[0] 12230 v_1 := v.Args[1] 12231 if v_1.Op != OpConst8 { 12232 break 12233 } 12234 c := v_1.AuxInt 12235 if !(x.Op != OpConst8 && c > 0 && umagicOK(8, c)) { 12236 break 12237 } 12238 v.reset(OpSub8) 12239 v.AddArg(x) 12240 v0 := b.NewValue0(v.Pos, OpMul8, t) 12241 v1 := b.NewValue0(v.Pos, OpDiv8u, t) 12242 v1.AddArg(x) 12243 v2 := b.NewValue0(v.Pos, OpConst8, t) 12244 v2.AuxInt = c 12245 v1.AddArg(v2) 12246 v0.AddArg(v1) 12247 v3 := b.NewValue0(v.Pos, OpConst8, t) 12248 v3.AuxInt = c 12249 v0.AddArg(v3) 12250 v.AddArg(v0) 12251 return true 12252 } 12253 return false 12254 } 12255 func rewriteValuegeneric_OpMul16_0(v *Value) bool { 12256 b := v.Block 12257 _ = b 12258 typ := &b.Func.Config.Types 12259 _ = typ 12260 // match: (Mul16 (Const16 [c]) (Const16 [d])) 12261 // cond: 12262 // result: (Const16 [int64(int16(c*d))]) 12263 for { 12264 _ = v.Args[1] 12265 v_0 := v.Args[0] 12266 if v_0.Op != OpConst16 { 12267 break 12268 } 12269 c := v_0.AuxInt 12270 v_1 := v.Args[1] 12271 if v_1.Op != OpConst16 { 12272 break 12273 } 12274 d := v_1.AuxInt 12275 v.reset(OpConst16) 12276 v.AuxInt = int64(int16(c * d)) 12277 return true 12278 } 12279 // match: (Mul16 (Const16 [d]) (Const16 [c])) 12280 // cond: 12281 // result: (Const16 [int64(int16(c*d))]) 12282 for { 12283 _ = v.Args[1] 12284 v_0 := v.Args[0] 12285 if v_0.Op != OpConst16 { 12286 break 12287 } 12288 d := v_0.AuxInt 12289 v_1 := v.Args[1] 12290 if v_1.Op != OpConst16 { 12291 break 12292 } 12293 c := v_1.AuxInt 12294 v.reset(OpConst16) 12295 v.AuxInt = int64(int16(c * d)) 12296 return true 12297 } 12298 // match: (Mul16 (Const16 [1]) x) 12299 // cond: 12300 // result: x 12301 for { 12302 _ = v.Args[1] 12303 v_0 := v.Args[0] 12304 if v_0.Op != OpConst16 { 12305 break 12306 } 12307 if v_0.AuxInt != 1 { 12308 break 12309 } 12310 x := v.Args[1] 12311 v.reset(OpCopy) 12312 v.Type = x.Type 12313 v.AddArg(x) 12314 return true 12315 } 12316 // match: (Mul16 x (Const16 [1])) 12317 // cond: 12318 // result: x 12319 for { 12320 _ = v.Args[1] 12321 x := v.Args[0] 12322 v_1 := v.Args[1] 12323 if v_1.Op != OpConst16 { 12324 break 12325 } 12326 if v_1.AuxInt != 1 { 12327 break 12328 } 12329 v.reset(OpCopy) 12330 v.Type = x.Type 12331 v.AddArg(x) 12332 return true 12333 } 12334 // match: (Mul16 (Const16 [-1]) x) 12335 // cond: 12336 // result: (Neg16 x) 12337 for { 12338 _ = v.Args[1] 12339 v_0 := v.Args[0] 12340 if v_0.Op != OpConst16 { 12341 break 12342 } 12343 if v_0.AuxInt != -1 { 12344 break 12345 } 12346 x := v.Args[1] 12347 v.reset(OpNeg16) 12348 v.AddArg(x) 12349 return true 12350 } 12351 // match: (Mul16 x (Const16 [-1])) 12352 // cond: 12353 // result: (Neg16 x) 12354 for { 12355 _ = v.Args[1] 12356 x := 
v.Args[0] 12357 v_1 := v.Args[1] 12358 if v_1.Op != OpConst16 { 12359 break 12360 } 12361 if v_1.AuxInt != -1 { 12362 break 12363 } 12364 v.reset(OpNeg16) 12365 v.AddArg(x) 12366 return true 12367 } 12368 // match: (Mul16 <t> n (Const16 [c])) 12369 // cond: isPowerOfTwo(c) 12370 // result: (Lsh16x64 <t> n (Const64 <typ.UInt64> [log2(c)])) 12371 for { 12372 t := v.Type 12373 _ = v.Args[1] 12374 n := v.Args[0] 12375 v_1 := v.Args[1] 12376 if v_1.Op != OpConst16 { 12377 break 12378 } 12379 c := v_1.AuxInt 12380 if !(isPowerOfTwo(c)) { 12381 break 12382 } 12383 v.reset(OpLsh16x64) 12384 v.Type = t 12385 v.AddArg(n) 12386 v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 12387 v0.AuxInt = log2(c) 12388 v.AddArg(v0) 12389 return true 12390 } 12391 // match: (Mul16 <t> (Const16 [c]) n) 12392 // cond: isPowerOfTwo(c) 12393 // result: (Lsh16x64 <t> n (Const64 <typ.UInt64> [log2(c)])) 12394 for { 12395 t := v.Type 12396 _ = v.Args[1] 12397 v_0 := v.Args[0] 12398 if v_0.Op != OpConst16 { 12399 break 12400 } 12401 c := v_0.AuxInt 12402 n := v.Args[1] 12403 if !(isPowerOfTwo(c)) { 12404 break 12405 } 12406 v.reset(OpLsh16x64) 12407 v.Type = t 12408 v.AddArg(n) 12409 v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 12410 v0.AuxInt = log2(c) 12411 v.AddArg(v0) 12412 return true 12413 } 12414 // match: (Mul16 <t> n (Const16 [c])) 12415 // cond: t.IsSigned() && isPowerOfTwo(-c) 12416 // result: (Neg16 (Lsh16x64 <t> n (Const64 <typ.UInt64> [log2(-c)]))) 12417 for { 12418 t := v.Type 12419 _ = v.Args[1] 12420 n := v.Args[0] 12421 v_1 := v.Args[1] 12422 if v_1.Op != OpConst16 { 12423 break 12424 } 12425 c := v_1.AuxInt 12426 if !(t.IsSigned() && isPowerOfTwo(-c)) { 12427 break 12428 } 12429 v.reset(OpNeg16) 12430 v0 := b.NewValue0(v.Pos, OpLsh16x64, t) 12431 v0.AddArg(n) 12432 v1 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 12433 v1.AuxInt = log2(-c) 12434 v0.AddArg(v1) 12435 v.AddArg(v0) 12436 return true 12437 } 12438 // match: (Mul16 <t> (Const16 [c]) n) 12439 // cond: t.IsSigned() && isPowerOfTwo(-c) 12440 // result: (Neg16 (Lsh16x64 <t> n (Const64 <typ.UInt64> [log2(-c)]))) 12441 for { 12442 t := v.Type 12443 _ = v.Args[1] 12444 v_0 := v.Args[0] 12445 if v_0.Op != OpConst16 { 12446 break 12447 } 12448 c := v_0.AuxInt 12449 n := v.Args[1] 12450 if !(t.IsSigned() && isPowerOfTwo(-c)) { 12451 break 12452 } 12453 v.reset(OpNeg16) 12454 v0 := b.NewValue0(v.Pos, OpLsh16x64, t) 12455 v0.AddArg(n) 12456 v1 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 12457 v1.AuxInt = log2(-c) 12458 v0.AddArg(v1) 12459 v.AddArg(v0) 12460 return true 12461 } 12462 return false 12463 } 12464 func rewriteValuegeneric_OpMul16_10(v *Value) bool { 12465 b := v.Block 12466 _ = b 12467 // match: (Mul16 (Const16 [0]) _) 12468 // cond: 12469 // result: (Const16 [0]) 12470 for { 12471 _ = v.Args[1] 12472 v_0 := v.Args[0] 12473 if v_0.Op != OpConst16 { 12474 break 12475 } 12476 if v_0.AuxInt != 0 { 12477 break 12478 } 12479 v.reset(OpConst16) 12480 v.AuxInt = 0 12481 return true 12482 } 12483 // match: (Mul16 _ (Const16 [0])) 12484 // cond: 12485 // result: (Const16 [0]) 12486 for { 12487 _ = v.Args[1] 12488 v_1 := v.Args[1] 12489 if v_1.Op != OpConst16 { 12490 break 12491 } 12492 if v_1.AuxInt != 0 { 12493 break 12494 } 12495 v.reset(OpConst16) 12496 v.AuxInt = 0 12497 return true 12498 } 12499 // match: (Mul16 (Const16 <t> [c]) (Mul16 (Const16 <t> [d]) x)) 12500 // cond: 12501 // result: (Mul16 (Const16 <t> [int64(int16(c*d))]) x) 12502 for { 12503 _ = v.Args[1] 12504 v_0 := v.Args[0] 12505 if v_0.Op != OpConst16 { 12506 break 12507 } 12508 
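// Illustrative sketch (not part of the generated file): the Mul16 rules
// above strength-reduce multiplication by a constant power of two into a
// left shift by log2(c), and multiplication by a negative power of two into
// a negated shift. On wrap-around 16-bit arithmetic both identities hold:
func mul16Pow2Equal(n int16, k uint) bool {
	if k >= 15 {
		return true // keep 1<<k a positive int16 power of two for this sketch
	}
	c := int16(1) << k
	return n*c == n<<k && n*-c == -(n << k)
}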
t := v_0.Type 12509 c := v_0.AuxInt 12510 v_1 := v.Args[1] 12511 if v_1.Op != OpMul16 { 12512 break 12513 } 12514 _ = v_1.Args[1] 12515 v_1_0 := v_1.Args[0] 12516 if v_1_0.Op != OpConst16 { 12517 break 12518 } 12519 if v_1_0.Type != t { 12520 break 12521 } 12522 d := v_1_0.AuxInt 12523 x := v_1.Args[1] 12524 v.reset(OpMul16) 12525 v0 := b.NewValue0(v.Pos, OpConst16, t) 12526 v0.AuxInt = int64(int16(c * d)) 12527 v.AddArg(v0) 12528 v.AddArg(x) 12529 return true 12530 } 12531 // match: (Mul16 (Const16 <t> [c]) (Mul16 x (Const16 <t> [d]))) 12532 // cond: 12533 // result: (Mul16 (Const16 <t> [int64(int16(c*d))]) x) 12534 for { 12535 _ = v.Args[1] 12536 v_0 := v.Args[0] 12537 if v_0.Op != OpConst16 { 12538 break 12539 } 12540 t := v_0.Type 12541 c := v_0.AuxInt 12542 v_1 := v.Args[1] 12543 if v_1.Op != OpMul16 { 12544 break 12545 } 12546 _ = v_1.Args[1] 12547 x := v_1.Args[0] 12548 v_1_1 := v_1.Args[1] 12549 if v_1_1.Op != OpConst16 { 12550 break 12551 } 12552 if v_1_1.Type != t { 12553 break 12554 } 12555 d := v_1_1.AuxInt 12556 v.reset(OpMul16) 12557 v0 := b.NewValue0(v.Pos, OpConst16, t) 12558 v0.AuxInt = int64(int16(c * d)) 12559 v.AddArg(v0) 12560 v.AddArg(x) 12561 return true 12562 } 12563 // match: (Mul16 (Mul16 (Const16 <t> [d]) x) (Const16 <t> [c])) 12564 // cond: 12565 // result: (Mul16 (Const16 <t> [int64(int16(c*d))]) x) 12566 for { 12567 _ = v.Args[1] 12568 v_0 := v.Args[0] 12569 if v_0.Op != OpMul16 { 12570 break 12571 } 12572 _ = v_0.Args[1] 12573 v_0_0 := v_0.Args[0] 12574 if v_0_0.Op != OpConst16 { 12575 break 12576 } 12577 t := v_0_0.Type 12578 d := v_0_0.AuxInt 12579 x := v_0.Args[1] 12580 v_1 := v.Args[1] 12581 if v_1.Op != OpConst16 { 12582 break 12583 } 12584 if v_1.Type != t { 12585 break 12586 } 12587 c := v_1.AuxInt 12588 v.reset(OpMul16) 12589 v0 := b.NewValue0(v.Pos, OpConst16, t) 12590 v0.AuxInt = int64(int16(c * d)) 12591 v.AddArg(v0) 12592 v.AddArg(x) 12593 return true 12594 } 12595 // match: (Mul16 (Mul16 x (Const16 <t> [d])) (Const16 <t> [c])) 12596 // cond: 12597 // result: (Mul16 (Const16 <t> [int64(int16(c*d))]) x) 12598 for { 12599 _ = v.Args[1] 12600 v_0 := v.Args[0] 12601 if v_0.Op != OpMul16 { 12602 break 12603 } 12604 _ = v_0.Args[1] 12605 x := v_0.Args[0] 12606 v_0_1 := v_0.Args[1] 12607 if v_0_1.Op != OpConst16 { 12608 break 12609 } 12610 t := v_0_1.Type 12611 d := v_0_1.AuxInt 12612 v_1 := v.Args[1] 12613 if v_1.Op != OpConst16 { 12614 break 12615 } 12616 if v_1.Type != t { 12617 break 12618 } 12619 c := v_1.AuxInt 12620 v.reset(OpMul16) 12621 v0 := b.NewValue0(v.Pos, OpConst16, t) 12622 v0.AuxInt = int64(int16(c * d)) 12623 v.AddArg(v0) 12624 v.AddArg(x) 12625 return true 12626 } 12627 return false 12628 } 12629 func rewriteValuegeneric_OpMul32_0(v *Value) bool { 12630 b := v.Block 12631 _ = b 12632 typ := &b.Func.Config.Types 12633 _ = typ 12634 // match: (Mul32 (Const32 [c]) (Const32 [d])) 12635 // cond: 12636 // result: (Const32 [int64(int32(c*d))]) 12637 for { 12638 _ = v.Args[1] 12639 v_0 := v.Args[0] 12640 if v_0.Op != OpConst32 { 12641 break 12642 } 12643 c := v_0.AuxInt 12644 v_1 := v.Args[1] 12645 if v_1.Op != OpConst32 { 12646 break 12647 } 12648 d := v_1.AuxInt 12649 v.reset(OpConst32) 12650 v.AuxInt = int64(int32(c * d)) 12651 return true 12652 } 12653 // match: (Mul32 (Const32 [d]) (Const32 [c])) 12654 // cond: 12655 // result: (Const32 [int64(int32(c*d))]) 12656 for { 12657 _ = v.Args[1] 12658 v_0 := v.Args[0] 12659 if v_0.Op != OpConst32 { 12660 break 12661 } 12662 d := v_0.AuxInt 12663 v_1 := v.Args[1] 12664 if v_1.Op != OpConst32 { 
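// Illustrative sketch (not part of the generated file): the Mul16 rules just
// above pull constants together, rewriting c*(d*x) as (c*d)*x, and store the
// folded product as int64(int16(c*d)) because narrow constants live
// sign-extended in the 64-bit AuxInt. Reassociation is exact on wrap-around
// arithmetic, and the AuxInt normalization is a truncate-then-widen:
func mul16ReassocEqual(c, d, x int16) bool {
	return c*(d*x) == (c*d)*x
}

func mul16AuxInt(c, d int64) int64 {
	return int64(int16(c * d)) // wrap the product to 16 bits, then sign-extend
}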
12665 break 12666 } 12667 c := v_1.AuxInt 12668 v.reset(OpConst32) 12669 v.AuxInt = int64(int32(c * d)) 12670 return true 12671 } 12672 // match: (Mul32 (Const32 [1]) x) 12673 // cond: 12674 // result: x 12675 for { 12676 _ = v.Args[1] 12677 v_0 := v.Args[0] 12678 if v_0.Op != OpConst32 { 12679 break 12680 } 12681 if v_0.AuxInt != 1 { 12682 break 12683 } 12684 x := v.Args[1] 12685 v.reset(OpCopy) 12686 v.Type = x.Type 12687 v.AddArg(x) 12688 return true 12689 } 12690 // match: (Mul32 x (Const32 [1])) 12691 // cond: 12692 // result: x 12693 for { 12694 _ = v.Args[1] 12695 x := v.Args[0] 12696 v_1 := v.Args[1] 12697 if v_1.Op != OpConst32 { 12698 break 12699 } 12700 if v_1.AuxInt != 1 { 12701 break 12702 } 12703 v.reset(OpCopy) 12704 v.Type = x.Type 12705 v.AddArg(x) 12706 return true 12707 } 12708 // match: (Mul32 (Const32 [-1]) x) 12709 // cond: 12710 // result: (Neg32 x) 12711 for { 12712 _ = v.Args[1] 12713 v_0 := v.Args[0] 12714 if v_0.Op != OpConst32 { 12715 break 12716 } 12717 if v_0.AuxInt != -1 { 12718 break 12719 } 12720 x := v.Args[1] 12721 v.reset(OpNeg32) 12722 v.AddArg(x) 12723 return true 12724 } 12725 // match: (Mul32 x (Const32 [-1])) 12726 // cond: 12727 // result: (Neg32 x) 12728 for { 12729 _ = v.Args[1] 12730 x := v.Args[0] 12731 v_1 := v.Args[1] 12732 if v_1.Op != OpConst32 { 12733 break 12734 } 12735 if v_1.AuxInt != -1 { 12736 break 12737 } 12738 v.reset(OpNeg32) 12739 v.AddArg(x) 12740 return true 12741 } 12742 // match: (Mul32 <t> n (Const32 [c])) 12743 // cond: isPowerOfTwo(c) 12744 // result: (Lsh32x64 <t> n (Const64 <typ.UInt64> [log2(c)])) 12745 for { 12746 t := v.Type 12747 _ = v.Args[1] 12748 n := v.Args[0] 12749 v_1 := v.Args[1] 12750 if v_1.Op != OpConst32 { 12751 break 12752 } 12753 c := v_1.AuxInt 12754 if !(isPowerOfTwo(c)) { 12755 break 12756 } 12757 v.reset(OpLsh32x64) 12758 v.Type = t 12759 v.AddArg(n) 12760 v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 12761 v0.AuxInt = log2(c) 12762 v.AddArg(v0) 12763 return true 12764 } 12765 // match: (Mul32 <t> (Const32 [c]) n) 12766 // cond: isPowerOfTwo(c) 12767 // result: (Lsh32x64 <t> n (Const64 <typ.UInt64> [log2(c)])) 12768 for { 12769 t := v.Type 12770 _ = v.Args[1] 12771 v_0 := v.Args[0] 12772 if v_0.Op != OpConst32 { 12773 break 12774 } 12775 c := v_0.AuxInt 12776 n := v.Args[1] 12777 if !(isPowerOfTwo(c)) { 12778 break 12779 } 12780 v.reset(OpLsh32x64) 12781 v.Type = t 12782 v.AddArg(n) 12783 v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 12784 v0.AuxInt = log2(c) 12785 v.AddArg(v0) 12786 return true 12787 } 12788 // match: (Mul32 <t> n (Const32 [c])) 12789 // cond: t.IsSigned() && isPowerOfTwo(-c) 12790 // result: (Neg32 (Lsh32x64 <t> n (Const64 <typ.UInt64> [log2(-c)]))) 12791 for { 12792 t := v.Type 12793 _ = v.Args[1] 12794 n := v.Args[0] 12795 v_1 := v.Args[1] 12796 if v_1.Op != OpConst32 { 12797 break 12798 } 12799 c := v_1.AuxInt 12800 if !(t.IsSigned() && isPowerOfTwo(-c)) { 12801 break 12802 } 12803 v.reset(OpNeg32) 12804 v0 := b.NewValue0(v.Pos, OpLsh32x64, t) 12805 v0.AddArg(n) 12806 v1 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 12807 v1.AuxInt = log2(-c) 12808 v0.AddArg(v1) 12809 v.AddArg(v0) 12810 return true 12811 } 12812 // match: (Mul32 <t> (Const32 [c]) n) 12813 // cond: t.IsSigned() && isPowerOfTwo(-c) 12814 // result: (Neg32 (Lsh32x64 <t> n (Const64 <typ.UInt64> [log2(-c)]))) 12815 for { 12816 t := v.Type 12817 _ = v.Args[1] 12818 v_0 := v.Args[0] 12819 if v_0.Op != OpConst32 { 12820 break 12821 } 12822 c := v_0.AuxInt 12823 n := v.Args[1] 12824 if !(t.IsSigned() && 
isPowerOfTwo(-c)) { 12825 break 12826 } 12827 v.reset(OpNeg32) 12828 v0 := b.NewValue0(v.Pos, OpLsh32x64, t) 12829 v0.AddArg(n) 12830 v1 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 12831 v1.AuxInt = log2(-c) 12832 v0.AddArg(v1) 12833 v.AddArg(v0) 12834 return true 12835 } 12836 return false 12837 } 12838 func rewriteValuegeneric_OpMul32_10(v *Value) bool { 12839 b := v.Block 12840 _ = b 12841 // match: (Mul32 (Const32 <t> [c]) (Add32 <t> (Const32 <t> [d]) x)) 12842 // cond: 12843 // result: (Add32 (Const32 <t> [int64(int32(c*d))]) (Mul32 <t> (Const32 <t> [c]) x)) 12844 for { 12845 _ = v.Args[1] 12846 v_0 := v.Args[0] 12847 if v_0.Op != OpConst32 { 12848 break 12849 } 12850 t := v_0.Type 12851 c := v_0.AuxInt 12852 v_1 := v.Args[1] 12853 if v_1.Op != OpAdd32 { 12854 break 12855 } 12856 if v_1.Type != t { 12857 break 12858 } 12859 _ = v_1.Args[1] 12860 v_1_0 := v_1.Args[0] 12861 if v_1_0.Op != OpConst32 { 12862 break 12863 } 12864 if v_1_0.Type != t { 12865 break 12866 } 12867 d := v_1_0.AuxInt 12868 x := v_1.Args[1] 12869 v.reset(OpAdd32) 12870 v0 := b.NewValue0(v.Pos, OpConst32, t) 12871 v0.AuxInt = int64(int32(c * d)) 12872 v.AddArg(v0) 12873 v1 := b.NewValue0(v.Pos, OpMul32, t) 12874 v2 := b.NewValue0(v.Pos, OpConst32, t) 12875 v2.AuxInt = c 12876 v1.AddArg(v2) 12877 v1.AddArg(x) 12878 v.AddArg(v1) 12879 return true 12880 } 12881 // match: (Mul32 (Const32 <t> [c]) (Add32 <t> x (Const32 <t> [d]))) 12882 // cond: 12883 // result: (Add32 (Const32 <t> [int64(int32(c*d))]) (Mul32 <t> (Const32 <t> [c]) x)) 12884 for { 12885 _ = v.Args[1] 12886 v_0 := v.Args[0] 12887 if v_0.Op != OpConst32 { 12888 break 12889 } 12890 t := v_0.Type 12891 c := v_0.AuxInt 12892 v_1 := v.Args[1] 12893 if v_1.Op != OpAdd32 { 12894 break 12895 } 12896 if v_1.Type != t { 12897 break 12898 } 12899 _ = v_1.Args[1] 12900 x := v_1.Args[0] 12901 v_1_1 := v_1.Args[1] 12902 if v_1_1.Op != OpConst32 { 12903 break 12904 } 12905 if v_1_1.Type != t { 12906 break 12907 } 12908 d := v_1_1.AuxInt 12909 v.reset(OpAdd32) 12910 v0 := b.NewValue0(v.Pos, OpConst32, t) 12911 v0.AuxInt = int64(int32(c * d)) 12912 v.AddArg(v0) 12913 v1 := b.NewValue0(v.Pos, OpMul32, t) 12914 v2 := b.NewValue0(v.Pos, OpConst32, t) 12915 v2.AuxInt = c 12916 v1.AddArg(v2) 12917 v1.AddArg(x) 12918 v.AddArg(v1) 12919 return true 12920 } 12921 // match: (Mul32 (Add32 <t> (Const32 <t> [d]) x) (Const32 <t> [c])) 12922 // cond: 12923 // result: (Add32 (Const32 <t> [int64(int32(c*d))]) (Mul32 <t> (Const32 <t> [c]) x)) 12924 for { 12925 _ = v.Args[1] 12926 v_0 := v.Args[0] 12927 if v_0.Op != OpAdd32 { 12928 break 12929 } 12930 t := v_0.Type 12931 _ = v_0.Args[1] 12932 v_0_0 := v_0.Args[0] 12933 if v_0_0.Op != OpConst32 { 12934 break 12935 } 12936 if v_0_0.Type != t { 12937 break 12938 } 12939 d := v_0_0.AuxInt 12940 x := v_0.Args[1] 12941 v_1 := v.Args[1] 12942 if v_1.Op != OpConst32 { 12943 break 12944 } 12945 if v_1.Type != t { 12946 break 12947 } 12948 c := v_1.AuxInt 12949 v.reset(OpAdd32) 12950 v0 := b.NewValue0(v.Pos, OpConst32, t) 12951 v0.AuxInt = int64(int32(c * d)) 12952 v.AddArg(v0) 12953 v1 := b.NewValue0(v.Pos, OpMul32, t) 12954 v2 := b.NewValue0(v.Pos, OpConst32, t) 12955 v2.AuxInt = c 12956 v1.AddArg(v2) 12957 v1.AddArg(x) 12958 v.AddArg(v1) 12959 return true 12960 } 12961 // match: (Mul32 (Add32 <t> x (Const32 <t> [d])) (Const32 <t> [c])) 12962 // cond: 12963 // result: (Add32 (Const32 <t> [int64(int32(c*d))]) (Mul32 <t> (Const32 <t> [c]) x)) 12964 for { 12965 _ = v.Args[1] 12966 v_0 := v.Args[0] 12967 if v_0.Op != OpAdd32 { 12968 break 12969 } 12970 
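// Illustrative sketch (not part of the generated file): the Mul32 rules
// above distribute a constant factor over an addition that carries its own
// constant, rewriting c*(d+x) as c*d + c*x, so the c*d half folds to a
// constant and the remaining multiply can be simplified further. The
// distributive identity is exact on wrap-around 32-bit arithmetic:
func mul32DistributeEqual(c, d, x int32) bool {
	return c*(d+x) == c*d+c*x
}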
t := v_0.Type 12971 _ = v_0.Args[1] 12972 x := v_0.Args[0] 12973 v_0_1 := v_0.Args[1] 12974 if v_0_1.Op != OpConst32 { 12975 break 12976 } 12977 if v_0_1.Type != t { 12978 break 12979 } 12980 d := v_0_1.AuxInt 12981 v_1 := v.Args[1] 12982 if v_1.Op != OpConst32 { 12983 break 12984 } 12985 if v_1.Type != t { 12986 break 12987 } 12988 c := v_1.AuxInt 12989 v.reset(OpAdd32) 12990 v0 := b.NewValue0(v.Pos, OpConst32, t) 12991 v0.AuxInt = int64(int32(c * d)) 12992 v.AddArg(v0) 12993 v1 := b.NewValue0(v.Pos, OpMul32, t) 12994 v2 := b.NewValue0(v.Pos, OpConst32, t) 12995 v2.AuxInt = c 12996 v1.AddArg(v2) 12997 v1.AddArg(x) 12998 v.AddArg(v1) 12999 return true 13000 } 13001 // match: (Mul32 (Const32 [0]) _) 13002 // cond: 13003 // result: (Const32 [0]) 13004 for { 13005 _ = v.Args[1] 13006 v_0 := v.Args[0] 13007 if v_0.Op != OpConst32 { 13008 break 13009 } 13010 if v_0.AuxInt != 0 { 13011 break 13012 } 13013 v.reset(OpConst32) 13014 v.AuxInt = 0 13015 return true 13016 } 13017 // match: (Mul32 _ (Const32 [0])) 13018 // cond: 13019 // result: (Const32 [0]) 13020 for { 13021 _ = v.Args[1] 13022 v_1 := v.Args[1] 13023 if v_1.Op != OpConst32 { 13024 break 13025 } 13026 if v_1.AuxInt != 0 { 13027 break 13028 } 13029 v.reset(OpConst32) 13030 v.AuxInt = 0 13031 return true 13032 } 13033 // match: (Mul32 (Const32 <t> [c]) (Mul32 (Const32 <t> [d]) x)) 13034 // cond: 13035 // result: (Mul32 (Const32 <t> [int64(int32(c*d))]) x) 13036 for { 13037 _ = v.Args[1] 13038 v_0 := v.Args[0] 13039 if v_0.Op != OpConst32 { 13040 break 13041 } 13042 t := v_0.Type 13043 c := v_0.AuxInt 13044 v_1 := v.Args[1] 13045 if v_1.Op != OpMul32 { 13046 break 13047 } 13048 _ = v_1.Args[1] 13049 v_1_0 := v_1.Args[0] 13050 if v_1_0.Op != OpConst32 { 13051 break 13052 } 13053 if v_1_0.Type != t { 13054 break 13055 } 13056 d := v_1_0.AuxInt 13057 x := v_1.Args[1] 13058 v.reset(OpMul32) 13059 v0 := b.NewValue0(v.Pos, OpConst32, t) 13060 v0.AuxInt = int64(int32(c * d)) 13061 v.AddArg(v0) 13062 v.AddArg(x) 13063 return true 13064 } 13065 // match: (Mul32 (Const32 <t> [c]) (Mul32 x (Const32 <t> [d]))) 13066 // cond: 13067 // result: (Mul32 (Const32 <t> [int64(int32(c*d))]) x) 13068 for { 13069 _ = v.Args[1] 13070 v_0 := v.Args[0] 13071 if v_0.Op != OpConst32 { 13072 break 13073 } 13074 t := v_0.Type 13075 c := v_0.AuxInt 13076 v_1 := v.Args[1] 13077 if v_1.Op != OpMul32 { 13078 break 13079 } 13080 _ = v_1.Args[1] 13081 x := v_1.Args[0] 13082 v_1_1 := v_1.Args[1] 13083 if v_1_1.Op != OpConst32 { 13084 break 13085 } 13086 if v_1_1.Type != t { 13087 break 13088 } 13089 d := v_1_1.AuxInt 13090 v.reset(OpMul32) 13091 v0 := b.NewValue0(v.Pos, OpConst32, t) 13092 v0.AuxInt = int64(int32(c * d)) 13093 v.AddArg(v0) 13094 v.AddArg(x) 13095 return true 13096 } 13097 // match: (Mul32 (Mul32 (Const32 <t> [d]) x) (Const32 <t> [c])) 13098 // cond: 13099 // result: (Mul32 (Const32 <t> [int64(int32(c*d))]) x) 13100 for { 13101 _ = v.Args[1] 13102 v_0 := v.Args[0] 13103 if v_0.Op != OpMul32 { 13104 break 13105 } 13106 _ = v_0.Args[1] 13107 v_0_0 := v_0.Args[0] 13108 if v_0_0.Op != OpConst32 { 13109 break 13110 } 13111 t := v_0_0.Type 13112 d := v_0_0.AuxInt 13113 x := v_0.Args[1] 13114 v_1 := v.Args[1] 13115 if v_1.Op != OpConst32 { 13116 break 13117 } 13118 if v_1.Type != t { 13119 break 13120 } 13121 c := v_1.AuxInt 13122 v.reset(OpMul32) 13123 v0 := b.NewValue0(v.Pos, OpConst32, t) 13124 v0.AuxInt = int64(int32(c * d)) 13125 v.AddArg(v0) 13126 v.AddArg(x) 13127 return true 13128 } 13129 // match: (Mul32 (Mul32 x (Const32 <t> [d])) (Const32 <t> [c])) 13130 
// cond: 13131 // result: (Mul32 (Const32 <t> [int64(int32(c*d))]) x) 13132 for { 13133 _ = v.Args[1] 13134 v_0 := v.Args[0] 13135 if v_0.Op != OpMul32 { 13136 break 13137 } 13138 _ = v_0.Args[1] 13139 x := v_0.Args[0] 13140 v_0_1 := v_0.Args[1] 13141 if v_0_1.Op != OpConst32 { 13142 break 13143 } 13144 t := v_0_1.Type 13145 d := v_0_1.AuxInt 13146 v_1 := v.Args[1] 13147 if v_1.Op != OpConst32 { 13148 break 13149 } 13150 if v_1.Type != t { 13151 break 13152 } 13153 c := v_1.AuxInt 13154 v.reset(OpMul32) 13155 v0 := b.NewValue0(v.Pos, OpConst32, t) 13156 v0.AuxInt = int64(int32(c * d)) 13157 v.AddArg(v0) 13158 v.AddArg(x) 13159 return true 13160 } 13161 return false 13162 } 13163 func rewriteValuegeneric_OpMul32F_0(v *Value) bool { 13164 // match: (Mul32F (Const32F [c]) (Const32F [d])) 13165 // cond: 13166 // result: (Const32F [f2i(float64(i2f32(c) * i2f32(d)))]) 13167 for { 13168 _ = v.Args[1] 13169 v_0 := v.Args[0] 13170 if v_0.Op != OpConst32F { 13171 break 13172 } 13173 c := v_0.AuxInt 13174 v_1 := v.Args[1] 13175 if v_1.Op != OpConst32F { 13176 break 13177 } 13178 d := v_1.AuxInt 13179 v.reset(OpConst32F) 13180 v.AuxInt = f2i(float64(i2f32(c) * i2f32(d))) 13181 return true 13182 } 13183 // match: (Mul32F (Const32F [d]) (Const32F [c])) 13184 // cond: 13185 // result: (Const32F [f2i(float64(i2f32(c) * i2f32(d)))]) 13186 for { 13187 _ = v.Args[1] 13188 v_0 := v.Args[0] 13189 if v_0.Op != OpConst32F { 13190 break 13191 } 13192 d := v_0.AuxInt 13193 v_1 := v.Args[1] 13194 if v_1.Op != OpConst32F { 13195 break 13196 } 13197 c := v_1.AuxInt 13198 v.reset(OpConst32F) 13199 v.AuxInt = f2i(float64(i2f32(c) * i2f32(d))) 13200 return true 13201 } 13202 // match: (Mul32F x (Const32F [f2i(1)])) 13203 // cond: 13204 // result: x 13205 for { 13206 _ = v.Args[1] 13207 x := v.Args[0] 13208 v_1 := v.Args[1] 13209 if v_1.Op != OpConst32F { 13210 break 13211 } 13212 if v_1.AuxInt != f2i(1) { 13213 break 13214 } 13215 v.reset(OpCopy) 13216 v.Type = x.Type 13217 v.AddArg(x) 13218 return true 13219 } 13220 // match: (Mul32F (Const32F [f2i(1)]) x) 13221 // cond: 13222 // result: x 13223 for { 13224 _ = v.Args[1] 13225 v_0 := v.Args[0] 13226 if v_0.Op != OpConst32F { 13227 break 13228 } 13229 if v_0.AuxInt != f2i(1) { 13230 break 13231 } 13232 x := v.Args[1] 13233 v.reset(OpCopy) 13234 v.Type = x.Type 13235 v.AddArg(x) 13236 return true 13237 } 13238 // match: (Mul32F x (Const32F [f2i(-1)])) 13239 // cond: 13240 // result: (Neg32F x) 13241 for { 13242 _ = v.Args[1] 13243 x := v.Args[0] 13244 v_1 := v.Args[1] 13245 if v_1.Op != OpConst32F { 13246 break 13247 } 13248 if v_1.AuxInt != f2i(-1) { 13249 break 13250 } 13251 v.reset(OpNeg32F) 13252 v.AddArg(x) 13253 return true 13254 } 13255 // match: (Mul32F (Const32F [f2i(-1)]) x) 13256 // cond: 13257 // result: (Neg32F x) 13258 for { 13259 _ = v.Args[1] 13260 v_0 := v.Args[0] 13261 if v_0.Op != OpConst32F { 13262 break 13263 } 13264 if v_0.AuxInt != f2i(-1) { 13265 break 13266 } 13267 x := v.Args[1] 13268 v.reset(OpNeg32F) 13269 v.AddArg(x) 13270 return true 13271 } 13272 // match: (Mul32F x (Const32F [f2i(2)])) 13273 // cond: 13274 // result: (Add32F x x) 13275 for { 13276 _ = v.Args[1] 13277 x := v.Args[0] 13278 v_1 := v.Args[1] 13279 if v_1.Op != OpConst32F { 13280 break 13281 } 13282 if v_1.AuxInt != f2i(2) { 13283 break 13284 } 13285 v.reset(OpAdd32F) 13286 v.AddArg(x) 13287 v.AddArg(x) 13288 return true 13289 } 13290 // match: (Mul32F (Const32F [f2i(2)]) x) 13291 // cond: 13292 // result: (Add32F x x) 13293 for { 13294 _ = v.Args[1] 13295 v_0 := v.Args[0] 
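// Illustrative note (hand-written, not generated): Const32F/Const64F store
// their value in AuxInt as raw float64 bits, so the constant-folding rules in
// this function go through the i2f/i2f32 and f2i helpers rather than
// multiplying AuxInt directly. Assuming those helpers are the usual
// Float64bits round-trip from rewrite.go, the folding step for Mul32F amounts
// to the following stand-alone sketch (hypothetical name, needs "math"):
//
//	func foldConst32F(c, d int64) int64 {
//		cf := float32(math.Float64frombits(uint64(c)))   // i2f32(c)
//		df := float32(math.Float64frombits(uint64(d)))   // i2f32(d)
//		return int64(math.Float64bits(float64(cf * df))) // f2i(...)
//	}
//
// The x*2 => x+x and x*(-1) => Neg32F rewrites are exact in IEEE arithmetic,
// which is what makes them safe to apply unconditionally here.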
13296 if v_0.Op != OpConst32F { 13297 break 13298 } 13299 if v_0.AuxInt != f2i(2) { 13300 break 13301 } 13302 x := v.Args[1] 13303 v.reset(OpAdd32F) 13304 v.AddArg(x) 13305 v.AddArg(x) 13306 return true 13307 } 13308 return false 13309 } 13310 func rewriteValuegeneric_OpMul64_0(v *Value) bool { 13311 b := v.Block 13312 _ = b 13313 typ := &b.Func.Config.Types 13314 _ = typ 13315 // match: (Mul64 (Const64 [c]) (Const64 [d])) 13316 // cond: 13317 // result: (Const64 [c*d]) 13318 for { 13319 _ = v.Args[1] 13320 v_0 := v.Args[0] 13321 if v_0.Op != OpConst64 { 13322 break 13323 } 13324 c := v_0.AuxInt 13325 v_1 := v.Args[1] 13326 if v_1.Op != OpConst64 { 13327 break 13328 } 13329 d := v_1.AuxInt 13330 v.reset(OpConst64) 13331 v.AuxInt = c * d 13332 return true 13333 } 13334 // match: (Mul64 (Const64 [d]) (Const64 [c])) 13335 // cond: 13336 // result: (Const64 [c*d]) 13337 for { 13338 _ = v.Args[1] 13339 v_0 := v.Args[0] 13340 if v_0.Op != OpConst64 { 13341 break 13342 } 13343 d := v_0.AuxInt 13344 v_1 := v.Args[1] 13345 if v_1.Op != OpConst64 { 13346 break 13347 } 13348 c := v_1.AuxInt 13349 v.reset(OpConst64) 13350 v.AuxInt = c * d 13351 return true 13352 } 13353 // match: (Mul64 (Const64 [1]) x) 13354 // cond: 13355 // result: x 13356 for { 13357 _ = v.Args[1] 13358 v_0 := v.Args[0] 13359 if v_0.Op != OpConst64 { 13360 break 13361 } 13362 if v_0.AuxInt != 1 { 13363 break 13364 } 13365 x := v.Args[1] 13366 v.reset(OpCopy) 13367 v.Type = x.Type 13368 v.AddArg(x) 13369 return true 13370 } 13371 // match: (Mul64 x (Const64 [1])) 13372 // cond: 13373 // result: x 13374 for { 13375 _ = v.Args[1] 13376 x := v.Args[0] 13377 v_1 := v.Args[1] 13378 if v_1.Op != OpConst64 { 13379 break 13380 } 13381 if v_1.AuxInt != 1 { 13382 break 13383 } 13384 v.reset(OpCopy) 13385 v.Type = x.Type 13386 v.AddArg(x) 13387 return true 13388 } 13389 // match: (Mul64 (Const64 [-1]) x) 13390 // cond: 13391 // result: (Neg64 x) 13392 for { 13393 _ = v.Args[1] 13394 v_0 := v.Args[0] 13395 if v_0.Op != OpConst64 { 13396 break 13397 } 13398 if v_0.AuxInt != -1 { 13399 break 13400 } 13401 x := v.Args[1] 13402 v.reset(OpNeg64) 13403 v.AddArg(x) 13404 return true 13405 } 13406 // match: (Mul64 x (Const64 [-1])) 13407 // cond: 13408 // result: (Neg64 x) 13409 for { 13410 _ = v.Args[1] 13411 x := v.Args[0] 13412 v_1 := v.Args[1] 13413 if v_1.Op != OpConst64 { 13414 break 13415 } 13416 if v_1.AuxInt != -1 { 13417 break 13418 } 13419 v.reset(OpNeg64) 13420 v.AddArg(x) 13421 return true 13422 } 13423 // match: (Mul64 <t> n (Const64 [c])) 13424 // cond: isPowerOfTwo(c) 13425 // result: (Lsh64x64 <t> n (Const64 <typ.UInt64> [log2(c)])) 13426 for { 13427 t := v.Type 13428 _ = v.Args[1] 13429 n := v.Args[0] 13430 v_1 := v.Args[1] 13431 if v_1.Op != OpConst64 { 13432 break 13433 } 13434 c := v_1.AuxInt 13435 if !(isPowerOfTwo(c)) { 13436 break 13437 } 13438 v.reset(OpLsh64x64) 13439 v.Type = t 13440 v.AddArg(n) 13441 v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 13442 v0.AuxInt = log2(c) 13443 v.AddArg(v0) 13444 return true 13445 } 13446 // match: (Mul64 <t> (Const64 [c]) n) 13447 // cond: isPowerOfTwo(c) 13448 // result: (Lsh64x64 <t> n (Const64 <typ.UInt64> [log2(c)])) 13449 for { 13450 t := v.Type 13451 _ = v.Args[1] 13452 v_0 := v.Args[0] 13453 if v_0.Op != OpConst64 { 13454 break 13455 } 13456 c := v_0.AuxInt 13457 n := v.Args[1] 13458 if !(isPowerOfTwo(c)) { 13459 break 13460 } 13461 v.reset(OpLsh64x64) 13462 v.Type = t 13463 v.AddArg(n) 13464 v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 13465 v0.AuxInt = log2(c) 13466 
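// Illustrative note (hand-written, not generated): the power-of-two rules in
// this function, together with the negated forms that follow, strength-reduce
// a multiplication by a constant power of two into a shift, and a
// multiplication by a negative power of two into a negated shift (the latter
// only when t.IsSigned(), as the condition above states). In plain Go the
// transformation is simply (hypothetical helper, reusing the same
// isPowerOfTwo/log2 helpers this file already calls):
//
//	func exampleMul64ByConst(n, c int64) int64 {
//		switch {
//		case isPowerOfTwo(c):
//			return n << uint(log2(c)) // (Lsh64x64 n (Const64 [log2(c)]))
//		case isPowerOfTwo(-c):
//			return -(n << uint(log2(-c))) // (Neg64 (Lsh64x64 n (Const64 [log2(-c)])))
//		default:
//			return n * c
//		}
//	}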
v.AddArg(v0) 13467 return true 13468 } 13469 // match: (Mul64 <t> n (Const64 [c])) 13470 // cond: t.IsSigned() && isPowerOfTwo(-c) 13471 // result: (Neg64 (Lsh64x64 <t> n (Const64 <typ.UInt64> [log2(-c)]))) 13472 for { 13473 t := v.Type 13474 _ = v.Args[1] 13475 n := v.Args[0] 13476 v_1 := v.Args[1] 13477 if v_1.Op != OpConst64 { 13478 break 13479 } 13480 c := v_1.AuxInt 13481 if !(t.IsSigned() && isPowerOfTwo(-c)) { 13482 break 13483 } 13484 v.reset(OpNeg64) 13485 v0 := b.NewValue0(v.Pos, OpLsh64x64, t) 13486 v0.AddArg(n) 13487 v1 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 13488 v1.AuxInt = log2(-c) 13489 v0.AddArg(v1) 13490 v.AddArg(v0) 13491 return true 13492 } 13493 // match: (Mul64 <t> (Const64 [c]) n) 13494 // cond: t.IsSigned() && isPowerOfTwo(-c) 13495 // result: (Neg64 (Lsh64x64 <t> n (Const64 <typ.UInt64> [log2(-c)]))) 13496 for { 13497 t := v.Type 13498 _ = v.Args[1] 13499 v_0 := v.Args[0] 13500 if v_0.Op != OpConst64 { 13501 break 13502 } 13503 c := v_0.AuxInt 13504 n := v.Args[1] 13505 if !(t.IsSigned() && isPowerOfTwo(-c)) { 13506 break 13507 } 13508 v.reset(OpNeg64) 13509 v0 := b.NewValue0(v.Pos, OpLsh64x64, t) 13510 v0.AddArg(n) 13511 v1 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 13512 v1.AuxInt = log2(-c) 13513 v0.AddArg(v1) 13514 v.AddArg(v0) 13515 return true 13516 } 13517 return false 13518 } 13519 func rewriteValuegeneric_OpMul64_10(v *Value) bool { 13520 b := v.Block 13521 _ = b 13522 // match: (Mul64 (Const64 <t> [c]) (Add64 <t> (Const64 <t> [d]) x)) 13523 // cond: 13524 // result: (Add64 (Const64 <t> [c*d]) (Mul64 <t> (Const64 <t> [c]) x)) 13525 for { 13526 _ = v.Args[1] 13527 v_0 := v.Args[0] 13528 if v_0.Op != OpConst64 { 13529 break 13530 } 13531 t := v_0.Type 13532 c := v_0.AuxInt 13533 v_1 := v.Args[1] 13534 if v_1.Op != OpAdd64 { 13535 break 13536 } 13537 if v_1.Type != t { 13538 break 13539 } 13540 _ = v_1.Args[1] 13541 v_1_0 := v_1.Args[0] 13542 if v_1_0.Op != OpConst64 { 13543 break 13544 } 13545 if v_1_0.Type != t { 13546 break 13547 } 13548 d := v_1_0.AuxInt 13549 x := v_1.Args[1] 13550 v.reset(OpAdd64) 13551 v0 := b.NewValue0(v.Pos, OpConst64, t) 13552 v0.AuxInt = c * d 13553 v.AddArg(v0) 13554 v1 := b.NewValue0(v.Pos, OpMul64, t) 13555 v2 := b.NewValue0(v.Pos, OpConst64, t) 13556 v2.AuxInt = c 13557 v1.AddArg(v2) 13558 v1.AddArg(x) 13559 v.AddArg(v1) 13560 return true 13561 } 13562 // match: (Mul64 (Const64 <t> [c]) (Add64 <t> x (Const64 <t> [d]))) 13563 // cond: 13564 // result: (Add64 (Const64 <t> [c*d]) (Mul64 <t> (Const64 <t> [c]) x)) 13565 for { 13566 _ = v.Args[1] 13567 v_0 := v.Args[0] 13568 if v_0.Op != OpConst64 { 13569 break 13570 } 13571 t := v_0.Type 13572 c := v_0.AuxInt 13573 v_1 := v.Args[1] 13574 if v_1.Op != OpAdd64 { 13575 break 13576 } 13577 if v_1.Type != t { 13578 break 13579 } 13580 _ = v_1.Args[1] 13581 x := v_1.Args[0] 13582 v_1_1 := v_1.Args[1] 13583 if v_1_1.Op != OpConst64 { 13584 break 13585 } 13586 if v_1_1.Type != t { 13587 break 13588 } 13589 d := v_1_1.AuxInt 13590 v.reset(OpAdd64) 13591 v0 := b.NewValue0(v.Pos, OpConst64, t) 13592 v0.AuxInt = c * d 13593 v.AddArg(v0) 13594 v1 := b.NewValue0(v.Pos, OpMul64, t) 13595 v2 := b.NewValue0(v.Pos, OpConst64, t) 13596 v2.AuxInt = c 13597 v1.AddArg(v2) 13598 v1.AddArg(x) 13599 v.AddArg(v1) 13600 return true 13601 } 13602 // match: (Mul64 (Add64 <t> (Const64 <t> [d]) x) (Const64 <t> [c])) 13603 // cond: 13604 // result: (Add64 (Const64 <t> [c*d]) (Mul64 <t> (Const64 <t> [c]) x)) 13605 for { 13606 _ = v.Args[1] 13607 v_0 := v.Args[0] 13608 if v_0.Op != OpAdd64 { 13609 break 
13610 } 13611 t := v_0.Type 13612 _ = v_0.Args[1] 13613 v_0_0 := v_0.Args[0] 13614 if v_0_0.Op != OpConst64 { 13615 break 13616 } 13617 if v_0_0.Type != t { 13618 break 13619 } 13620 d := v_0_0.AuxInt 13621 x := v_0.Args[1] 13622 v_1 := v.Args[1] 13623 if v_1.Op != OpConst64 { 13624 break 13625 } 13626 if v_1.Type != t { 13627 break 13628 } 13629 c := v_1.AuxInt 13630 v.reset(OpAdd64) 13631 v0 := b.NewValue0(v.Pos, OpConst64, t) 13632 v0.AuxInt = c * d 13633 v.AddArg(v0) 13634 v1 := b.NewValue0(v.Pos, OpMul64, t) 13635 v2 := b.NewValue0(v.Pos, OpConst64, t) 13636 v2.AuxInt = c 13637 v1.AddArg(v2) 13638 v1.AddArg(x) 13639 v.AddArg(v1) 13640 return true 13641 } 13642 // match: (Mul64 (Add64 <t> x (Const64 <t> [d])) (Const64 <t> [c])) 13643 // cond: 13644 // result: (Add64 (Const64 <t> [c*d]) (Mul64 <t> (Const64 <t> [c]) x)) 13645 for { 13646 _ = v.Args[1] 13647 v_0 := v.Args[0] 13648 if v_0.Op != OpAdd64 { 13649 break 13650 } 13651 t := v_0.Type 13652 _ = v_0.Args[1] 13653 x := v_0.Args[0] 13654 v_0_1 := v_0.Args[1] 13655 if v_0_1.Op != OpConst64 { 13656 break 13657 } 13658 if v_0_1.Type != t { 13659 break 13660 } 13661 d := v_0_1.AuxInt 13662 v_1 := v.Args[1] 13663 if v_1.Op != OpConst64 { 13664 break 13665 } 13666 if v_1.Type != t { 13667 break 13668 } 13669 c := v_1.AuxInt 13670 v.reset(OpAdd64) 13671 v0 := b.NewValue0(v.Pos, OpConst64, t) 13672 v0.AuxInt = c * d 13673 v.AddArg(v0) 13674 v1 := b.NewValue0(v.Pos, OpMul64, t) 13675 v2 := b.NewValue0(v.Pos, OpConst64, t) 13676 v2.AuxInt = c 13677 v1.AddArg(v2) 13678 v1.AddArg(x) 13679 v.AddArg(v1) 13680 return true 13681 } 13682 // match: (Mul64 (Const64 [0]) _) 13683 // cond: 13684 // result: (Const64 [0]) 13685 for { 13686 _ = v.Args[1] 13687 v_0 := v.Args[0] 13688 if v_0.Op != OpConst64 { 13689 break 13690 } 13691 if v_0.AuxInt != 0 { 13692 break 13693 } 13694 v.reset(OpConst64) 13695 v.AuxInt = 0 13696 return true 13697 } 13698 // match: (Mul64 _ (Const64 [0])) 13699 // cond: 13700 // result: (Const64 [0]) 13701 for { 13702 _ = v.Args[1] 13703 v_1 := v.Args[1] 13704 if v_1.Op != OpConst64 { 13705 break 13706 } 13707 if v_1.AuxInt != 0 { 13708 break 13709 } 13710 v.reset(OpConst64) 13711 v.AuxInt = 0 13712 return true 13713 } 13714 // match: (Mul64 (Const64 <t> [c]) (Mul64 (Const64 <t> [d]) x)) 13715 // cond: 13716 // result: (Mul64 (Const64 <t> [c*d]) x) 13717 for { 13718 _ = v.Args[1] 13719 v_0 := v.Args[0] 13720 if v_0.Op != OpConst64 { 13721 break 13722 } 13723 t := v_0.Type 13724 c := v_0.AuxInt 13725 v_1 := v.Args[1] 13726 if v_1.Op != OpMul64 { 13727 break 13728 } 13729 _ = v_1.Args[1] 13730 v_1_0 := v_1.Args[0] 13731 if v_1_0.Op != OpConst64 { 13732 break 13733 } 13734 if v_1_0.Type != t { 13735 break 13736 } 13737 d := v_1_0.AuxInt 13738 x := v_1.Args[1] 13739 v.reset(OpMul64) 13740 v0 := b.NewValue0(v.Pos, OpConst64, t) 13741 v0.AuxInt = c * d 13742 v.AddArg(v0) 13743 v.AddArg(x) 13744 return true 13745 } 13746 // match: (Mul64 (Const64 <t> [c]) (Mul64 x (Const64 <t> [d]))) 13747 // cond: 13748 // result: (Mul64 (Const64 <t> [c*d]) x) 13749 for { 13750 _ = v.Args[1] 13751 v_0 := v.Args[0] 13752 if v_0.Op != OpConst64 { 13753 break 13754 } 13755 t := v_0.Type 13756 c := v_0.AuxInt 13757 v_1 := v.Args[1] 13758 if v_1.Op != OpMul64 { 13759 break 13760 } 13761 _ = v_1.Args[1] 13762 x := v_1.Args[0] 13763 v_1_1 := v_1.Args[1] 13764 if v_1_1.Op != OpConst64 { 13765 break 13766 } 13767 if v_1_1.Type != t { 13768 break 13769 } 13770 d := v_1_1.AuxInt 13771 v.reset(OpMul64) 13772 v0 := b.NewValue0(v.Pos, OpConst64, t) 13773 v0.AuxInt = c 
* d 13774 v.AddArg(v0) 13775 v.AddArg(x) 13776 return true 13777 } 13778 // match: (Mul64 (Mul64 (Const64 <t> [d]) x) (Const64 <t> [c])) 13779 // cond: 13780 // result: (Mul64 (Const64 <t> [c*d]) x) 13781 for { 13782 _ = v.Args[1] 13783 v_0 := v.Args[0] 13784 if v_0.Op != OpMul64 { 13785 break 13786 } 13787 _ = v_0.Args[1] 13788 v_0_0 := v_0.Args[0] 13789 if v_0_0.Op != OpConst64 { 13790 break 13791 } 13792 t := v_0_0.Type 13793 d := v_0_0.AuxInt 13794 x := v_0.Args[1] 13795 v_1 := v.Args[1] 13796 if v_1.Op != OpConst64 { 13797 break 13798 } 13799 if v_1.Type != t { 13800 break 13801 } 13802 c := v_1.AuxInt 13803 v.reset(OpMul64) 13804 v0 := b.NewValue0(v.Pos, OpConst64, t) 13805 v0.AuxInt = c * d 13806 v.AddArg(v0) 13807 v.AddArg(x) 13808 return true 13809 } 13810 // match: (Mul64 (Mul64 x (Const64 <t> [d])) (Const64 <t> [c])) 13811 // cond: 13812 // result: (Mul64 (Const64 <t> [c*d]) x) 13813 for { 13814 _ = v.Args[1] 13815 v_0 := v.Args[0] 13816 if v_0.Op != OpMul64 { 13817 break 13818 } 13819 _ = v_0.Args[1] 13820 x := v_0.Args[0] 13821 v_0_1 := v_0.Args[1] 13822 if v_0_1.Op != OpConst64 { 13823 break 13824 } 13825 t := v_0_1.Type 13826 d := v_0_1.AuxInt 13827 v_1 := v.Args[1] 13828 if v_1.Op != OpConst64 { 13829 break 13830 } 13831 if v_1.Type != t { 13832 break 13833 } 13834 c := v_1.AuxInt 13835 v.reset(OpMul64) 13836 v0 := b.NewValue0(v.Pos, OpConst64, t) 13837 v0.AuxInt = c * d 13838 v.AddArg(v0) 13839 v.AddArg(x) 13840 return true 13841 } 13842 return false 13843 } 13844 func rewriteValuegeneric_OpMul64F_0(v *Value) bool { 13845 // match: (Mul64F (Const64F [c]) (Const64F [d])) 13846 // cond: 13847 // result: (Const64F [f2i(i2f(c) * i2f(d))]) 13848 for { 13849 _ = v.Args[1] 13850 v_0 := v.Args[0] 13851 if v_0.Op != OpConst64F { 13852 break 13853 } 13854 c := v_0.AuxInt 13855 v_1 := v.Args[1] 13856 if v_1.Op != OpConst64F { 13857 break 13858 } 13859 d := v_1.AuxInt 13860 v.reset(OpConst64F) 13861 v.AuxInt = f2i(i2f(c) * i2f(d)) 13862 return true 13863 } 13864 // match: (Mul64F (Const64F [d]) (Const64F [c])) 13865 // cond: 13866 // result: (Const64F [f2i(i2f(c) * i2f(d))]) 13867 for { 13868 _ = v.Args[1] 13869 v_0 := v.Args[0] 13870 if v_0.Op != OpConst64F { 13871 break 13872 } 13873 d := v_0.AuxInt 13874 v_1 := v.Args[1] 13875 if v_1.Op != OpConst64F { 13876 break 13877 } 13878 c := v_1.AuxInt 13879 v.reset(OpConst64F) 13880 v.AuxInt = f2i(i2f(c) * i2f(d)) 13881 return true 13882 } 13883 // match: (Mul64F x (Const64F [f2i(1)])) 13884 // cond: 13885 // result: x 13886 for { 13887 _ = v.Args[1] 13888 x := v.Args[0] 13889 v_1 := v.Args[1] 13890 if v_1.Op != OpConst64F { 13891 break 13892 } 13893 if v_1.AuxInt != f2i(1) { 13894 break 13895 } 13896 v.reset(OpCopy) 13897 v.Type = x.Type 13898 v.AddArg(x) 13899 return true 13900 } 13901 // match: (Mul64F (Const64F [f2i(1)]) x) 13902 // cond: 13903 // result: x 13904 for { 13905 _ = v.Args[1] 13906 v_0 := v.Args[0] 13907 if v_0.Op != OpConst64F { 13908 break 13909 } 13910 if v_0.AuxInt != f2i(1) { 13911 break 13912 } 13913 x := v.Args[1] 13914 v.reset(OpCopy) 13915 v.Type = x.Type 13916 v.AddArg(x) 13917 return true 13918 } 13919 // match: (Mul64F x (Const64F [f2i(-1)])) 13920 // cond: 13921 // result: (Neg64F x) 13922 for { 13923 _ = v.Args[1] 13924 x := v.Args[0] 13925 v_1 := v.Args[1] 13926 if v_1.Op != OpConst64F { 13927 break 13928 } 13929 if v_1.AuxInt != f2i(-1) { 13930 break 13931 } 13932 v.reset(OpNeg64F) 13933 v.AddArg(x) 13934 return true 13935 } 13936 // match: (Mul64F (Const64F [f2i(-1)]) x) 13937 // cond: 13938 // result: 
(Neg64F x) 13939 for { 13940 _ = v.Args[1] 13941 v_0 := v.Args[0] 13942 if v_0.Op != OpConst64F { 13943 break 13944 } 13945 if v_0.AuxInt != f2i(-1) { 13946 break 13947 } 13948 x := v.Args[1] 13949 v.reset(OpNeg64F) 13950 v.AddArg(x) 13951 return true 13952 } 13953 // match: (Mul64F x (Const64F [f2i(2)])) 13954 // cond: 13955 // result: (Add64F x x) 13956 for { 13957 _ = v.Args[1] 13958 x := v.Args[0] 13959 v_1 := v.Args[1] 13960 if v_1.Op != OpConst64F { 13961 break 13962 } 13963 if v_1.AuxInt != f2i(2) { 13964 break 13965 } 13966 v.reset(OpAdd64F) 13967 v.AddArg(x) 13968 v.AddArg(x) 13969 return true 13970 } 13971 // match: (Mul64F (Const64F [f2i(2)]) x) 13972 // cond: 13973 // result: (Add64F x x) 13974 for { 13975 _ = v.Args[1] 13976 v_0 := v.Args[0] 13977 if v_0.Op != OpConst64F { 13978 break 13979 } 13980 if v_0.AuxInt != f2i(2) { 13981 break 13982 } 13983 x := v.Args[1] 13984 v.reset(OpAdd64F) 13985 v.AddArg(x) 13986 v.AddArg(x) 13987 return true 13988 } 13989 return false 13990 } 13991 func rewriteValuegeneric_OpMul8_0(v *Value) bool { 13992 b := v.Block 13993 _ = b 13994 typ := &b.Func.Config.Types 13995 _ = typ 13996 // match: (Mul8 (Const8 [c]) (Const8 [d])) 13997 // cond: 13998 // result: (Const8 [int64(int8(c*d))]) 13999 for { 14000 _ = v.Args[1] 14001 v_0 := v.Args[0] 14002 if v_0.Op != OpConst8 { 14003 break 14004 } 14005 c := v_0.AuxInt 14006 v_1 := v.Args[1] 14007 if v_1.Op != OpConst8 { 14008 break 14009 } 14010 d := v_1.AuxInt 14011 v.reset(OpConst8) 14012 v.AuxInt = int64(int8(c * d)) 14013 return true 14014 } 14015 // match: (Mul8 (Const8 [d]) (Const8 [c])) 14016 // cond: 14017 // result: (Const8 [int64(int8(c*d))]) 14018 for { 14019 _ = v.Args[1] 14020 v_0 := v.Args[0] 14021 if v_0.Op != OpConst8 { 14022 break 14023 } 14024 d := v_0.AuxInt 14025 v_1 := v.Args[1] 14026 if v_1.Op != OpConst8 { 14027 break 14028 } 14029 c := v_1.AuxInt 14030 v.reset(OpConst8) 14031 v.AuxInt = int64(int8(c * d)) 14032 return true 14033 } 14034 // match: (Mul8 (Const8 [1]) x) 14035 // cond: 14036 // result: x 14037 for { 14038 _ = v.Args[1] 14039 v_0 := v.Args[0] 14040 if v_0.Op != OpConst8 { 14041 break 14042 } 14043 if v_0.AuxInt != 1 { 14044 break 14045 } 14046 x := v.Args[1] 14047 v.reset(OpCopy) 14048 v.Type = x.Type 14049 v.AddArg(x) 14050 return true 14051 } 14052 // match: (Mul8 x (Const8 [1])) 14053 // cond: 14054 // result: x 14055 for { 14056 _ = v.Args[1] 14057 x := v.Args[0] 14058 v_1 := v.Args[1] 14059 if v_1.Op != OpConst8 { 14060 break 14061 } 14062 if v_1.AuxInt != 1 { 14063 break 14064 } 14065 v.reset(OpCopy) 14066 v.Type = x.Type 14067 v.AddArg(x) 14068 return true 14069 } 14070 // match: (Mul8 (Const8 [-1]) x) 14071 // cond: 14072 // result: (Neg8 x) 14073 for { 14074 _ = v.Args[1] 14075 v_0 := v.Args[0] 14076 if v_0.Op != OpConst8 { 14077 break 14078 } 14079 if v_0.AuxInt != -1 { 14080 break 14081 } 14082 x := v.Args[1] 14083 v.reset(OpNeg8) 14084 v.AddArg(x) 14085 return true 14086 } 14087 // match: (Mul8 x (Const8 [-1])) 14088 // cond: 14089 // result: (Neg8 x) 14090 for { 14091 _ = v.Args[1] 14092 x := v.Args[0] 14093 v_1 := v.Args[1] 14094 if v_1.Op != OpConst8 { 14095 break 14096 } 14097 if v_1.AuxInt != -1 { 14098 break 14099 } 14100 v.reset(OpNeg8) 14101 v.AddArg(x) 14102 return true 14103 } 14104 // match: (Mul8 <t> n (Const8 [c])) 14105 // cond: isPowerOfTwo(c) 14106 // result: (Lsh8x64 <t> n (Const64 <typ.UInt64> [log2(c)])) 14107 for { 14108 t := v.Type 14109 _ = v.Args[1] 14110 n := v.Args[0] 14111 v_1 := v.Args[1] 14112 if v_1.Op != OpConst8 { 14113 break 
14114 } 14115 c := v_1.AuxInt 14116 if !(isPowerOfTwo(c)) { 14117 break 14118 } 14119 v.reset(OpLsh8x64) 14120 v.Type = t 14121 v.AddArg(n) 14122 v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 14123 v0.AuxInt = log2(c) 14124 v.AddArg(v0) 14125 return true 14126 } 14127 // match: (Mul8 <t> (Const8 [c]) n) 14128 // cond: isPowerOfTwo(c) 14129 // result: (Lsh8x64 <t> n (Const64 <typ.UInt64> [log2(c)])) 14130 for { 14131 t := v.Type 14132 _ = v.Args[1] 14133 v_0 := v.Args[0] 14134 if v_0.Op != OpConst8 { 14135 break 14136 } 14137 c := v_0.AuxInt 14138 n := v.Args[1] 14139 if !(isPowerOfTwo(c)) { 14140 break 14141 } 14142 v.reset(OpLsh8x64) 14143 v.Type = t 14144 v.AddArg(n) 14145 v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 14146 v0.AuxInt = log2(c) 14147 v.AddArg(v0) 14148 return true 14149 } 14150 // match: (Mul8 <t> n (Const8 [c])) 14151 // cond: t.IsSigned() && isPowerOfTwo(-c) 14152 // result: (Neg8 (Lsh8x64 <t> n (Const64 <typ.UInt64> [log2(-c)]))) 14153 for { 14154 t := v.Type 14155 _ = v.Args[1] 14156 n := v.Args[0] 14157 v_1 := v.Args[1] 14158 if v_1.Op != OpConst8 { 14159 break 14160 } 14161 c := v_1.AuxInt 14162 if !(t.IsSigned() && isPowerOfTwo(-c)) { 14163 break 14164 } 14165 v.reset(OpNeg8) 14166 v0 := b.NewValue0(v.Pos, OpLsh8x64, t) 14167 v0.AddArg(n) 14168 v1 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 14169 v1.AuxInt = log2(-c) 14170 v0.AddArg(v1) 14171 v.AddArg(v0) 14172 return true 14173 } 14174 // match: (Mul8 <t> (Const8 [c]) n) 14175 // cond: t.IsSigned() && isPowerOfTwo(-c) 14176 // result: (Neg8 (Lsh8x64 <t> n (Const64 <typ.UInt64> [log2(-c)]))) 14177 for { 14178 t := v.Type 14179 _ = v.Args[1] 14180 v_0 := v.Args[0] 14181 if v_0.Op != OpConst8 { 14182 break 14183 } 14184 c := v_0.AuxInt 14185 n := v.Args[1] 14186 if !(t.IsSigned() && isPowerOfTwo(-c)) { 14187 break 14188 } 14189 v.reset(OpNeg8) 14190 v0 := b.NewValue0(v.Pos, OpLsh8x64, t) 14191 v0.AddArg(n) 14192 v1 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 14193 v1.AuxInt = log2(-c) 14194 v0.AddArg(v1) 14195 v.AddArg(v0) 14196 return true 14197 } 14198 return false 14199 } 14200 func rewriteValuegeneric_OpMul8_10(v *Value) bool { 14201 b := v.Block 14202 _ = b 14203 // match: (Mul8 (Const8 [0]) _) 14204 // cond: 14205 // result: (Const8 [0]) 14206 for { 14207 _ = v.Args[1] 14208 v_0 := v.Args[0] 14209 if v_0.Op != OpConst8 { 14210 break 14211 } 14212 if v_0.AuxInt != 0 { 14213 break 14214 } 14215 v.reset(OpConst8) 14216 v.AuxInt = 0 14217 return true 14218 } 14219 // match: (Mul8 _ (Const8 [0])) 14220 // cond: 14221 // result: (Const8 [0]) 14222 for { 14223 _ = v.Args[1] 14224 v_1 := v.Args[1] 14225 if v_1.Op != OpConst8 { 14226 break 14227 } 14228 if v_1.AuxInt != 0 { 14229 break 14230 } 14231 v.reset(OpConst8) 14232 v.AuxInt = 0 14233 return true 14234 } 14235 // match: (Mul8 (Const8 <t> [c]) (Mul8 (Const8 <t> [d]) x)) 14236 // cond: 14237 // result: (Mul8 (Const8 <t> [int64(int8(c*d))]) x) 14238 for { 14239 _ = v.Args[1] 14240 v_0 := v.Args[0] 14241 if v_0.Op != OpConst8 { 14242 break 14243 } 14244 t := v_0.Type 14245 c := v_0.AuxInt 14246 v_1 := v.Args[1] 14247 if v_1.Op != OpMul8 { 14248 break 14249 } 14250 _ = v_1.Args[1] 14251 v_1_0 := v_1.Args[0] 14252 if v_1_0.Op != OpConst8 { 14253 break 14254 } 14255 if v_1_0.Type != t { 14256 break 14257 } 14258 d := v_1_0.AuxInt 14259 x := v_1.Args[1] 14260 v.reset(OpMul8) 14261 v0 := b.NewValue0(v.Pos, OpConst8, t) 14262 v0.AuxInt = int64(int8(c * d)) 14263 v.AddArg(v0) 14264 v.AddArg(x) 14265 return true 14266 } 14267 // match: (Mul8 (Const8 <t> [c]) (Mul8 x 
(Const8 <t> [d]))) 14268 // cond: 14269 // result: (Mul8 (Const8 <t> [int64(int8(c*d))]) x) 14270 for { 14271 _ = v.Args[1] 14272 v_0 := v.Args[0] 14273 if v_0.Op != OpConst8 { 14274 break 14275 } 14276 t := v_0.Type 14277 c := v_0.AuxInt 14278 v_1 := v.Args[1] 14279 if v_1.Op != OpMul8 { 14280 break 14281 } 14282 _ = v_1.Args[1] 14283 x := v_1.Args[0] 14284 v_1_1 := v_1.Args[1] 14285 if v_1_1.Op != OpConst8 { 14286 break 14287 } 14288 if v_1_1.Type != t { 14289 break 14290 } 14291 d := v_1_1.AuxInt 14292 v.reset(OpMul8) 14293 v0 := b.NewValue0(v.Pos, OpConst8, t) 14294 v0.AuxInt = int64(int8(c * d)) 14295 v.AddArg(v0) 14296 v.AddArg(x) 14297 return true 14298 } 14299 // match: (Mul8 (Mul8 (Const8 <t> [d]) x) (Const8 <t> [c])) 14300 // cond: 14301 // result: (Mul8 (Const8 <t> [int64(int8(c*d))]) x) 14302 for { 14303 _ = v.Args[1] 14304 v_0 := v.Args[0] 14305 if v_0.Op != OpMul8 { 14306 break 14307 } 14308 _ = v_0.Args[1] 14309 v_0_0 := v_0.Args[0] 14310 if v_0_0.Op != OpConst8 { 14311 break 14312 } 14313 t := v_0_0.Type 14314 d := v_0_0.AuxInt 14315 x := v_0.Args[1] 14316 v_1 := v.Args[1] 14317 if v_1.Op != OpConst8 { 14318 break 14319 } 14320 if v_1.Type != t { 14321 break 14322 } 14323 c := v_1.AuxInt 14324 v.reset(OpMul8) 14325 v0 := b.NewValue0(v.Pos, OpConst8, t) 14326 v0.AuxInt = int64(int8(c * d)) 14327 v.AddArg(v0) 14328 v.AddArg(x) 14329 return true 14330 } 14331 // match: (Mul8 (Mul8 x (Const8 <t> [d])) (Const8 <t> [c])) 14332 // cond: 14333 // result: (Mul8 (Const8 <t> [int64(int8(c*d))]) x) 14334 for { 14335 _ = v.Args[1] 14336 v_0 := v.Args[0] 14337 if v_0.Op != OpMul8 { 14338 break 14339 } 14340 _ = v_0.Args[1] 14341 x := v_0.Args[0] 14342 v_0_1 := v_0.Args[1] 14343 if v_0_1.Op != OpConst8 { 14344 break 14345 } 14346 t := v_0_1.Type 14347 d := v_0_1.AuxInt 14348 v_1 := v.Args[1] 14349 if v_1.Op != OpConst8 { 14350 break 14351 } 14352 if v_1.Type != t { 14353 break 14354 } 14355 c := v_1.AuxInt 14356 v.reset(OpMul8) 14357 v0 := b.NewValue0(v.Pos, OpConst8, t) 14358 v0.AuxInt = int64(int8(c * d)) 14359 v.AddArg(v0) 14360 v.AddArg(x) 14361 return true 14362 } 14363 return false 14364 } 14365 func rewriteValuegeneric_OpNeg16_0(v *Value) bool { 14366 // match: (Neg16 (Const16 [c])) 14367 // cond: 14368 // result: (Const16 [int64(-int16(c))]) 14369 for { 14370 v_0 := v.Args[0] 14371 if v_0.Op != OpConst16 { 14372 break 14373 } 14374 c := v_0.AuxInt 14375 v.reset(OpConst16) 14376 v.AuxInt = int64(-int16(c)) 14377 return true 14378 } 14379 // match: (Neg16 (Sub16 x y)) 14380 // cond: 14381 // result: (Sub16 y x) 14382 for { 14383 v_0 := v.Args[0] 14384 if v_0.Op != OpSub16 { 14385 break 14386 } 14387 _ = v_0.Args[1] 14388 x := v_0.Args[0] 14389 y := v_0.Args[1] 14390 v.reset(OpSub16) 14391 v.AddArg(y) 14392 v.AddArg(x) 14393 return true 14394 } 14395 return false 14396 } 14397 func rewriteValuegeneric_OpNeg32_0(v *Value) bool { 14398 // match: (Neg32 (Const32 [c])) 14399 // cond: 14400 // result: (Const32 [int64(-int32(c))]) 14401 for { 14402 v_0 := v.Args[0] 14403 if v_0.Op != OpConst32 { 14404 break 14405 } 14406 c := v_0.AuxInt 14407 v.reset(OpConst32) 14408 v.AuxInt = int64(-int32(c)) 14409 return true 14410 } 14411 // match: (Neg32 (Sub32 x y)) 14412 // cond: 14413 // result: (Sub32 y x) 14414 for { 14415 v_0 := v.Args[0] 14416 if v_0.Op != OpSub32 { 14417 break 14418 } 14419 _ = v_0.Args[1] 14420 x := v_0.Args[0] 14421 y := v_0.Args[1] 14422 v.reset(OpSub32) 14423 v.AddArg(y) 14424 v.AddArg(x) 14425 return true 14426 } 14427 return false 14428 } 14429 func 
rewriteValuegeneric_OpNeg32F_0(v *Value) bool { 14430 // match: (Neg32F (Const32F [c])) 14431 // cond: i2f(c) != 0 14432 // result: (Const32F [f2i(-i2f(c))]) 14433 for { 14434 v_0 := v.Args[0] 14435 if v_0.Op != OpConst32F { 14436 break 14437 } 14438 c := v_0.AuxInt 14439 if !(i2f(c) != 0) { 14440 break 14441 } 14442 v.reset(OpConst32F) 14443 v.AuxInt = f2i(-i2f(c)) 14444 return true 14445 } 14446 return false 14447 } 14448 func rewriteValuegeneric_OpNeg64_0(v *Value) bool { 14449 // match: (Neg64 (Const64 [c])) 14450 // cond: 14451 // result: (Const64 [-c]) 14452 for { 14453 v_0 := v.Args[0] 14454 if v_0.Op != OpConst64 { 14455 break 14456 } 14457 c := v_0.AuxInt 14458 v.reset(OpConst64) 14459 v.AuxInt = -c 14460 return true 14461 } 14462 // match: (Neg64 (Sub64 x y)) 14463 // cond: 14464 // result: (Sub64 y x) 14465 for { 14466 v_0 := v.Args[0] 14467 if v_0.Op != OpSub64 { 14468 break 14469 } 14470 _ = v_0.Args[1] 14471 x := v_0.Args[0] 14472 y := v_0.Args[1] 14473 v.reset(OpSub64) 14474 v.AddArg(y) 14475 v.AddArg(x) 14476 return true 14477 } 14478 return false 14479 } 14480 func rewriteValuegeneric_OpNeg64F_0(v *Value) bool { 14481 // match: (Neg64F (Const64F [c])) 14482 // cond: i2f(c) != 0 14483 // result: (Const64F [f2i(-i2f(c))]) 14484 for { 14485 v_0 := v.Args[0] 14486 if v_0.Op != OpConst64F { 14487 break 14488 } 14489 c := v_0.AuxInt 14490 if !(i2f(c) != 0) { 14491 break 14492 } 14493 v.reset(OpConst64F) 14494 v.AuxInt = f2i(-i2f(c)) 14495 return true 14496 } 14497 return false 14498 } 14499 func rewriteValuegeneric_OpNeg8_0(v *Value) bool { 14500 // match: (Neg8 (Const8 [c])) 14501 // cond: 14502 // result: (Const8 [int64( -int8(c))]) 14503 for { 14504 v_0 := v.Args[0] 14505 if v_0.Op != OpConst8 { 14506 break 14507 } 14508 c := v_0.AuxInt 14509 v.reset(OpConst8) 14510 v.AuxInt = int64(-int8(c)) 14511 return true 14512 } 14513 // match: (Neg8 (Sub8 x y)) 14514 // cond: 14515 // result: (Sub8 y x) 14516 for { 14517 v_0 := v.Args[0] 14518 if v_0.Op != OpSub8 { 14519 break 14520 } 14521 _ = v_0.Args[1] 14522 x := v_0.Args[0] 14523 y := v_0.Args[1] 14524 v.reset(OpSub8) 14525 v.AddArg(y) 14526 v.AddArg(x) 14527 return true 14528 } 14529 return false 14530 } 14531 func rewriteValuegeneric_OpNeq16_0(v *Value) bool { 14532 b := v.Block 14533 _ = b 14534 // match: (Neq16 x x) 14535 // cond: 14536 // result: (ConstBool [0]) 14537 for { 14538 _ = v.Args[1] 14539 x := v.Args[0] 14540 if x != v.Args[1] { 14541 break 14542 } 14543 v.reset(OpConstBool) 14544 v.AuxInt = 0 14545 return true 14546 } 14547 // match: (Neq16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x)) 14548 // cond: 14549 // result: (Neq16 (Const16 <t> [int64(int16(c-d))]) x) 14550 for { 14551 _ = v.Args[1] 14552 v_0 := v.Args[0] 14553 if v_0.Op != OpConst16 { 14554 break 14555 } 14556 t := v_0.Type 14557 c := v_0.AuxInt 14558 v_1 := v.Args[1] 14559 if v_1.Op != OpAdd16 { 14560 break 14561 } 14562 _ = v_1.Args[1] 14563 v_1_0 := v_1.Args[0] 14564 if v_1_0.Op != OpConst16 { 14565 break 14566 } 14567 if v_1_0.Type != t { 14568 break 14569 } 14570 d := v_1_0.AuxInt 14571 x := v_1.Args[1] 14572 v.reset(OpNeq16) 14573 v0 := b.NewValue0(v.Pos, OpConst16, t) 14574 v0.AuxInt = int64(int16(c - d)) 14575 v.AddArg(v0) 14576 v.AddArg(x) 14577 return true 14578 } 14579 // match: (Neq16 (Const16 <t> [c]) (Add16 x (Const16 <t> [d]))) 14580 // cond: 14581 // result: (Neq16 (Const16 <t> [int64(int16(c-d))]) x) 14582 for { 14583 _ = v.Args[1] 14584 v_0 := v.Args[0] 14585 if v_0.Op != OpConst16 { 14586 break 14587 } 14588 t := v_0.Type 14589 c := 
v_0.AuxInt 14590 v_1 := v.Args[1] 14591 if v_1.Op != OpAdd16 { 14592 break 14593 } 14594 _ = v_1.Args[1] 14595 x := v_1.Args[0] 14596 v_1_1 := v_1.Args[1] 14597 if v_1_1.Op != OpConst16 { 14598 break 14599 } 14600 if v_1_1.Type != t { 14601 break 14602 } 14603 d := v_1_1.AuxInt 14604 v.reset(OpNeq16) 14605 v0 := b.NewValue0(v.Pos, OpConst16, t) 14606 v0.AuxInt = int64(int16(c - d)) 14607 v.AddArg(v0) 14608 v.AddArg(x) 14609 return true 14610 } 14611 // match: (Neq16 (Add16 (Const16 <t> [d]) x) (Const16 <t> [c])) 14612 // cond: 14613 // result: (Neq16 (Const16 <t> [int64(int16(c-d))]) x) 14614 for { 14615 _ = v.Args[1] 14616 v_0 := v.Args[0] 14617 if v_0.Op != OpAdd16 { 14618 break 14619 } 14620 _ = v_0.Args[1] 14621 v_0_0 := v_0.Args[0] 14622 if v_0_0.Op != OpConst16 { 14623 break 14624 } 14625 t := v_0_0.Type 14626 d := v_0_0.AuxInt 14627 x := v_0.Args[1] 14628 v_1 := v.Args[1] 14629 if v_1.Op != OpConst16 { 14630 break 14631 } 14632 if v_1.Type != t { 14633 break 14634 } 14635 c := v_1.AuxInt 14636 v.reset(OpNeq16) 14637 v0 := b.NewValue0(v.Pos, OpConst16, t) 14638 v0.AuxInt = int64(int16(c - d)) 14639 v.AddArg(v0) 14640 v.AddArg(x) 14641 return true 14642 } 14643 // match: (Neq16 (Add16 x (Const16 <t> [d])) (Const16 <t> [c])) 14644 // cond: 14645 // result: (Neq16 (Const16 <t> [int64(int16(c-d))]) x) 14646 for { 14647 _ = v.Args[1] 14648 v_0 := v.Args[0] 14649 if v_0.Op != OpAdd16 { 14650 break 14651 } 14652 _ = v_0.Args[1] 14653 x := v_0.Args[0] 14654 v_0_1 := v_0.Args[1] 14655 if v_0_1.Op != OpConst16 { 14656 break 14657 } 14658 t := v_0_1.Type 14659 d := v_0_1.AuxInt 14660 v_1 := v.Args[1] 14661 if v_1.Op != OpConst16 { 14662 break 14663 } 14664 if v_1.Type != t { 14665 break 14666 } 14667 c := v_1.AuxInt 14668 v.reset(OpNeq16) 14669 v0 := b.NewValue0(v.Pos, OpConst16, t) 14670 v0.AuxInt = int64(int16(c - d)) 14671 v.AddArg(v0) 14672 v.AddArg(x) 14673 return true 14674 } 14675 // match: (Neq16 (Const16 [c]) (Const16 [d])) 14676 // cond: 14677 // result: (ConstBool [b2i(c != d)]) 14678 for { 14679 _ = v.Args[1] 14680 v_0 := v.Args[0] 14681 if v_0.Op != OpConst16 { 14682 break 14683 } 14684 c := v_0.AuxInt 14685 v_1 := v.Args[1] 14686 if v_1.Op != OpConst16 { 14687 break 14688 } 14689 d := v_1.AuxInt 14690 v.reset(OpConstBool) 14691 v.AuxInt = b2i(c != d) 14692 return true 14693 } 14694 // match: (Neq16 (Const16 [d]) (Const16 [c])) 14695 // cond: 14696 // result: (ConstBool [b2i(c != d)]) 14697 for { 14698 _ = v.Args[1] 14699 v_0 := v.Args[0] 14700 if v_0.Op != OpConst16 { 14701 break 14702 } 14703 d := v_0.AuxInt 14704 v_1 := v.Args[1] 14705 if v_1.Op != OpConst16 { 14706 break 14707 } 14708 c := v_1.AuxInt 14709 v.reset(OpConstBool) 14710 v.AuxInt = b2i(c != d) 14711 return true 14712 } 14713 return false 14714 } 14715 func rewriteValuegeneric_OpNeq32_0(v *Value) bool { 14716 b := v.Block 14717 _ = b 14718 // match: (Neq32 x x) 14719 // cond: 14720 // result: (ConstBool [0]) 14721 for { 14722 _ = v.Args[1] 14723 x := v.Args[0] 14724 if x != v.Args[1] { 14725 break 14726 } 14727 v.reset(OpConstBool) 14728 v.AuxInt = 0 14729 return true 14730 } 14731 // match: (Neq32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x)) 14732 // cond: 14733 // result: (Neq32 (Const32 <t> [int64(int32(c-d))]) x) 14734 for { 14735 _ = v.Args[1] 14736 v_0 := v.Args[0] 14737 if v_0.Op != OpConst32 { 14738 break 14739 } 14740 t := v_0.Type 14741 c := v_0.AuxInt 14742 v_1 := v.Args[1] 14743 if v_1.Op != OpAdd32 { 14744 break 14745 } 14746 _ = v_1.Args[1] 14747 v_1_0 := v_1.Args[0] 14748 if v_1_0.Op != OpConst32 { 
14749 break 14750 } 14751 if v_1_0.Type != t { 14752 break 14753 } 14754 d := v_1_0.AuxInt 14755 x := v_1.Args[1] 14756 v.reset(OpNeq32) 14757 v0 := b.NewValue0(v.Pos, OpConst32, t) 14758 v0.AuxInt = int64(int32(c - d)) 14759 v.AddArg(v0) 14760 v.AddArg(x) 14761 return true 14762 } 14763 // match: (Neq32 (Const32 <t> [c]) (Add32 x (Const32 <t> [d]))) 14764 // cond: 14765 // result: (Neq32 (Const32 <t> [int64(int32(c-d))]) x) 14766 for { 14767 _ = v.Args[1] 14768 v_0 := v.Args[0] 14769 if v_0.Op != OpConst32 { 14770 break 14771 } 14772 t := v_0.Type 14773 c := v_0.AuxInt 14774 v_1 := v.Args[1] 14775 if v_1.Op != OpAdd32 { 14776 break 14777 } 14778 _ = v_1.Args[1] 14779 x := v_1.Args[0] 14780 v_1_1 := v_1.Args[1] 14781 if v_1_1.Op != OpConst32 { 14782 break 14783 } 14784 if v_1_1.Type != t { 14785 break 14786 } 14787 d := v_1_1.AuxInt 14788 v.reset(OpNeq32) 14789 v0 := b.NewValue0(v.Pos, OpConst32, t) 14790 v0.AuxInt = int64(int32(c - d)) 14791 v.AddArg(v0) 14792 v.AddArg(x) 14793 return true 14794 } 14795 // match: (Neq32 (Add32 (Const32 <t> [d]) x) (Const32 <t> [c])) 14796 // cond: 14797 // result: (Neq32 (Const32 <t> [int64(int32(c-d))]) x) 14798 for { 14799 _ = v.Args[1] 14800 v_0 := v.Args[0] 14801 if v_0.Op != OpAdd32 { 14802 break 14803 } 14804 _ = v_0.Args[1] 14805 v_0_0 := v_0.Args[0] 14806 if v_0_0.Op != OpConst32 { 14807 break 14808 } 14809 t := v_0_0.Type 14810 d := v_0_0.AuxInt 14811 x := v_0.Args[1] 14812 v_1 := v.Args[1] 14813 if v_1.Op != OpConst32 { 14814 break 14815 } 14816 if v_1.Type != t { 14817 break 14818 } 14819 c := v_1.AuxInt 14820 v.reset(OpNeq32) 14821 v0 := b.NewValue0(v.Pos, OpConst32, t) 14822 v0.AuxInt = int64(int32(c - d)) 14823 v.AddArg(v0) 14824 v.AddArg(x) 14825 return true 14826 } 14827 // match: (Neq32 (Add32 x (Const32 <t> [d])) (Const32 <t> [c])) 14828 // cond: 14829 // result: (Neq32 (Const32 <t> [int64(int32(c-d))]) x) 14830 for { 14831 _ = v.Args[1] 14832 v_0 := v.Args[0] 14833 if v_0.Op != OpAdd32 { 14834 break 14835 } 14836 _ = v_0.Args[1] 14837 x := v_0.Args[0] 14838 v_0_1 := v_0.Args[1] 14839 if v_0_1.Op != OpConst32 { 14840 break 14841 } 14842 t := v_0_1.Type 14843 d := v_0_1.AuxInt 14844 v_1 := v.Args[1] 14845 if v_1.Op != OpConst32 { 14846 break 14847 } 14848 if v_1.Type != t { 14849 break 14850 } 14851 c := v_1.AuxInt 14852 v.reset(OpNeq32) 14853 v0 := b.NewValue0(v.Pos, OpConst32, t) 14854 v0.AuxInt = int64(int32(c - d)) 14855 v.AddArg(v0) 14856 v.AddArg(x) 14857 return true 14858 } 14859 // match: (Neq32 (Const32 [c]) (Const32 [d])) 14860 // cond: 14861 // result: (ConstBool [b2i(c != d)]) 14862 for { 14863 _ = v.Args[1] 14864 v_0 := v.Args[0] 14865 if v_0.Op != OpConst32 { 14866 break 14867 } 14868 c := v_0.AuxInt 14869 v_1 := v.Args[1] 14870 if v_1.Op != OpConst32 { 14871 break 14872 } 14873 d := v_1.AuxInt 14874 v.reset(OpConstBool) 14875 v.AuxInt = b2i(c != d) 14876 return true 14877 } 14878 // match: (Neq32 (Const32 [d]) (Const32 [c])) 14879 // cond: 14880 // result: (ConstBool [b2i(c != d)]) 14881 for { 14882 _ = v.Args[1] 14883 v_0 := v.Args[0] 14884 if v_0.Op != OpConst32 { 14885 break 14886 } 14887 d := v_0.AuxInt 14888 v_1 := v.Args[1] 14889 if v_1.Op != OpConst32 { 14890 break 14891 } 14892 c := v_1.AuxInt 14893 v.reset(OpConstBool) 14894 v.AuxInt = b2i(c != d) 14895 return true 14896 } 14897 return false 14898 } 14899 func rewriteValuegeneric_OpNeq64_0(v *Value) bool { 14900 b := v.Block 14901 _ = b 14902 // match: (Neq64 x x) 14903 // cond: 14904 // result: (ConstBool [0]) 14905 for { 14906 _ = v.Args[1] 14907 x := v.Args[0] 
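// Illustrative note (hand-written, not generated): the NeqNN rules in these
// functions canonicalize a comparison against an addition using the identity
// c != d+x  <=>  c-d != x, which holds in modular (wraparound) arithmetic at
// every width. As with the Mul rules, the narrower widths re-truncate the
// folded constant (int64(int16(c-d)), int64(int32(c-d)), ...) so AuxInt stays
// sign-extended. A quick check of the 32-bit identity (hypothetical helper):
//
//	func neq32Identity(c, d, x int32) bool {
//		return (c != d+x) == (c-d != x) // both sides wrap mod 2^32
//	}
//
// The (NeqNN x x) => (ConstBool [0]) rules are the trivial reflexive case.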
14908 if x != v.Args[1] { 14909 break 14910 } 14911 v.reset(OpConstBool) 14912 v.AuxInt = 0 14913 return true 14914 } 14915 // match: (Neq64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x)) 14916 // cond: 14917 // result: (Neq64 (Const64 <t> [c-d]) x) 14918 for { 14919 _ = v.Args[1] 14920 v_0 := v.Args[0] 14921 if v_0.Op != OpConst64 { 14922 break 14923 } 14924 t := v_0.Type 14925 c := v_0.AuxInt 14926 v_1 := v.Args[1] 14927 if v_1.Op != OpAdd64 { 14928 break 14929 } 14930 _ = v_1.Args[1] 14931 v_1_0 := v_1.Args[0] 14932 if v_1_0.Op != OpConst64 { 14933 break 14934 } 14935 if v_1_0.Type != t { 14936 break 14937 } 14938 d := v_1_0.AuxInt 14939 x := v_1.Args[1] 14940 v.reset(OpNeq64) 14941 v0 := b.NewValue0(v.Pos, OpConst64, t) 14942 v0.AuxInt = c - d 14943 v.AddArg(v0) 14944 v.AddArg(x) 14945 return true 14946 } 14947 // match: (Neq64 (Const64 <t> [c]) (Add64 x (Const64 <t> [d]))) 14948 // cond: 14949 // result: (Neq64 (Const64 <t> [c-d]) x) 14950 for { 14951 _ = v.Args[1] 14952 v_0 := v.Args[0] 14953 if v_0.Op != OpConst64 { 14954 break 14955 } 14956 t := v_0.Type 14957 c := v_0.AuxInt 14958 v_1 := v.Args[1] 14959 if v_1.Op != OpAdd64 { 14960 break 14961 } 14962 _ = v_1.Args[1] 14963 x := v_1.Args[0] 14964 v_1_1 := v_1.Args[1] 14965 if v_1_1.Op != OpConst64 { 14966 break 14967 } 14968 if v_1_1.Type != t { 14969 break 14970 } 14971 d := v_1_1.AuxInt 14972 v.reset(OpNeq64) 14973 v0 := b.NewValue0(v.Pos, OpConst64, t) 14974 v0.AuxInt = c - d 14975 v.AddArg(v0) 14976 v.AddArg(x) 14977 return true 14978 } 14979 // match: (Neq64 (Add64 (Const64 <t> [d]) x) (Const64 <t> [c])) 14980 // cond: 14981 // result: (Neq64 (Const64 <t> [c-d]) x) 14982 for { 14983 _ = v.Args[1] 14984 v_0 := v.Args[0] 14985 if v_0.Op != OpAdd64 { 14986 break 14987 } 14988 _ = v_0.Args[1] 14989 v_0_0 := v_0.Args[0] 14990 if v_0_0.Op != OpConst64 { 14991 break 14992 } 14993 t := v_0_0.Type 14994 d := v_0_0.AuxInt 14995 x := v_0.Args[1] 14996 v_1 := v.Args[1] 14997 if v_1.Op != OpConst64 { 14998 break 14999 } 15000 if v_1.Type != t { 15001 break 15002 } 15003 c := v_1.AuxInt 15004 v.reset(OpNeq64) 15005 v0 := b.NewValue0(v.Pos, OpConst64, t) 15006 v0.AuxInt = c - d 15007 v.AddArg(v0) 15008 v.AddArg(x) 15009 return true 15010 } 15011 // match: (Neq64 (Add64 x (Const64 <t> [d])) (Const64 <t> [c])) 15012 // cond: 15013 // result: (Neq64 (Const64 <t> [c-d]) x) 15014 for { 15015 _ = v.Args[1] 15016 v_0 := v.Args[0] 15017 if v_0.Op != OpAdd64 { 15018 break 15019 } 15020 _ = v_0.Args[1] 15021 x := v_0.Args[0] 15022 v_0_1 := v_0.Args[1] 15023 if v_0_1.Op != OpConst64 { 15024 break 15025 } 15026 t := v_0_1.Type 15027 d := v_0_1.AuxInt 15028 v_1 := v.Args[1] 15029 if v_1.Op != OpConst64 { 15030 break 15031 } 15032 if v_1.Type != t { 15033 break 15034 } 15035 c := v_1.AuxInt 15036 v.reset(OpNeq64) 15037 v0 := b.NewValue0(v.Pos, OpConst64, t) 15038 v0.AuxInt = c - d 15039 v.AddArg(v0) 15040 v.AddArg(x) 15041 return true 15042 } 15043 // match: (Neq64 (Const64 [c]) (Const64 [d])) 15044 // cond: 15045 // result: (ConstBool [b2i(c != d)]) 15046 for { 15047 _ = v.Args[1] 15048 v_0 := v.Args[0] 15049 if v_0.Op != OpConst64 { 15050 break 15051 } 15052 c := v_0.AuxInt 15053 v_1 := v.Args[1] 15054 if v_1.Op != OpConst64 { 15055 break 15056 } 15057 d := v_1.AuxInt 15058 v.reset(OpConstBool) 15059 v.AuxInt = b2i(c != d) 15060 return true 15061 } 15062 // match: (Neq64 (Const64 [d]) (Const64 [c])) 15063 // cond: 15064 // result: (ConstBool [b2i(c != d)]) 15065 for { 15066 _ = v.Args[1] 15067 v_0 := v.Args[0] 15068 if v_0.Op != OpConst64 { 15069 break 
15070 } 15071 d := v_0.AuxInt 15072 v_1 := v.Args[1] 15073 if v_1.Op != OpConst64 { 15074 break 15075 } 15076 c := v_1.AuxInt 15077 v.reset(OpConstBool) 15078 v.AuxInt = b2i(c != d) 15079 return true 15080 } 15081 return false 15082 } 15083 func rewriteValuegeneric_OpNeq8_0(v *Value) bool { 15084 b := v.Block 15085 _ = b 15086 // match: (Neq8 x x) 15087 // cond: 15088 // result: (ConstBool [0]) 15089 for { 15090 _ = v.Args[1] 15091 x := v.Args[0] 15092 if x != v.Args[1] { 15093 break 15094 } 15095 v.reset(OpConstBool) 15096 v.AuxInt = 0 15097 return true 15098 } 15099 // match: (Neq8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x)) 15100 // cond: 15101 // result: (Neq8 (Const8 <t> [int64(int8(c-d))]) x) 15102 for { 15103 _ = v.Args[1] 15104 v_0 := v.Args[0] 15105 if v_0.Op != OpConst8 { 15106 break 15107 } 15108 t := v_0.Type 15109 c := v_0.AuxInt 15110 v_1 := v.Args[1] 15111 if v_1.Op != OpAdd8 { 15112 break 15113 } 15114 _ = v_1.Args[1] 15115 v_1_0 := v_1.Args[0] 15116 if v_1_0.Op != OpConst8 { 15117 break 15118 } 15119 if v_1_0.Type != t { 15120 break 15121 } 15122 d := v_1_0.AuxInt 15123 x := v_1.Args[1] 15124 v.reset(OpNeq8) 15125 v0 := b.NewValue0(v.Pos, OpConst8, t) 15126 v0.AuxInt = int64(int8(c - d)) 15127 v.AddArg(v0) 15128 v.AddArg(x) 15129 return true 15130 } 15131 // match: (Neq8 (Const8 <t> [c]) (Add8 x (Const8 <t> [d]))) 15132 // cond: 15133 // result: (Neq8 (Const8 <t> [int64(int8(c-d))]) x) 15134 for { 15135 _ = v.Args[1] 15136 v_0 := v.Args[0] 15137 if v_0.Op != OpConst8 { 15138 break 15139 } 15140 t := v_0.Type 15141 c := v_0.AuxInt 15142 v_1 := v.Args[1] 15143 if v_1.Op != OpAdd8 { 15144 break 15145 } 15146 _ = v_1.Args[1] 15147 x := v_1.Args[0] 15148 v_1_1 := v_1.Args[1] 15149 if v_1_1.Op != OpConst8 { 15150 break 15151 } 15152 if v_1_1.Type != t { 15153 break 15154 } 15155 d := v_1_1.AuxInt 15156 v.reset(OpNeq8) 15157 v0 := b.NewValue0(v.Pos, OpConst8, t) 15158 v0.AuxInt = int64(int8(c - d)) 15159 v.AddArg(v0) 15160 v.AddArg(x) 15161 return true 15162 } 15163 // match: (Neq8 (Add8 (Const8 <t> [d]) x) (Const8 <t> [c])) 15164 // cond: 15165 // result: (Neq8 (Const8 <t> [int64(int8(c-d))]) x) 15166 for { 15167 _ = v.Args[1] 15168 v_0 := v.Args[0] 15169 if v_0.Op != OpAdd8 { 15170 break 15171 } 15172 _ = v_0.Args[1] 15173 v_0_0 := v_0.Args[0] 15174 if v_0_0.Op != OpConst8 { 15175 break 15176 } 15177 t := v_0_0.Type 15178 d := v_0_0.AuxInt 15179 x := v_0.Args[1] 15180 v_1 := v.Args[1] 15181 if v_1.Op != OpConst8 { 15182 break 15183 } 15184 if v_1.Type != t { 15185 break 15186 } 15187 c := v_1.AuxInt 15188 v.reset(OpNeq8) 15189 v0 := b.NewValue0(v.Pos, OpConst8, t) 15190 v0.AuxInt = int64(int8(c - d)) 15191 v.AddArg(v0) 15192 v.AddArg(x) 15193 return true 15194 } 15195 // match: (Neq8 (Add8 x (Const8 <t> [d])) (Const8 <t> [c])) 15196 // cond: 15197 // result: (Neq8 (Const8 <t> [int64(int8(c-d))]) x) 15198 for { 15199 _ = v.Args[1] 15200 v_0 := v.Args[0] 15201 if v_0.Op != OpAdd8 { 15202 break 15203 } 15204 _ = v_0.Args[1] 15205 x := v_0.Args[0] 15206 v_0_1 := v_0.Args[1] 15207 if v_0_1.Op != OpConst8 { 15208 break 15209 } 15210 t := v_0_1.Type 15211 d := v_0_1.AuxInt 15212 v_1 := v.Args[1] 15213 if v_1.Op != OpConst8 { 15214 break 15215 } 15216 if v_1.Type != t { 15217 break 15218 } 15219 c := v_1.AuxInt 15220 v.reset(OpNeq8) 15221 v0 := b.NewValue0(v.Pos, OpConst8, t) 15222 v0.AuxInt = int64(int8(c - d)) 15223 v.AddArg(v0) 15224 v.AddArg(x) 15225 return true 15226 } 15227 // match: (Neq8 (Const8 [c]) (Const8 [d])) 15228 // cond: 15229 // result: (ConstBool [b2i(c != d)]) 15230 
for { 15231 _ = v.Args[1] 15232 v_0 := v.Args[0] 15233 if v_0.Op != OpConst8 { 15234 break 15235 } 15236 c := v_0.AuxInt 15237 v_1 := v.Args[1] 15238 if v_1.Op != OpConst8 { 15239 break 15240 } 15241 d := v_1.AuxInt 15242 v.reset(OpConstBool) 15243 v.AuxInt = b2i(c != d) 15244 return true 15245 } 15246 // match: (Neq8 (Const8 [d]) (Const8 [c])) 15247 // cond: 15248 // result: (ConstBool [b2i(c != d)]) 15249 for { 15250 _ = v.Args[1] 15251 v_0 := v.Args[0] 15252 if v_0.Op != OpConst8 { 15253 break 15254 } 15255 d := v_0.AuxInt 15256 v_1 := v.Args[1] 15257 if v_1.Op != OpConst8 { 15258 break 15259 } 15260 c := v_1.AuxInt 15261 v.reset(OpConstBool) 15262 v.AuxInt = b2i(c != d) 15263 return true 15264 } 15265 return false 15266 } 15267 func rewriteValuegeneric_OpNeqB_0(v *Value) bool { 15268 // match: (NeqB (ConstBool [c]) (ConstBool [d])) 15269 // cond: 15270 // result: (ConstBool [b2i(c != d)]) 15271 for { 15272 _ = v.Args[1] 15273 v_0 := v.Args[0] 15274 if v_0.Op != OpConstBool { 15275 break 15276 } 15277 c := v_0.AuxInt 15278 v_1 := v.Args[1] 15279 if v_1.Op != OpConstBool { 15280 break 15281 } 15282 d := v_1.AuxInt 15283 v.reset(OpConstBool) 15284 v.AuxInt = b2i(c != d) 15285 return true 15286 } 15287 // match: (NeqB (ConstBool [d]) (ConstBool [c])) 15288 // cond: 15289 // result: (ConstBool [b2i(c != d)]) 15290 for { 15291 _ = v.Args[1] 15292 v_0 := v.Args[0] 15293 if v_0.Op != OpConstBool { 15294 break 15295 } 15296 d := v_0.AuxInt 15297 v_1 := v.Args[1] 15298 if v_1.Op != OpConstBool { 15299 break 15300 } 15301 c := v_1.AuxInt 15302 v.reset(OpConstBool) 15303 v.AuxInt = b2i(c != d) 15304 return true 15305 } 15306 // match: (NeqB (ConstBool [0]) x) 15307 // cond: 15308 // result: x 15309 for { 15310 _ = v.Args[1] 15311 v_0 := v.Args[0] 15312 if v_0.Op != OpConstBool { 15313 break 15314 } 15315 if v_0.AuxInt != 0 { 15316 break 15317 } 15318 x := v.Args[1] 15319 v.reset(OpCopy) 15320 v.Type = x.Type 15321 v.AddArg(x) 15322 return true 15323 } 15324 // match: (NeqB x (ConstBool [0])) 15325 // cond: 15326 // result: x 15327 for { 15328 _ = v.Args[1] 15329 x := v.Args[0] 15330 v_1 := v.Args[1] 15331 if v_1.Op != OpConstBool { 15332 break 15333 } 15334 if v_1.AuxInt != 0 { 15335 break 15336 } 15337 v.reset(OpCopy) 15338 v.Type = x.Type 15339 v.AddArg(x) 15340 return true 15341 } 15342 // match: (NeqB (ConstBool [1]) x) 15343 // cond: 15344 // result: (Not x) 15345 for { 15346 _ = v.Args[1] 15347 v_0 := v.Args[0] 15348 if v_0.Op != OpConstBool { 15349 break 15350 } 15351 if v_0.AuxInt != 1 { 15352 break 15353 } 15354 x := v.Args[1] 15355 v.reset(OpNot) 15356 v.AddArg(x) 15357 return true 15358 } 15359 // match: (NeqB x (ConstBool [1])) 15360 // cond: 15361 // result: (Not x) 15362 for { 15363 _ = v.Args[1] 15364 x := v.Args[0] 15365 v_1 := v.Args[1] 15366 if v_1.Op != OpConstBool { 15367 break 15368 } 15369 if v_1.AuxInt != 1 { 15370 break 15371 } 15372 v.reset(OpNot) 15373 v.AddArg(x) 15374 return true 15375 } 15376 // match: (NeqB (Not x) (Not y)) 15377 // cond: 15378 // result: (NeqB x y) 15379 for { 15380 _ = v.Args[1] 15381 v_0 := v.Args[0] 15382 if v_0.Op != OpNot { 15383 break 15384 } 15385 x := v_0.Args[0] 15386 v_1 := v.Args[1] 15387 if v_1.Op != OpNot { 15388 break 15389 } 15390 y := v_1.Args[0] 15391 v.reset(OpNeqB) 15392 v.AddArg(x) 15393 v.AddArg(y) 15394 return true 15395 } 15396 // match: (NeqB (Not y) (Not x)) 15397 // cond: 15398 // result: (NeqB x y) 15399 for { 15400 _ = v.Args[1] 15401 v_0 := v.Args[0] 15402 if v_0.Op != OpNot { 15403 break 15404 } 15405 y := v_0.Args[0] 
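// Illustrative note (hand-written, not generated): NeqB behaves like XOR on
// booleans, which is why NeqB(false, x) => x, NeqB(true, x) => Not x, and the
// Not wrappers cancel in NeqB(Not x, Not y) => NeqB(x, y). In plain Go terms
// (hypothetical helper):
//
//	func neqB(x, y bool) bool { return x != y }
//
//	// neqB(!x, !y) == neqB(x, y); neqB(false, x) == x; neqB(true, x) == !x
//
// b2i in the constant-folding rules above maps a Go bool to the 0/1 AuxInt
// encoding used by ConstBool.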
15406 v_1 := v.Args[1] 15407 if v_1.Op != OpNot { 15408 break 15409 } 15410 x := v_1.Args[0] 15411 v.reset(OpNeqB) 15412 v.AddArg(x) 15413 v.AddArg(y) 15414 return true 15415 } 15416 return false 15417 } 15418 func rewriteValuegeneric_OpNeqInter_0(v *Value) bool { 15419 b := v.Block 15420 _ = b 15421 typ := &b.Func.Config.Types 15422 _ = typ 15423 // match: (NeqInter x y) 15424 // cond: 15425 // result: (NeqPtr (ITab x) (ITab y)) 15426 for { 15427 _ = v.Args[1] 15428 x := v.Args[0] 15429 y := v.Args[1] 15430 v.reset(OpNeqPtr) 15431 v0 := b.NewValue0(v.Pos, OpITab, typ.BytePtr) 15432 v0.AddArg(x) 15433 v.AddArg(v0) 15434 v1 := b.NewValue0(v.Pos, OpITab, typ.BytePtr) 15435 v1.AddArg(y) 15436 v.AddArg(v1) 15437 return true 15438 } 15439 } 15440 func rewriteValuegeneric_OpNeqPtr_0(v *Value) bool { 15441 // match: (NeqPtr p (ConstNil)) 15442 // cond: 15443 // result: (IsNonNil p) 15444 for { 15445 _ = v.Args[1] 15446 p := v.Args[0] 15447 v_1 := v.Args[1] 15448 if v_1.Op != OpConstNil { 15449 break 15450 } 15451 v.reset(OpIsNonNil) 15452 v.AddArg(p) 15453 return true 15454 } 15455 // match: (NeqPtr (ConstNil) p) 15456 // cond: 15457 // result: (IsNonNil p) 15458 for { 15459 _ = v.Args[1] 15460 v_0 := v.Args[0] 15461 if v_0.Op != OpConstNil { 15462 break 15463 } 15464 p := v.Args[1] 15465 v.reset(OpIsNonNil) 15466 v.AddArg(p) 15467 return true 15468 } 15469 return false 15470 } 15471 func rewriteValuegeneric_OpNeqSlice_0(v *Value) bool { 15472 b := v.Block 15473 _ = b 15474 typ := &b.Func.Config.Types 15475 _ = typ 15476 // match: (NeqSlice x y) 15477 // cond: 15478 // result: (NeqPtr (SlicePtr x) (SlicePtr y)) 15479 for { 15480 _ = v.Args[1] 15481 x := v.Args[0] 15482 y := v.Args[1] 15483 v.reset(OpNeqPtr) 15484 v0 := b.NewValue0(v.Pos, OpSlicePtr, typ.BytePtr) 15485 v0.AddArg(x) 15486 v.AddArg(v0) 15487 v1 := b.NewValue0(v.Pos, OpSlicePtr, typ.BytePtr) 15488 v1.AddArg(y) 15489 v.AddArg(v1) 15490 return true 15491 } 15492 } 15493 func rewriteValuegeneric_OpNilCheck_0(v *Value) bool { 15494 b := v.Block 15495 _ = b 15496 config := b.Func.Config 15497 _ = config 15498 fe := b.Func.fe 15499 _ = fe 15500 // match: (NilCheck (GetG mem) mem) 15501 // cond: 15502 // result: mem 15503 for { 15504 _ = v.Args[1] 15505 v_0 := v.Args[0] 15506 if v_0.Op != OpGetG { 15507 break 15508 } 15509 mem := v_0.Args[0] 15510 if mem != v.Args[1] { 15511 break 15512 } 15513 v.reset(OpCopy) 15514 v.Type = mem.Type 15515 v.AddArg(mem) 15516 return true 15517 } 15518 // match: (NilCheck (Load (OffPtr [c] (SP)) mem) mem) 15519 // cond: mem.Op == OpStaticCall && isSameSym(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize() + config.RegSize && warnRule(fe.Debug_checknil() && v.Pos.Line() > 1, v, "removed nil check") 15520 // result: (Invalid) 15521 for { 15522 _ = v.Args[1] 15523 v_0 := v.Args[0] 15524 if v_0.Op != OpLoad { 15525 break 15526 } 15527 _ = v_0.Args[1] 15528 v_0_0 := v_0.Args[0] 15529 if v_0_0.Op != OpOffPtr { 15530 break 15531 } 15532 c := v_0_0.AuxInt 15533 v_0_0_0 := v_0_0.Args[0] 15534 if v_0_0_0.Op != OpSP { 15535 break 15536 } 15537 mem := v_0.Args[1] 15538 if mem != v.Args[1] { 15539 break 15540 } 15541 if !(mem.Op == OpStaticCall && isSameSym(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize()+config.RegSize && warnRule(fe.Debug_checknil() && v.Pos.Line() > 1, v, "removed nil check")) { 15542 break 15543 } 15544 v.reset(OpInvalid) 15545 return true 15546 } 15547 // match: (NilCheck (OffPtr (Load (OffPtr [c] (SP)) mem)) mem) 15548 // cond: mem.Op == OpStaticCall && 
isSameSym(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize() + config.RegSize && warnRule(fe.Debug_checknil() && v.Pos.Line() > 1, v, "removed nil check") 15549 // result: (Invalid) 15550 for { 15551 _ = v.Args[1] 15552 v_0 := v.Args[0] 15553 if v_0.Op != OpOffPtr { 15554 break 15555 } 15556 v_0_0 := v_0.Args[0] 15557 if v_0_0.Op != OpLoad { 15558 break 15559 } 15560 _ = v_0_0.Args[1] 15561 v_0_0_0 := v_0_0.Args[0] 15562 if v_0_0_0.Op != OpOffPtr { 15563 break 15564 } 15565 c := v_0_0_0.AuxInt 15566 v_0_0_0_0 := v_0_0_0.Args[0] 15567 if v_0_0_0_0.Op != OpSP { 15568 break 15569 } 15570 mem := v_0_0.Args[1] 15571 if mem != v.Args[1] { 15572 break 15573 } 15574 if !(mem.Op == OpStaticCall && isSameSym(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize()+config.RegSize && warnRule(fe.Debug_checknil() && v.Pos.Line() > 1, v, "removed nil check")) { 15575 break 15576 } 15577 v.reset(OpInvalid) 15578 return true 15579 } 15580 return false 15581 } 15582 func rewriteValuegeneric_OpNot_0(v *Value) bool { 15583 // match: (Not (Eq64 x y)) 15584 // cond: 15585 // result: (Neq64 x y) 15586 for { 15587 v_0 := v.Args[0] 15588 if v_0.Op != OpEq64 { 15589 break 15590 } 15591 _ = v_0.Args[1] 15592 x := v_0.Args[0] 15593 y := v_0.Args[1] 15594 v.reset(OpNeq64) 15595 v.AddArg(x) 15596 v.AddArg(y) 15597 return true 15598 } 15599 // match: (Not (Eq32 x y)) 15600 // cond: 15601 // result: (Neq32 x y) 15602 for { 15603 v_0 := v.Args[0] 15604 if v_0.Op != OpEq32 { 15605 break 15606 } 15607 _ = v_0.Args[1] 15608 x := v_0.Args[0] 15609 y := v_0.Args[1] 15610 v.reset(OpNeq32) 15611 v.AddArg(x) 15612 v.AddArg(y) 15613 return true 15614 } 15615 // match: (Not (Eq16 x y)) 15616 // cond: 15617 // result: (Neq16 x y) 15618 for { 15619 v_0 := v.Args[0] 15620 if v_0.Op != OpEq16 { 15621 break 15622 } 15623 _ = v_0.Args[1] 15624 x := v_0.Args[0] 15625 y := v_0.Args[1] 15626 v.reset(OpNeq16) 15627 v.AddArg(x) 15628 v.AddArg(y) 15629 return true 15630 } 15631 // match: (Not (Eq8 x y)) 15632 // cond: 15633 // result: (Neq8 x y) 15634 for { 15635 v_0 := v.Args[0] 15636 if v_0.Op != OpEq8 { 15637 break 15638 } 15639 _ = v_0.Args[1] 15640 x := v_0.Args[0] 15641 y := v_0.Args[1] 15642 v.reset(OpNeq8) 15643 v.AddArg(x) 15644 v.AddArg(y) 15645 return true 15646 } 15647 // match: (Not (EqB x y)) 15648 // cond: 15649 // result: (NeqB x y) 15650 for { 15651 v_0 := v.Args[0] 15652 if v_0.Op != OpEqB { 15653 break 15654 } 15655 _ = v_0.Args[1] 15656 x := v_0.Args[0] 15657 y := v_0.Args[1] 15658 v.reset(OpNeqB) 15659 v.AddArg(x) 15660 v.AddArg(y) 15661 return true 15662 } 15663 // match: (Not (Neq64 x y)) 15664 // cond: 15665 // result: (Eq64 x y) 15666 for { 15667 v_0 := v.Args[0] 15668 if v_0.Op != OpNeq64 { 15669 break 15670 } 15671 _ = v_0.Args[1] 15672 x := v_0.Args[0] 15673 y := v_0.Args[1] 15674 v.reset(OpEq64) 15675 v.AddArg(x) 15676 v.AddArg(y) 15677 return true 15678 } 15679 // match: (Not (Neq32 x y)) 15680 // cond: 15681 // result: (Eq32 x y) 15682 for { 15683 v_0 := v.Args[0] 15684 if v_0.Op != OpNeq32 { 15685 break 15686 } 15687 _ = v_0.Args[1] 15688 x := v_0.Args[0] 15689 y := v_0.Args[1] 15690 v.reset(OpEq32) 15691 v.AddArg(x) 15692 v.AddArg(y) 15693 return true 15694 } 15695 // match: (Not (Neq16 x y)) 15696 // cond: 15697 // result: (Eq16 x y) 15698 for { 15699 v_0 := v.Args[0] 15700 if v_0.Op != OpNeq16 { 15701 break 15702 } 15703 _ = v_0.Args[1] 15704 x := v_0.Args[0] 15705 y := v_0.Args[1] 15706 v.reset(OpEq16) 15707 v.AddArg(x) 15708 v.AddArg(y) 15709 return true 15710 } 15711 // match: 
(Not (Neq8 x y)) 15712 // cond: 15713 // result: (Eq8 x y) 15714 for { 15715 v_0 := v.Args[0] 15716 if v_0.Op != OpNeq8 { 15717 break 15718 } 15719 _ = v_0.Args[1] 15720 x := v_0.Args[0] 15721 y := v_0.Args[1] 15722 v.reset(OpEq8) 15723 v.AddArg(x) 15724 v.AddArg(y) 15725 return true 15726 } 15727 // match: (Not (NeqB x y)) 15728 // cond: 15729 // result: (EqB x y) 15730 for { 15731 v_0 := v.Args[0] 15732 if v_0.Op != OpNeqB { 15733 break 15734 } 15735 _ = v_0.Args[1] 15736 x := v_0.Args[0] 15737 y := v_0.Args[1] 15738 v.reset(OpEqB) 15739 v.AddArg(x) 15740 v.AddArg(y) 15741 return true 15742 } 15743 return false 15744 } 15745 func rewriteValuegeneric_OpNot_10(v *Value) bool { 15746 // match: (Not (Greater64 x y)) 15747 // cond: 15748 // result: (Leq64 x y) 15749 for { 15750 v_0 := v.Args[0] 15751 if v_0.Op != OpGreater64 { 15752 break 15753 } 15754 _ = v_0.Args[1] 15755 x := v_0.Args[0] 15756 y := v_0.Args[1] 15757 v.reset(OpLeq64) 15758 v.AddArg(x) 15759 v.AddArg(y) 15760 return true 15761 } 15762 // match: (Not (Greater32 x y)) 15763 // cond: 15764 // result: (Leq32 x y) 15765 for { 15766 v_0 := v.Args[0] 15767 if v_0.Op != OpGreater32 { 15768 break 15769 } 15770 _ = v_0.Args[1] 15771 x := v_0.Args[0] 15772 y := v_0.Args[1] 15773 v.reset(OpLeq32) 15774 v.AddArg(x) 15775 v.AddArg(y) 15776 return true 15777 } 15778 // match: (Not (Greater16 x y)) 15779 // cond: 15780 // result: (Leq16 x y) 15781 for { 15782 v_0 := v.Args[0] 15783 if v_0.Op != OpGreater16 { 15784 break 15785 } 15786 _ = v_0.Args[1] 15787 x := v_0.Args[0] 15788 y := v_0.Args[1] 15789 v.reset(OpLeq16) 15790 v.AddArg(x) 15791 v.AddArg(y) 15792 return true 15793 } 15794 // match: (Not (Greater8 x y)) 15795 // cond: 15796 // result: (Leq8 x y) 15797 for { 15798 v_0 := v.Args[0] 15799 if v_0.Op != OpGreater8 { 15800 break 15801 } 15802 _ = v_0.Args[1] 15803 x := v_0.Args[0] 15804 y := v_0.Args[1] 15805 v.reset(OpLeq8) 15806 v.AddArg(x) 15807 v.AddArg(y) 15808 return true 15809 } 15810 // match: (Not (Greater64U x y)) 15811 // cond: 15812 // result: (Leq64U x y) 15813 for { 15814 v_0 := v.Args[0] 15815 if v_0.Op != OpGreater64U { 15816 break 15817 } 15818 _ = v_0.Args[1] 15819 x := v_0.Args[0] 15820 y := v_0.Args[1] 15821 v.reset(OpLeq64U) 15822 v.AddArg(x) 15823 v.AddArg(y) 15824 return true 15825 } 15826 // match: (Not (Greater32U x y)) 15827 // cond: 15828 // result: (Leq32U x y) 15829 for { 15830 v_0 := v.Args[0] 15831 if v_0.Op != OpGreater32U { 15832 break 15833 } 15834 _ = v_0.Args[1] 15835 x := v_0.Args[0] 15836 y := v_0.Args[1] 15837 v.reset(OpLeq32U) 15838 v.AddArg(x) 15839 v.AddArg(y) 15840 return true 15841 } 15842 // match: (Not (Greater16U x y)) 15843 // cond: 15844 // result: (Leq16U x y) 15845 for { 15846 v_0 := v.Args[0] 15847 if v_0.Op != OpGreater16U { 15848 break 15849 } 15850 _ = v_0.Args[1] 15851 x := v_0.Args[0] 15852 y := v_0.Args[1] 15853 v.reset(OpLeq16U) 15854 v.AddArg(x) 15855 v.AddArg(y) 15856 return true 15857 } 15858 // match: (Not (Greater8U x y)) 15859 // cond: 15860 // result: (Leq8U x y) 15861 for { 15862 v_0 := v.Args[0] 15863 if v_0.Op != OpGreater8U { 15864 break 15865 } 15866 _ = v_0.Args[1] 15867 x := v_0.Args[0] 15868 y := v_0.Args[1] 15869 v.reset(OpLeq8U) 15870 v.AddArg(x) 15871 v.AddArg(y) 15872 return true 15873 } 15874 // match: (Not (Geq64 x y)) 15875 // cond: 15876 // result: (Less64 x y) 15877 for { 15878 v_0 := v.Args[0] 15879 if v_0.Op != OpGeq64 { 15880 break 15881 } 15882 _ = v_0.Args[1] 15883 x := v_0.Args[0] 15884 y := v_0.Args[1] 15885 v.reset(OpLess64) 15886 v.AddArg(x) 
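// The rules in this block fold a boolean negation into the comparison it wraps:
// (Not (Greater* x y)) becomes (Leq* x y) and (Not (Geq* x y)) becomes (Less* x y),
// for every operand width and for both the signed and unsigned variants, so no
// separate boolean negation has to be materialized.
// (For example, an integer test written as !(a > b) ends up compiled as a <= b.)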
15887 v.AddArg(y) 15888 return true 15889 } 15890 // match: (Not (Geq32 x y)) 15891 // cond: 15892 // result: (Less32 x y) 15893 for { 15894 v_0 := v.Args[0] 15895 if v_0.Op != OpGeq32 { 15896 break 15897 } 15898 _ = v_0.Args[1] 15899 x := v_0.Args[0] 15900 y := v_0.Args[1] 15901 v.reset(OpLess32) 15902 v.AddArg(x) 15903 v.AddArg(y) 15904 return true 15905 } 15906 return false 15907 } 15908 func rewriteValuegeneric_OpNot_20(v *Value) bool { 15909 // match: (Not (Geq16 x y)) 15910 // cond: 15911 // result: (Less16 x y) 15912 for { 15913 v_0 := v.Args[0] 15914 if v_0.Op != OpGeq16 { 15915 break 15916 } 15917 _ = v_0.Args[1] 15918 x := v_0.Args[0] 15919 y := v_0.Args[1] 15920 v.reset(OpLess16) 15921 v.AddArg(x) 15922 v.AddArg(y) 15923 return true 15924 } 15925 // match: (Not (Geq8 x y)) 15926 // cond: 15927 // result: (Less8 x y) 15928 for { 15929 v_0 := v.Args[0] 15930 if v_0.Op != OpGeq8 { 15931 break 15932 } 15933 _ = v_0.Args[1] 15934 x := v_0.Args[0] 15935 y := v_0.Args[1] 15936 v.reset(OpLess8) 15937 v.AddArg(x) 15938 v.AddArg(y) 15939 return true 15940 } 15941 // match: (Not (Geq64U x y)) 15942 // cond: 15943 // result: (Less64U x y) 15944 for { 15945 v_0 := v.Args[0] 15946 if v_0.Op != OpGeq64U { 15947 break 15948 } 15949 _ = v_0.Args[1] 15950 x := v_0.Args[0] 15951 y := v_0.Args[1] 15952 v.reset(OpLess64U) 15953 v.AddArg(x) 15954 v.AddArg(y) 15955 return true 15956 } 15957 // match: (Not (Geq32U x y)) 15958 // cond: 15959 // result: (Less32U x y) 15960 for { 15961 v_0 := v.Args[0] 15962 if v_0.Op != OpGeq32U { 15963 break 15964 } 15965 _ = v_0.Args[1] 15966 x := v_0.Args[0] 15967 y := v_0.Args[1] 15968 v.reset(OpLess32U) 15969 v.AddArg(x) 15970 v.AddArg(y) 15971 return true 15972 } 15973 // match: (Not (Geq16U x y)) 15974 // cond: 15975 // result: (Less16U x y) 15976 for { 15977 v_0 := v.Args[0] 15978 if v_0.Op != OpGeq16U { 15979 break 15980 } 15981 _ = v_0.Args[1] 15982 x := v_0.Args[0] 15983 y := v_0.Args[1] 15984 v.reset(OpLess16U) 15985 v.AddArg(x) 15986 v.AddArg(y) 15987 return true 15988 } 15989 // match: (Not (Geq8U x y)) 15990 // cond: 15991 // result: (Less8U x y) 15992 for { 15993 v_0 := v.Args[0] 15994 if v_0.Op != OpGeq8U { 15995 break 15996 } 15997 _ = v_0.Args[1] 15998 x := v_0.Args[0] 15999 y := v_0.Args[1] 16000 v.reset(OpLess8U) 16001 v.AddArg(x) 16002 v.AddArg(y) 16003 return true 16004 } 16005 // match: (Not (Less64 x y)) 16006 // cond: 16007 // result: (Geq64 x y) 16008 for { 16009 v_0 := v.Args[0] 16010 if v_0.Op != OpLess64 { 16011 break 16012 } 16013 _ = v_0.Args[1] 16014 x := v_0.Args[0] 16015 y := v_0.Args[1] 16016 v.reset(OpGeq64) 16017 v.AddArg(x) 16018 v.AddArg(y) 16019 return true 16020 } 16021 // match: (Not (Less32 x y)) 16022 // cond: 16023 // result: (Geq32 x y) 16024 for { 16025 v_0 := v.Args[0] 16026 if v_0.Op != OpLess32 { 16027 break 16028 } 16029 _ = v_0.Args[1] 16030 x := v_0.Args[0] 16031 y := v_0.Args[1] 16032 v.reset(OpGeq32) 16033 v.AddArg(x) 16034 v.AddArg(y) 16035 return true 16036 } 16037 // match: (Not (Less16 x y)) 16038 // cond: 16039 // result: (Geq16 x y) 16040 for { 16041 v_0 := v.Args[0] 16042 if v_0.Op != OpLess16 { 16043 break 16044 } 16045 _ = v_0.Args[1] 16046 x := v_0.Args[0] 16047 y := v_0.Args[1] 16048 v.reset(OpGeq16) 16049 v.AddArg(x) 16050 v.AddArg(y) 16051 return true 16052 } 16053 // match: (Not (Less8 x y)) 16054 // cond: 16055 // result: (Geq8 x y) 16056 for { 16057 v_0 := v.Args[0] 16058 if v_0.Op != OpLess8 { 16059 break 16060 } 16061 _ = v_0.Args[1] 16062 x := v_0.Args[0] 16063 y := v_0.Args[1] 16064 
v.reset(OpGeq8) 16065 v.AddArg(x) 16066 v.AddArg(y) 16067 return true 16068 } 16069 return false 16070 } 16071 func rewriteValuegeneric_OpNot_30(v *Value) bool { 16072 // match: (Not (Less64U x y)) 16073 // cond: 16074 // result: (Geq64U x y) 16075 for { 16076 v_0 := v.Args[0] 16077 if v_0.Op != OpLess64U { 16078 break 16079 } 16080 _ = v_0.Args[1] 16081 x := v_0.Args[0] 16082 y := v_0.Args[1] 16083 v.reset(OpGeq64U) 16084 v.AddArg(x) 16085 v.AddArg(y) 16086 return true 16087 } 16088 // match: (Not (Less32U x y)) 16089 // cond: 16090 // result: (Geq32U x y) 16091 for { 16092 v_0 := v.Args[0] 16093 if v_0.Op != OpLess32U { 16094 break 16095 } 16096 _ = v_0.Args[1] 16097 x := v_0.Args[0] 16098 y := v_0.Args[1] 16099 v.reset(OpGeq32U) 16100 v.AddArg(x) 16101 v.AddArg(y) 16102 return true 16103 } 16104 // match: (Not (Less16U x y)) 16105 // cond: 16106 // result: (Geq16U x y) 16107 for { 16108 v_0 := v.Args[0] 16109 if v_0.Op != OpLess16U { 16110 break 16111 } 16112 _ = v_0.Args[1] 16113 x := v_0.Args[0] 16114 y := v_0.Args[1] 16115 v.reset(OpGeq16U) 16116 v.AddArg(x) 16117 v.AddArg(y) 16118 return true 16119 } 16120 // match: (Not (Less8U x y)) 16121 // cond: 16122 // result: (Geq8U x y) 16123 for { 16124 v_0 := v.Args[0] 16125 if v_0.Op != OpLess8U { 16126 break 16127 } 16128 _ = v_0.Args[1] 16129 x := v_0.Args[0] 16130 y := v_0.Args[1] 16131 v.reset(OpGeq8U) 16132 v.AddArg(x) 16133 v.AddArg(y) 16134 return true 16135 } 16136 // match: (Not (Leq64 x y)) 16137 // cond: 16138 // result: (Greater64 x y) 16139 for { 16140 v_0 := v.Args[0] 16141 if v_0.Op != OpLeq64 { 16142 break 16143 } 16144 _ = v_0.Args[1] 16145 x := v_0.Args[0] 16146 y := v_0.Args[1] 16147 v.reset(OpGreater64) 16148 v.AddArg(x) 16149 v.AddArg(y) 16150 return true 16151 } 16152 // match: (Not (Leq32 x y)) 16153 // cond: 16154 // result: (Greater32 x y) 16155 for { 16156 v_0 := v.Args[0] 16157 if v_0.Op != OpLeq32 { 16158 break 16159 } 16160 _ = v_0.Args[1] 16161 x := v_0.Args[0] 16162 y := v_0.Args[1] 16163 v.reset(OpGreater32) 16164 v.AddArg(x) 16165 v.AddArg(y) 16166 return true 16167 } 16168 // match: (Not (Leq16 x y)) 16169 // cond: 16170 // result: (Greater16 x y) 16171 for { 16172 v_0 := v.Args[0] 16173 if v_0.Op != OpLeq16 { 16174 break 16175 } 16176 _ = v_0.Args[1] 16177 x := v_0.Args[0] 16178 y := v_0.Args[1] 16179 v.reset(OpGreater16) 16180 v.AddArg(x) 16181 v.AddArg(y) 16182 return true 16183 } 16184 // match: (Not (Leq8 x y)) 16185 // cond: 16186 // result: (Greater8 x y) 16187 for { 16188 v_0 := v.Args[0] 16189 if v_0.Op != OpLeq8 { 16190 break 16191 } 16192 _ = v_0.Args[1] 16193 x := v_0.Args[0] 16194 y := v_0.Args[1] 16195 v.reset(OpGreater8) 16196 v.AddArg(x) 16197 v.AddArg(y) 16198 return true 16199 } 16200 // match: (Not (Leq64U x y)) 16201 // cond: 16202 // result: (Greater64U x y) 16203 for { 16204 v_0 := v.Args[0] 16205 if v_0.Op != OpLeq64U { 16206 break 16207 } 16208 _ = v_0.Args[1] 16209 x := v_0.Args[0] 16210 y := v_0.Args[1] 16211 v.reset(OpGreater64U) 16212 v.AddArg(x) 16213 v.AddArg(y) 16214 return true 16215 } 16216 // match: (Not (Leq32U x y)) 16217 // cond: 16218 // result: (Greater32U x y) 16219 for { 16220 v_0 := v.Args[0] 16221 if v_0.Op != OpLeq32U { 16222 break 16223 } 16224 _ = v_0.Args[1] 16225 x := v_0.Args[0] 16226 y := v_0.Args[1] 16227 v.reset(OpGreater32U) 16228 v.AddArg(x) 16229 v.AddArg(y) 16230 return true 16231 } 16232 return false 16233 } 16234 func rewriteValuegeneric_OpNot_40(v *Value) bool { 16235 // match: (Not (Leq16U x y)) 16236 // cond: 16237 // result: (Greater16U x y) 
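// Together with the cases above, these rules rewrite Not of any of the integer
// comparisons to the complementary comparison: (Not (Less* x y)) -> (Geq* x y)
// and (Not (Leq* x y)) -> (Greater* x y), so a Not wrapped around one of these
// comparisons never survives rewriting.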
16238 for { 16239 v_0 := v.Args[0] 16240 if v_0.Op != OpLeq16U { 16241 break 16242 } 16243 _ = v_0.Args[1] 16244 x := v_0.Args[0] 16245 y := v_0.Args[1] 16246 v.reset(OpGreater16U) 16247 v.AddArg(x) 16248 v.AddArg(y) 16249 return true 16250 } 16251 // match: (Not (Leq8U x y)) 16252 // cond: 16253 // result: (Greater8U x y) 16254 for { 16255 v_0 := v.Args[0] 16256 if v_0.Op != OpLeq8U { 16257 break 16258 } 16259 _ = v_0.Args[1] 16260 x := v_0.Args[0] 16261 y := v_0.Args[1] 16262 v.reset(OpGreater8U) 16263 v.AddArg(x) 16264 v.AddArg(y) 16265 return true 16266 } 16267 return false 16268 } 16269 func rewriteValuegeneric_OpOffPtr_0(v *Value) bool { 16270 // match: (OffPtr (OffPtr p [b]) [a]) 16271 // cond: 16272 // result: (OffPtr p [a+b]) 16273 for { 16274 a := v.AuxInt 16275 v_0 := v.Args[0] 16276 if v_0.Op != OpOffPtr { 16277 break 16278 } 16279 b := v_0.AuxInt 16280 p := v_0.Args[0] 16281 v.reset(OpOffPtr) 16282 v.AuxInt = a + b 16283 v.AddArg(p) 16284 return true 16285 } 16286 // match: (OffPtr p [0]) 16287 // cond: v.Type.Compare(p.Type) == types.CMPeq 16288 // result: p 16289 for { 16290 if v.AuxInt != 0 { 16291 break 16292 } 16293 p := v.Args[0] 16294 if !(v.Type.Compare(p.Type) == types.CMPeq) { 16295 break 16296 } 16297 v.reset(OpCopy) 16298 v.Type = p.Type 16299 v.AddArg(p) 16300 return true 16301 } 16302 return false 16303 } 16304 func rewriteValuegeneric_OpOr16_0(v *Value) bool { 16305 // match: (Or16 (Const16 [c]) (Const16 [d])) 16306 // cond: 16307 // result: (Const16 [int64(int16(c|d))]) 16308 for { 16309 _ = v.Args[1] 16310 v_0 := v.Args[0] 16311 if v_0.Op != OpConst16 { 16312 break 16313 } 16314 c := v_0.AuxInt 16315 v_1 := v.Args[1] 16316 if v_1.Op != OpConst16 { 16317 break 16318 } 16319 d := v_1.AuxInt 16320 v.reset(OpConst16) 16321 v.AuxInt = int64(int16(c | d)) 16322 return true 16323 } 16324 // match: (Or16 (Const16 [d]) (Const16 [c])) 16325 // cond: 16326 // result: (Const16 [int64(int16(c|d))]) 16327 for { 16328 _ = v.Args[1] 16329 v_0 := v.Args[0] 16330 if v_0.Op != OpConst16 { 16331 break 16332 } 16333 d := v_0.AuxInt 16334 v_1 := v.Args[1] 16335 if v_1.Op != OpConst16 { 16336 break 16337 } 16338 c := v_1.AuxInt 16339 v.reset(OpConst16) 16340 v.AuxInt = int64(int16(c | d)) 16341 return true 16342 } 16343 // match: (Or16 x x) 16344 // cond: 16345 // result: x 16346 for { 16347 _ = v.Args[1] 16348 x := v.Args[0] 16349 if x != v.Args[1] { 16350 break 16351 } 16352 v.reset(OpCopy) 16353 v.Type = x.Type 16354 v.AddArg(x) 16355 return true 16356 } 16357 // match: (Or16 (Const16 [0]) x) 16358 // cond: 16359 // result: x 16360 for { 16361 _ = v.Args[1] 16362 v_0 := v.Args[0] 16363 if v_0.Op != OpConst16 { 16364 break 16365 } 16366 if v_0.AuxInt != 0 { 16367 break 16368 } 16369 x := v.Args[1] 16370 v.reset(OpCopy) 16371 v.Type = x.Type 16372 v.AddArg(x) 16373 return true 16374 } 16375 // match: (Or16 x (Const16 [0])) 16376 // cond: 16377 // result: x 16378 for { 16379 _ = v.Args[1] 16380 x := v.Args[0] 16381 v_1 := v.Args[1] 16382 if v_1.Op != OpConst16 { 16383 break 16384 } 16385 if v_1.AuxInt != 0 { 16386 break 16387 } 16388 v.reset(OpCopy) 16389 v.Type = x.Type 16390 v.AddArg(x) 16391 return true 16392 } 16393 // match: (Or16 (Const16 [-1]) _) 16394 // cond: 16395 // result: (Const16 [-1]) 16396 for { 16397 _ = v.Args[1] 16398 v_0 := v.Args[0] 16399 if v_0.Op != OpConst16 { 16400 break 16401 } 16402 if v_0.AuxInt != -1 { 16403 break 16404 } 16405 v.reset(OpConst16) 16406 v.AuxInt = -1 16407 return true 16408 } 16409 // match: (Or16 _ (Const16 [-1])) 16410 // cond: 16411 // 
result: (Const16 [-1]) 16412 for { 16413 _ = v.Args[1] 16414 v_1 := v.Args[1] 16415 if v_1.Op != OpConst16 { 16416 break 16417 } 16418 if v_1.AuxInt != -1 { 16419 break 16420 } 16421 v.reset(OpConst16) 16422 v.AuxInt = -1 16423 return true 16424 } 16425 // match: (Or16 x (Or16 x y)) 16426 // cond: 16427 // result: (Or16 x y) 16428 for { 16429 _ = v.Args[1] 16430 x := v.Args[0] 16431 v_1 := v.Args[1] 16432 if v_1.Op != OpOr16 { 16433 break 16434 } 16435 _ = v_1.Args[1] 16436 if x != v_1.Args[0] { 16437 break 16438 } 16439 y := v_1.Args[1] 16440 v.reset(OpOr16) 16441 v.AddArg(x) 16442 v.AddArg(y) 16443 return true 16444 } 16445 // match: (Or16 x (Or16 y x)) 16446 // cond: 16447 // result: (Or16 x y) 16448 for { 16449 _ = v.Args[1] 16450 x := v.Args[0] 16451 v_1 := v.Args[1] 16452 if v_1.Op != OpOr16 { 16453 break 16454 } 16455 _ = v_1.Args[1] 16456 y := v_1.Args[0] 16457 if x != v_1.Args[1] { 16458 break 16459 } 16460 v.reset(OpOr16) 16461 v.AddArg(x) 16462 v.AddArg(y) 16463 return true 16464 } 16465 // match: (Or16 (Or16 x y) x) 16466 // cond: 16467 // result: (Or16 x y) 16468 for { 16469 _ = v.Args[1] 16470 v_0 := v.Args[0] 16471 if v_0.Op != OpOr16 { 16472 break 16473 } 16474 _ = v_0.Args[1] 16475 x := v_0.Args[0] 16476 y := v_0.Args[1] 16477 if x != v.Args[1] { 16478 break 16479 } 16480 v.reset(OpOr16) 16481 v.AddArg(x) 16482 v.AddArg(y) 16483 return true 16484 } 16485 return false 16486 } 16487 func rewriteValuegeneric_OpOr16_10(v *Value) bool { 16488 b := v.Block 16489 _ = b 16490 // match: (Or16 (Or16 y x) x) 16491 // cond: 16492 // result: (Or16 x y) 16493 for { 16494 _ = v.Args[1] 16495 v_0 := v.Args[0] 16496 if v_0.Op != OpOr16 { 16497 break 16498 } 16499 _ = v_0.Args[1] 16500 y := v_0.Args[0] 16501 x := v_0.Args[1] 16502 if x != v.Args[1] { 16503 break 16504 } 16505 v.reset(OpOr16) 16506 v.AddArg(x) 16507 v.AddArg(y) 16508 return true 16509 } 16510 // match: (Or16 (And16 x (Const16 [c2])) (Const16 <t> [c1])) 16511 // cond: ^(c1 | c2) == 0 16512 // result: (Or16 (Const16 <t> [c1]) x) 16513 for { 16514 _ = v.Args[1] 16515 v_0 := v.Args[0] 16516 if v_0.Op != OpAnd16 { 16517 break 16518 } 16519 _ = v_0.Args[1] 16520 x := v_0.Args[0] 16521 v_0_1 := v_0.Args[1] 16522 if v_0_1.Op != OpConst16 { 16523 break 16524 } 16525 c2 := v_0_1.AuxInt 16526 v_1 := v.Args[1] 16527 if v_1.Op != OpConst16 { 16528 break 16529 } 16530 t := v_1.Type 16531 c1 := v_1.AuxInt 16532 if !(^(c1 | c2) == 0) { 16533 break 16534 } 16535 v.reset(OpOr16) 16536 v0 := b.NewValue0(v.Pos, OpConst16, t) 16537 v0.AuxInt = c1 16538 v.AddArg(v0) 16539 v.AddArg(x) 16540 return true 16541 } 16542 // match: (Or16 (And16 (Const16 [c2]) x) (Const16 <t> [c1])) 16543 // cond: ^(c1 | c2) == 0 16544 // result: (Or16 (Const16 <t> [c1]) x) 16545 for { 16546 _ = v.Args[1] 16547 v_0 := v.Args[0] 16548 if v_0.Op != OpAnd16 { 16549 break 16550 } 16551 _ = v_0.Args[1] 16552 v_0_0 := v_0.Args[0] 16553 if v_0_0.Op != OpConst16 { 16554 break 16555 } 16556 c2 := v_0_0.AuxInt 16557 x := v_0.Args[1] 16558 v_1 := v.Args[1] 16559 if v_1.Op != OpConst16 { 16560 break 16561 } 16562 t := v_1.Type 16563 c1 := v_1.AuxInt 16564 if !(^(c1 | c2) == 0) { 16565 break 16566 } 16567 v.reset(OpOr16) 16568 v0 := b.NewValue0(v.Pos, OpConst16, t) 16569 v0.AuxInt = c1 16570 v.AddArg(v0) 16571 v.AddArg(x) 16572 return true 16573 } 16574 // match: (Or16 (Const16 <t> [c1]) (And16 x (Const16 [c2]))) 16575 // cond: ^(c1 | c2) == 0 16576 // result: (Or16 (Const16 <t> [c1]) x) 16577 for { 16578 _ = v.Args[1] 16579 v_0 := v.Args[0] 16580 if v_0.Op != OpConst16 { 16581 break 
16582 } 16583 t := v_0.Type 16584 c1 := v_0.AuxInt 16585 v_1 := v.Args[1] 16586 if v_1.Op != OpAnd16 { 16587 break 16588 } 16589 _ = v_1.Args[1] 16590 x := v_1.Args[0] 16591 v_1_1 := v_1.Args[1] 16592 if v_1_1.Op != OpConst16 { 16593 break 16594 } 16595 c2 := v_1_1.AuxInt 16596 if !(^(c1 | c2) == 0) { 16597 break 16598 } 16599 v.reset(OpOr16) 16600 v0 := b.NewValue0(v.Pos, OpConst16, t) 16601 v0.AuxInt = c1 16602 v.AddArg(v0) 16603 v.AddArg(x) 16604 return true 16605 } 16606 // match: (Or16 (Const16 <t> [c1]) (And16 (Const16 [c2]) x)) 16607 // cond: ^(c1 | c2) == 0 16608 // result: (Or16 (Const16 <t> [c1]) x) 16609 for { 16610 _ = v.Args[1] 16611 v_0 := v.Args[0] 16612 if v_0.Op != OpConst16 { 16613 break 16614 } 16615 t := v_0.Type 16616 c1 := v_0.AuxInt 16617 v_1 := v.Args[1] 16618 if v_1.Op != OpAnd16 { 16619 break 16620 } 16621 _ = v_1.Args[1] 16622 v_1_0 := v_1.Args[0] 16623 if v_1_0.Op != OpConst16 { 16624 break 16625 } 16626 c2 := v_1_0.AuxInt 16627 x := v_1.Args[1] 16628 if !(^(c1 | c2) == 0) { 16629 break 16630 } 16631 v.reset(OpOr16) 16632 v0 := b.NewValue0(v.Pos, OpConst16, t) 16633 v0.AuxInt = c1 16634 v.AddArg(v0) 16635 v.AddArg(x) 16636 return true 16637 } 16638 // match: (Or16 (Or16 i:(Const16 <t>) z) x) 16639 // cond: (z.Op != OpConst16 && x.Op != OpConst16) 16640 // result: (Or16 i (Or16 <t> z x)) 16641 for { 16642 _ = v.Args[1] 16643 v_0 := v.Args[0] 16644 if v_0.Op != OpOr16 { 16645 break 16646 } 16647 _ = v_0.Args[1] 16648 i := v_0.Args[0] 16649 if i.Op != OpConst16 { 16650 break 16651 } 16652 t := i.Type 16653 z := v_0.Args[1] 16654 x := v.Args[1] 16655 if !(z.Op != OpConst16 && x.Op != OpConst16) { 16656 break 16657 } 16658 v.reset(OpOr16) 16659 v.AddArg(i) 16660 v0 := b.NewValue0(v.Pos, OpOr16, t) 16661 v0.AddArg(z) 16662 v0.AddArg(x) 16663 v.AddArg(v0) 16664 return true 16665 } 16666 // match: (Or16 (Or16 z i:(Const16 <t>)) x) 16667 // cond: (z.Op != OpConst16 && x.Op != OpConst16) 16668 // result: (Or16 i (Or16 <t> z x)) 16669 for { 16670 _ = v.Args[1] 16671 v_0 := v.Args[0] 16672 if v_0.Op != OpOr16 { 16673 break 16674 } 16675 _ = v_0.Args[1] 16676 z := v_0.Args[0] 16677 i := v_0.Args[1] 16678 if i.Op != OpConst16 { 16679 break 16680 } 16681 t := i.Type 16682 x := v.Args[1] 16683 if !(z.Op != OpConst16 && x.Op != OpConst16) { 16684 break 16685 } 16686 v.reset(OpOr16) 16687 v.AddArg(i) 16688 v0 := b.NewValue0(v.Pos, OpOr16, t) 16689 v0.AddArg(z) 16690 v0.AddArg(x) 16691 v.AddArg(v0) 16692 return true 16693 } 16694 // match: (Or16 x (Or16 i:(Const16 <t>) z)) 16695 // cond: (z.Op != OpConst16 && x.Op != OpConst16) 16696 // result: (Or16 i (Or16 <t> z x)) 16697 for { 16698 _ = v.Args[1] 16699 x := v.Args[0] 16700 v_1 := v.Args[1] 16701 if v_1.Op != OpOr16 { 16702 break 16703 } 16704 _ = v_1.Args[1] 16705 i := v_1.Args[0] 16706 if i.Op != OpConst16 { 16707 break 16708 } 16709 t := i.Type 16710 z := v_1.Args[1] 16711 if !(z.Op != OpConst16 && x.Op != OpConst16) { 16712 break 16713 } 16714 v.reset(OpOr16) 16715 v.AddArg(i) 16716 v0 := b.NewValue0(v.Pos, OpOr16, t) 16717 v0.AddArg(z) 16718 v0.AddArg(x) 16719 v.AddArg(v0) 16720 return true 16721 } 16722 // match: (Or16 x (Or16 z i:(Const16 <t>))) 16723 // cond: (z.Op != OpConst16 && x.Op != OpConst16) 16724 // result: (Or16 i (Or16 <t> z x)) 16725 for { 16726 _ = v.Args[1] 16727 x := v.Args[0] 16728 v_1 := v.Args[1] 16729 if v_1.Op != OpOr16 { 16730 break 16731 } 16732 _ = v_1.Args[1] 16733 z := v_1.Args[0] 16734 i := v_1.Args[1] 16735 if i.Op != OpConst16 { 16736 break 16737 } 16738 t := i.Type 16739 if !(z.Op != 
OpConst16 && x.Op != OpConst16) { 16740 break 16741 } 16742 v.reset(OpOr16) 16743 v.AddArg(i) 16744 v0 := b.NewValue0(v.Pos, OpOr16, t) 16745 v0.AddArg(z) 16746 v0.AddArg(x) 16747 v.AddArg(v0) 16748 return true 16749 } 16750 // match: (Or16 (Const16 <t> [c]) (Or16 (Const16 <t> [d]) x)) 16751 // cond: 16752 // result: (Or16 (Const16 <t> [int64(int16(c|d))]) x) 16753 for { 16754 _ = v.Args[1] 16755 v_0 := v.Args[0] 16756 if v_0.Op != OpConst16 { 16757 break 16758 } 16759 t := v_0.Type 16760 c := v_0.AuxInt 16761 v_1 := v.Args[1] 16762 if v_1.Op != OpOr16 { 16763 break 16764 } 16765 _ = v_1.Args[1] 16766 v_1_0 := v_1.Args[0] 16767 if v_1_0.Op != OpConst16 { 16768 break 16769 } 16770 if v_1_0.Type != t { 16771 break 16772 } 16773 d := v_1_0.AuxInt 16774 x := v_1.Args[1] 16775 v.reset(OpOr16) 16776 v0 := b.NewValue0(v.Pos, OpConst16, t) 16777 v0.AuxInt = int64(int16(c | d)) 16778 v.AddArg(v0) 16779 v.AddArg(x) 16780 return true 16781 } 16782 return false 16783 } 16784 func rewriteValuegeneric_OpOr16_20(v *Value) bool { 16785 b := v.Block 16786 _ = b 16787 // match: (Or16 (Const16 <t> [c]) (Or16 x (Const16 <t> [d]))) 16788 // cond: 16789 // result: (Or16 (Const16 <t> [int64(int16(c|d))]) x) 16790 for { 16791 _ = v.Args[1] 16792 v_0 := v.Args[0] 16793 if v_0.Op != OpConst16 { 16794 break 16795 } 16796 t := v_0.Type 16797 c := v_0.AuxInt 16798 v_1 := v.Args[1] 16799 if v_1.Op != OpOr16 { 16800 break 16801 } 16802 _ = v_1.Args[1] 16803 x := v_1.Args[0] 16804 v_1_1 := v_1.Args[1] 16805 if v_1_1.Op != OpConst16 { 16806 break 16807 } 16808 if v_1_1.Type != t { 16809 break 16810 } 16811 d := v_1_1.AuxInt 16812 v.reset(OpOr16) 16813 v0 := b.NewValue0(v.Pos, OpConst16, t) 16814 v0.AuxInt = int64(int16(c | d)) 16815 v.AddArg(v0) 16816 v.AddArg(x) 16817 return true 16818 } 16819 // match: (Or16 (Or16 (Const16 <t> [d]) x) (Const16 <t> [c])) 16820 // cond: 16821 // result: (Or16 (Const16 <t> [int64(int16(c|d))]) x) 16822 for { 16823 _ = v.Args[1] 16824 v_0 := v.Args[0] 16825 if v_0.Op != OpOr16 { 16826 break 16827 } 16828 _ = v_0.Args[1] 16829 v_0_0 := v_0.Args[0] 16830 if v_0_0.Op != OpConst16 { 16831 break 16832 } 16833 t := v_0_0.Type 16834 d := v_0_0.AuxInt 16835 x := v_0.Args[1] 16836 v_1 := v.Args[1] 16837 if v_1.Op != OpConst16 { 16838 break 16839 } 16840 if v_1.Type != t { 16841 break 16842 } 16843 c := v_1.AuxInt 16844 v.reset(OpOr16) 16845 v0 := b.NewValue0(v.Pos, OpConst16, t) 16846 v0.AuxInt = int64(int16(c | d)) 16847 v.AddArg(v0) 16848 v.AddArg(x) 16849 return true 16850 } 16851 // match: (Or16 (Or16 x (Const16 <t> [d])) (Const16 <t> [c])) 16852 // cond: 16853 // result: (Or16 (Const16 <t> [int64(int16(c|d))]) x) 16854 for { 16855 _ = v.Args[1] 16856 v_0 := v.Args[0] 16857 if v_0.Op != OpOr16 { 16858 break 16859 } 16860 _ = v_0.Args[1] 16861 x := v_0.Args[0] 16862 v_0_1 := v_0.Args[1] 16863 if v_0_1.Op != OpConst16 { 16864 break 16865 } 16866 t := v_0_1.Type 16867 d := v_0_1.AuxInt 16868 v_1 := v.Args[1] 16869 if v_1.Op != OpConst16 { 16870 break 16871 } 16872 if v_1.Type != t { 16873 break 16874 } 16875 c := v_1.AuxInt 16876 v.reset(OpOr16) 16877 v0 := b.NewValue0(v.Pos, OpConst16, t) 16878 v0.AuxInt = int64(int16(c | d)) 16879 v.AddArg(v0) 16880 v.AddArg(x) 16881 return true 16882 } 16883 return false 16884 } 16885 func rewriteValuegeneric_OpOr32_0(v *Value) bool { 16886 // match: (Or32 (Const32 [c]) (Const32 [d])) 16887 // cond: 16888 // result: (Const32 [int64(int32(c|d))]) 16889 for { 16890 _ = v.Args[1] 16891 v_0 := v.Args[0] 16892 if v_0.Op != OpConst32 { 16893 break 16894 } 16895 c := 
v_0.AuxInt 16896 v_1 := v.Args[1] 16897 if v_1.Op != OpConst32 { 16898 break 16899 } 16900 d := v_1.AuxInt 16901 v.reset(OpConst32) 16902 v.AuxInt = int64(int32(c | d)) 16903 return true 16904 } 16905 // match: (Or32 (Const32 [d]) (Const32 [c])) 16906 // cond: 16907 // result: (Const32 [int64(int32(c|d))]) 16908 for { 16909 _ = v.Args[1] 16910 v_0 := v.Args[0] 16911 if v_0.Op != OpConst32 { 16912 break 16913 } 16914 d := v_0.AuxInt 16915 v_1 := v.Args[1] 16916 if v_1.Op != OpConst32 { 16917 break 16918 } 16919 c := v_1.AuxInt 16920 v.reset(OpConst32) 16921 v.AuxInt = int64(int32(c | d)) 16922 return true 16923 } 16924 // match: (Or32 x x) 16925 // cond: 16926 // result: x 16927 for { 16928 _ = v.Args[1] 16929 x := v.Args[0] 16930 if x != v.Args[1] { 16931 break 16932 } 16933 v.reset(OpCopy) 16934 v.Type = x.Type 16935 v.AddArg(x) 16936 return true 16937 } 16938 // match: (Or32 (Const32 [0]) x) 16939 // cond: 16940 // result: x 16941 for { 16942 _ = v.Args[1] 16943 v_0 := v.Args[0] 16944 if v_0.Op != OpConst32 { 16945 break 16946 } 16947 if v_0.AuxInt != 0 { 16948 break 16949 } 16950 x := v.Args[1] 16951 v.reset(OpCopy) 16952 v.Type = x.Type 16953 v.AddArg(x) 16954 return true 16955 } 16956 // match: (Or32 x (Const32 [0])) 16957 // cond: 16958 // result: x 16959 for { 16960 _ = v.Args[1] 16961 x := v.Args[0] 16962 v_1 := v.Args[1] 16963 if v_1.Op != OpConst32 { 16964 break 16965 } 16966 if v_1.AuxInt != 0 { 16967 break 16968 } 16969 v.reset(OpCopy) 16970 v.Type = x.Type 16971 v.AddArg(x) 16972 return true 16973 } 16974 // match: (Or32 (Const32 [-1]) _) 16975 // cond: 16976 // result: (Const32 [-1]) 16977 for { 16978 _ = v.Args[1] 16979 v_0 := v.Args[0] 16980 if v_0.Op != OpConst32 { 16981 break 16982 } 16983 if v_0.AuxInt != -1 { 16984 break 16985 } 16986 v.reset(OpConst32) 16987 v.AuxInt = -1 16988 return true 16989 } 16990 // match: (Or32 _ (Const32 [-1])) 16991 // cond: 16992 // result: (Const32 [-1]) 16993 for { 16994 _ = v.Args[1] 16995 v_1 := v.Args[1] 16996 if v_1.Op != OpConst32 { 16997 break 16998 } 16999 if v_1.AuxInt != -1 { 17000 break 17001 } 17002 v.reset(OpConst32) 17003 v.AuxInt = -1 17004 return true 17005 } 17006 // match: (Or32 x (Or32 x y)) 17007 // cond: 17008 // result: (Or32 x y) 17009 for { 17010 _ = v.Args[1] 17011 x := v.Args[0] 17012 v_1 := v.Args[1] 17013 if v_1.Op != OpOr32 { 17014 break 17015 } 17016 _ = v_1.Args[1] 17017 if x != v_1.Args[0] { 17018 break 17019 } 17020 y := v_1.Args[1] 17021 v.reset(OpOr32) 17022 v.AddArg(x) 17023 v.AddArg(y) 17024 return true 17025 } 17026 // match: (Or32 x (Or32 y x)) 17027 // cond: 17028 // result: (Or32 x y) 17029 for { 17030 _ = v.Args[1] 17031 x := v.Args[0] 17032 v_1 := v.Args[1] 17033 if v_1.Op != OpOr32 { 17034 break 17035 } 17036 _ = v_1.Args[1] 17037 y := v_1.Args[0] 17038 if x != v_1.Args[1] { 17039 break 17040 } 17041 v.reset(OpOr32) 17042 v.AddArg(x) 17043 v.AddArg(y) 17044 return true 17045 } 17046 // match: (Or32 (Or32 x y) x) 17047 // cond: 17048 // result: (Or32 x y) 17049 for { 17050 _ = v.Args[1] 17051 v_0 := v.Args[0] 17052 if v_0.Op != OpOr32 { 17053 break 17054 } 17055 _ = v_0.Args[1] 17056 x := v_0.Args[0] 17057 y := v_0.Args[1] 17058 if x != v.Args[1] { 17059 break 17060 } 17061 v.reset(OpOr32) 17062 v.AddArg(x) 17063 v.AddArg(y) 17064 return true 17065 } 17066 return false 17067 } 17068 func rewriteValuegeneric_OpOr32_10(v *Value) bool { 17069 b := v.Block 17070 _ = b 17071 // match: (Or32 (Or32 y x) x) 17072 // cond: 17073 // result: (Or32 x y) 17074 for { 17075 _ = v.Args[1] 17076 v_0 := v.Args[0] 
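// As with the 16-bit form above, the Or32 rules fold two constants into
// int64(int32(c|d)), collapse x|x to x, drop an all-zero operand, absorb into an
// all-ones constant (-1), and deduplicate a repeated operand inside a nested Or;
// the commutative argument orders are matched as separate cases.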
17077 if v_0.Op != OpOr32 { 17078 break 17079 } 17080 _ = v_0.Args[1] 17081 y := v_0.Args[0] 17082 x := v_0.Args[1] 17083 if x != v.Args[1] { 17084 break 17085 } 17086 v.reset(OpOr32) 17087 v.AddArg(x) 17088 v.AddArg(y) 17089 return true 17090 } 17091 // match: (Or32 (And32 x (Const32 [c2])) (Const32 <t> [c1])) 17092 // cond: ^(c1 | c2) == 0 17093 // result: (Or32 (Const32 <t> [c1]) x) 17094 for { 17095 _ = v.Args[1] 17096 v_0 := v.Args[0] 17097 if v_0.Op != OpAnd32 { 17098 break 17099 } 17100 _ = v_0.Args[1] 17101 x := v_0.Args[0] 17102 v_0_1 := v_0.Args[1] 17103 if v_0_1.Op != OpConst32 { 17104 break 17105 } 17106 c2 := v_0_1.AuxInt 17107 v_1 := v.Args[1] 17108 if v_1.Op != OpConst32 { 17109 break 17110 } 17111 t := v_1.Type 17112 c1 := v_1.AuxInt 17113 if !(^(c1 | c2) == 0) { 17114 break 17115 } 17116 v.reset(OpOr32) 17117 v0 := b.NewValue0(v.Pos, OpConst32, t) 17118 v0.AuxInt = c1 17119 v.AddArg(v0) 17120 v.AddArg(x) 17121 return true 17122 } 17123 // match: (Or32 (And32 (Const32 [c2]) x) (Const32 <t> [c1])) 17124 // cond: ^(c1 | c2) == 0 17125 // result: (Or32 (Const32 <t> [c1]) x) 17126 for { 17127 _ = v.Args[1] 17128 v_0 := v.Args[0] 17129 if v_0.Op != OpAnd32 { 17130 break 17131 } 17132 _ = v_0.Args[1] 17133 v_0_0 := v_0.Args[0] 17134 if v_0_0.Op != OpConst32 { 17135 break 17136 } 17137 c2 := v_0_0.AuxInt 17138 x := v_0.Args[1] 17139 v_1 := v.Args[1] 17140 if v_1.Op != OpConst32 { 17141 break 17142 } 17143 t := v_1.Type 17144 c1 := v_1.AuxInt 17145 if !(^(c1 | c2) == 0) { 17146 break 17147 } 17148 v.reset(OpOr32) 17149 v0 := b.NewValue0(v.Pos, OpConst32, t) 17150 v0.AuxInt = c1 17151 v.AddArg(v0) 17152 v.AddArg(x) 17153 return true 17154 } 17155 // match: (Or32 (Const32 <t> [c1]) (And32 x (Const32 [c2]))) 17156 // cond: ^(c1 | c2) == 0 17157 // result: (Or32 (Const32 <t> [c1]) x) 17158 for { 17159 _ = v.Args[1] 17160 v_0 := v.Args[0] 17161 if v_0.Op != OpConst32 { 17162 break 17163 } 17164 t := v_0.Type 17165 c1 := v_0.AuxInt 17166 v_1 := v.Args[1] 17167 if v_1.Op != OpAnd32 { 17168 break 17169 } 17170 _ = v_1.Args[1] 17171 x := v_1.Args[0] 17172 v_1_1 := v_1.Args[1] 17173 if v_1_1.Op != OpConst32 { 17174 break 17175 } 17176 c2 := v_1_1.AuxInt 17177 if !(^(c1 | c2) == 0) { 17178 break 17179 } 17180 v.reset(OpOr32) 17181 v0 := b.NewValue0(v.Pos, OpConst32, t) 17182 v0.AuxInt = c1 17183 v.AddArg(v0) 17184 v.AddArg(x) 17185 return true 17186 } 17187 // match: (Or32 (Const32 <t> [c1]) (And32 (Const32 [c2]) x)) 17188 // cond: ^(c1 | c2) == 0 17189 // result: (Or32 (Const32 <t> [c1]) x) 17190 for { 17191 _ = v.Args[1] 17192 v_0 := v.Args[0] 17193 if v_0.Op != OpConst32 { 17194 break 17195 } 17196 t := v_0.Type 17197 c1 := v_0.AuxInt 17198 v_1 := v.Args[1] 17199 if v_1.Op != OpAnd32 { 17200 break 17201 } 17202 _ = v_1.Args[1] 17203 v_1_0 := v_1.Args[0] 17204 if v_1_0.Op != OpConst32 { 17205 break 17206 } 17207 c2 := v_1_0.AuxInt 17208 x := v_1.Args[1] 17209 if !(^(c1 | c2) == 0) { 17210 break 17211 } 17212 v.reset(OpOr32) 17213 v0 := b.NewValue0(v.Pos, OpConst32, t) 17214 v0.AuxInt = c1 17215 v.AddArg(v0) 17216 v.AddArg(x) 17217 return true 17218 } 17219 // match: (Or32 (Or32 i:(Const32 <t>) z) x) 17220 // cond: (z.Op != OpConst32 && x.Op != OpConst32) 17221 // result: (Or32 i (Or32 <t> z x)) 17222 for { 17223 _ = v.Args[1] 17224 v_0 := v.Args[0] 17225 if v_0.Op != OpOr32 { 17226 break 17227 } 17228 _ = v_0.Args[1] 17229 i := v_0.Args[0] 17230 if i.Op != OpConst32 { 17231 break 17232 } 17233 t := i.Type 17234 z := v_0.Args[1] 17235 x := v.Args[1] 17236 if !(z.Op != OpConst32 && x.Op != 
OpConst32) { 17237 break 17238 } 17239 v.reset(OpOr32) 17240 v.AddArg(i) 17241 v0 := b.NewValue0(v.Pos, OpOr32, t) 17242 v0.AddArg(z) 17243 v0.AddArg(x) 17244 v.AddArg(v0) 17245 return true 17246 } 17247 // match: (Or32 (Or32 z i:(Const32 <t>)) x) 17248 // cond: (z.Op != OpConst32 && x.Op != OpConst32) 17249 // result: (Or32 i (Or32 <t> z x)) 17250 for { 17251 _ = v.Args[1] 17252 v_0 := v.Args[0] 17253 if v_0.Op != OpOr32 { 17254 break 17255 } 17256 _ = v_0.Args[1] 17257 z := v_0.Args[0] 17258 i := v_0.Args[1] 17259 if i.Op != OpConst32 { 17260 break 17261 } 17262 t := i.Type 17263 x := v.Args[1] 17264 if !(z.Op != OpConst32 && x.Op != OpConst32) { 17265 break 17266 } 17267 v.reset(OpOr32) 17268 v.AddArg(i) 17269 v0 := b.NewValue0(v.Pos, OpOr32, t) 17270 v0.AddArg(z) 17271 v0.AddArg(x) 17272 v.AddArg(v0) 17273 return true 17274 } 17275 // match: (Or32 x (Or32 i:(Const32 <t>) z)) 17276 // cond: (z.Op != OpConst32 && x.Op != OpConst32) 17277 // result: (Or32 i (Or32 <t> z x)) 17278 for { 17279 _ = v.Args[1] 17280 x := v.Args[0] 17281 v_1 := v.Args[1] 17282 if v_1.Op != OpOr32 { 17283 break 17284 } 17285 _ = v_1.Args[1] 17286 i := v_1.Args[0] 17287 if i.Op != OpConst32 { 17288 break 17289 } 17290 t := i.Type 17291 z := v_1.Args[1] 17292 if !(z.Op != OpConst32 && x.Op != OpConst32) { 17293 break 17294 } 17295 v.reset(OpOr32) 17296 v.AddArg(i) 17297 v0 := b.NewValue0(v.Pos, OpOr32, t) 17298 v0.AddArg(z) 17299 v0.AddArg(x) 17300 v.AddArg(v0) 17301 return true 17302 } 17303 // match: (Or32 x (Or32 z i:(Const32 <t>))) 17304 // cond: (z.Op != OpConst32 && x.Op != OpConst32) 17305 // result: (Or32 i (Or32 <t> z x)) 17306 for { 17307 _ = v.Args[1] 17308 x := v.Args[0] 17309 v_1 := v.Args[1] 17310 if v_1.Op != OpOr32 { 17311 break 17312 } 17313 _ = v_1.Args[1] 17314 z := v_1.Args[0] 17315 i := v_1.Args[1] 17316 if i.Op != OpConst32 { 17317 break 17318 } 17319 t := i.Type 17320 if !(z.Op != OpConst32 && x.Op != OpConst32) { 17321 break 17322 } 17323 v.reset(OpOr32) 17324 v.AddArg(i) 17325 v0 := b.NewValue0(v.Pos, OpOr32, t) 17326 v0.AddArg(z) 17327 v0.AddArg(x) 17328 v.AddArg(v0) 17329 return true 17330 } 17331 // match: (Or32 (Const32 <t> [c]) (Or32 (Const32 <t> [d]) x)) 17332 // cond: 17333 // result: (Or32 (Const32 <t> [int64(int32(c|d))]) x) 17334 for { 17335 _ = v.Args[1] 17336 v_0 := v.Args[0] 17337 if v_0.Op != OpConst32 { 17338 break 17339 } 17340 t := v_0.Type 17341 c := v_0.AuxInt 17342 v_1 := v.Args[1] 17343 if v_1.Op != OpOr32 { 17344 break 17345 } 17346 _ = v_1.Args[1] 17347 v_1_0 := v_1.Args[0] 17348 if v_1_0.Op != OpConst32 { 17349 break 17350 } 17351 if v_1_0.Type != t { 17352 break 17353 } 17354 d := v_1_0.AuxInt 17355 x := v_1.Args[1] 17356 v.reset(OpOr32) 17357 v0 := b.NewValue0(v.Pos, OpConst32, t) 17358 v0.AuxInt = int64(int32(c | d)) 17359 v.AddArg(v0) 17360 v.AddArg(x) 17361 return true 17362 } 17363 return false 17364 } 17365 func rewriteValuegeneric_OpOr32_20(v *Value) bool { 17366 b := v.Block 17367 _ = b 17368 // match: (Or32 (Const32 <t> [c]) (Or32 x (Const32 <t> [d]))) 17369 // cond: 17370 // result: (Or32 (Const32 <t> [int64(int32(c|d))]) x) 17371 for { 17372 _ = v.Args[1] 17373 v_0 := v.Args[0] 17374 if v_0.Op != OpConst32 { 17375 break 17376 } 17377 t := v_0.Type 17378 c := v_0.AuxInt 17379 v_1 := v.Args[1] 17380 if v_1.Op != OpOr32 { 17381 break 17382 } 17383 _ = v_1.Args[1] 17384 x := v_1.Args[0] 17385 v_1_1 := v_1.Args[1] 17386 if v_1_1.Op != OpConst32 { 17387 break 17388 } 17389 if v_1_1.Type != t { 17390 break 17391 } 17392 d := v_1_1.AuxInt 17393 v.reset(OpOr32) 
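// When a constant is or'ed with a nested Or that also carries a constant of the
// same type, the two constants are merged into a single
// Const32 [int64(int32(c|d))] operand, leaving one Or with one constant.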
17394 v0 := b.NewValue0(v.Pos, OpConst32, t) 17395 v0.AuxInt = int64(int32(c | d)) 17396 v.AddArg(v0) 17397 v.AddArg(x) 17398 return true 17399 } 17400 // match: (Or32 (Or32 (Const32 <t> [d]) x) (Const32 <t> [c])) 17401 // cond: 17402 // result: (Or32 (Const32 <t> [int64(int32(c|d))]) x) 17403 for { 17404 _ = v.Args[1] 17405 v_0 := v.Args[0] 17406 if v_0.Op != OpOr32 { 17407 break 17408 } 17409 _ = v_0.Args[1] 17410 v_0_0 := v_0.Args[0] 17411 if v_0_0.Op != OpConst32 { 17412 break 17413 } 17414 t := v_0_0.Type 17415 d := v_0_0.AuxInt 17416 x := v_0.Args[1] 17417 v_1 := v.Args[1] 17418 if v_1.Op != OpConst32 { 17419 break 17420 } 17421 if v_1.Type != t { 17422 break 17423 } 17424 c := v_1.AuxInt 17425 v.reset(OpOr32) 17426 v0 := b.NewValue0(v.Pos, OpConst32, t) 17427 v0.AuxInt = int64(int32(c | d)) 17428 v.AddArg(v0) 17429 v.AddArg(x) 17430 return true 17431 } 17432 // match: (Or32 (Or32 x (Const32 <t> [d])) (Const32 <t> [c])) 17433 // cond: 17434 // result: (Or32 (Const32 <t> [int64(int32(c|d))]) x) 17435 for { 17436 _ = v.Args[1] 17437 v_0 := v.Args[0] 17438 if v_0.Op != OpOr32 { 17439 break 17440 } 17441 _ = v_0.Args[1] 17442 x := v_0.Args[0] 17443 v_0_1 := v_0.Args[1] 17444 if v_0_1.Op != OpConst32 { 17445 break 17446 } 17447 t := v_0_1.Type 17448 d := v_0_1.AuxInt 17449 v_1 := v.Args[1] 17450 if v_1.Op != OpConst32 { 17451 break 17452 } 17453 if v_1.Type != t { 17454 break 17455 } 17456 c := v_1.AuxInt 17457 v.reset(OpOr32) 17458 v0 := b.NewValue0(v.Pos, OpConst32, t) 17459 v0.AuxInt = int64(int32(c | d)) 17460 v.AddArg(v0) 17461 v.AddArg(x) 17462 return true 17463 } 17464 return false 17465 } 17466 func rewriteValuegeneric_OpOr64_0(v *Value) bool { 17467 // match: (Or64 (Const64 [c]) (Const64 [d])) 17468 // cond: 17469 // result: (Const64 [c|d]) 17470 for { 17471 _ = v.Args[1] 17472 v_0 := v.Args[0] 17473 if v_0.Op != OpConst64 { 17474 break 17475 } 17476 c := v_0.AuxInt 17477 v_1 := v.Args[1] 17478 if v_1.Op != OpConst64 { 17479 break 17480 } 17481 d := v_1.AuxInt 17482 v.reset(OpConst64) 17483 v.AuxInt = c | d 17484 return true 17485 } 17486 // match: (Or64 (Const64 [d]) (Const64 [c])) 17487 // cond: 17488 // result: (Const64 [c|d]) 17489 for { 17490 _ = v.Args[1] 17491 v_0 := v.Args[0] 17492 if v_0.Op != OpConst64 { 17493 break 17494 } 17495 d := v_0.AuxInt 17496 v_1 := v.Args[1] 17497 if v_1.Op != OpConst64 { 17498 break 17499 } 17500 c := v_1.AuxInt 17501 v.reset(OpConst64) 17502 v.AuxInt = c | d 17503 return true 17504 } 17505 // match: (Or64 x x) 17506 // cond: 17507 // result: x 17508 for { 17509 _ = v.Args[1] 17510 x := v.Args[0] 17511 if x != v.Args[1] { 17512 break 17513 } 17514 v.reset(OpCopy) 17515 v.Type = x.Type 17516 v.AddArg(x) 17517 return true 17518 } 17519 // match: (Or64 (Const64 [0]) x) 17520 // cond: 17521 // result: x 17522 for { 17523 _ = v.Args[1] 17524 v_0 := v.Args[0] 17525 if v_0.Op != OpConst64 { 17526 break 17527 } 17528 if v_0.AuxInt != 0 { 17529 break 17530 } 17531 x := v.Args[1] 17532 v.reset(OpCopy) 17533 v.Type = x.Type 17534 v.AddArg(x) 17535 return true 17536 } 17537 // match: (Or64 x (Const64 [0])) 17538 // cond: 17539 // result: x 17540 for { 17541 _ = v.Args[1] 17542 x := v.Args[0] 17543 v_1 := v.Args[1] 17544 if v_1.Op != OpConst64 { 17545 break 17546 } 17547 if v_1.AuxInt != 0 { 17548 break 17549 } 17550 v.reset(OpCopy) 17551 v.Type = x.Type 17552 v.AddArg(x) 17553 return true 17554 } 17555 // match: (Or64 (Const64 [-1]) _) 17556 // cond: 17557 // result: (Const64 [-1]) 17558 for { 17559 _ = v.Args[1] 17560 v_0 := v.Args[0] 17561 if v_0.Op != 
OpConst64 { 17562 break 17563 } 17564 if v_0.AuxInt != -1 { 17565 break 17566 } 17567 v.reset(OpConst64) 17568 v.AuxInt = -1 17569 return true 17570 } 17571 // match: (Or64 _ (Const64 [-1])) 17572 // cond: 17573 // result: (Const64 [-1]) 17574 for { 17575 _ = v.Args[1] 17576 v_1 := v.Args[1] 17577 if v_1.Op != OpConst64 { 17578 break 17579 } 17580 if v_1.AuxInt != -1 { 17581 break 17582 } 17583 v.reset(OpConst64) 17584 v.AuxInt = -1 17585 return true 17586 } 17587 // match: (Or64 x (Or64 x y)) 17588 // cond: 17589 // result: (Or64 x y) 17590 for { 17591 _ = v.Args[1] 17592 x := v.Args[0] 17593 v_1 := v.Args[1] 17594 if v_1.Op != OpOr64 { 17595 break 17596 } 17597 _ = v_1.Args[1] 17598 if x != v_1.Args[0] { 17599 break 17600 } 17601 y := v_1.Args[1] 17602 v.reset(OpOr64) 17603 v.AddArg(x) 17604 v.AddArg(y) 17605 return true 17606 } 17607 // match: (Or64 x (Or64 y x)) 17608 // cond: 17609 // result: (Or64 x y) 17610 for { 17611 _ = v.Args[1] 17612 x := v.Args[0] 17613 v_1 := v.Args[1] 17614 if v_1.Op != OpOr64 { 17615 break 17616 } 17617 _ = v_1.Args[1] 17618 y := v_1.Args[0] 17619 if x != v_1.Args[1] { 17620 break 17621 } 17622 v.reset(OpOr64) 17623 v.AddArg(x) 17624 v.AddArg(y) 17625 return true 17626 } 17627 // match: (Or64 (Or64 x y) x) 17628 // cond: 17629 // result: (Or64 x y) 17630 for { 17631 _ = v.Args[1] 17632 v_0 := v.Args[0] 17633 if v_0.Op != OpOr64 { 17634 break 17635 } 17636 _ = v_0.Args[1] 17637 x := v_0.Args[0] 17638 y := v_0.Args[1] 17639 if x != v.Args[1] { 17640 break 17641 } 17642 v.reset(OpOr64) 17643 v.AddArg(x) 17644 v.AddArg(y) 17645 return true 17646 } 17647 return false 17648 } 17649 func rewriteValuegeneric_OpOr64_10(v *Value) bool { 17650 b := v.Block 17651 _ = b 17652 // match: (Or64 (Or64 y x) x) 17653 // cond: 17654 // result: (Or64 x y) 17655 for { 17656 _ = v.Args[1] 17657 v_0 := v.Args[0] 17658 if v_0.Op != OpOr64 { 17659 break 17660 } 17661 _ = v_0.Args[1] 17662 y := v_0.Args[0] 17663 x := v_0.Args[1] 17664 if x != v.Args[1] { 17665 break 17666 } 17667 v.reset(OpOr64) 17668 v.AddArg(x) 17669 v.AddArg(y) 17670 return true 17671 } 17672 // match: (Or64 (And64 x (Const64 [c2])) (Const64 <t> [c1])) 17673 // cond: ^(c1 | c2) == 0 17674 // result: (Or64 (Const64 <t> [c1]) x) 17675 for { 17676 _ = v.Args[1] 17677 v_0 := v.Args[0] 17678 if v_0.Op != OpAnd64 { 17679 break 17680 } 17681 _ = v_0.Args[1] 17682 x := v_0.Args[0] 17683 v_0_1 := v_0.Args[1] 17684 if v_0_1.Op != OpConst64 { 17685 break 17686 } 17687 c2 := v_0_1.AuxInt 17688 v_1 := v.Args[1] 17689 if v_1.Op != OpConst64 { 17690 break 17691 } 17692 t := v_1.Type 17693 c1 := v_1.AuxInt 17694 if !(^(c1 | c2) == 0) { 17695 break 17696 } 17697 v.reset(OpOr64) 17698 v0 := b.NewValue0(v.Pos, OpConst64, t) 17699 v0.AuxInt = c1 17700 v.AddArg(v0) 17701 v.AddArg(x) 17702 return true 17703 } 17704 // match: (Or64 (And64 (Const64 [c2]) x) (Const64 <t> [c1])) 17705 // cond: ^(c1 | c2) == 0 17706 // result: (Or64 (Const64 <t> [c1]) x) 17707 for { 17708 _ = v.Args[1] 17709 v_0 := v.Args[0] 17710 if v_0.Op != OpAnd64 { 17711 break 17712 } 17713 _ = v_0.Args[1] 17714 v_0_0 := v_0.Args[0] 17715 if v_0_0.Op != OpConst64 { 17716 break 17717 } 17718 c2 := v_0_0.AuxInt 17719 x := v_0.Args[1] 17720 v_1 := v.Args[1] 17721 if v_1.Op != OpConst64 { 17722 break 17723 } 17724 t := v_1.Type 17725 c1 := v_1.AuxInt 17726 if !(^(c1 | c2) == 0) { 17727 break 17728 } 17729 v.reset(OpOr64) 17730 v0 := b.NewValue0(v.Pos, OpConst64, t) 17731 v0.AuxInt = c1 17732 v.AddArg(v0) 17733 v.AddArg(x) 17734 return true 17735 } 17736 // match: (Or64 
(Const64 <t> [c1]) (And64 x (Const64 [c2]))) 17737 // cond: ^(c1 | c2) == 0 17738 // result: (Or64 (Const64 <t> [c1]) x) 17739 for { 17740 _ = v.Args[1] 17741 v_0 := v.Args[0] 17742 if v_0.Op != OpConst64 { 17743 break 17744 } 17745 t := v_0.Type 17746 c1 := v_0.AuxInt 17747 v_1 := v.Args[1] 17748 if v_1.Op != OpAnd64 { 17749 break 17750 } 17751 _ = v_1.Args[1] 17752 x := v_1.Args[0] 17753 v_1_1 := v_1.Args[1] 17754 if v_1_1.Op != OpConst64 { 17755 break 17756 } 17757 c2 := v_1_1.AuxInt 17758 if !(^(c1 | c2) == 0) { 17759 break 17760 } 17761 v.reset(OpOr64) 17762 v0 := b.NewValue0(v.Pos, OpConst64, t) 17763 v0.AuxInt = c1 17764 v.AddArg(v0) 17765 v.AddArg(x) 17766 return true 17767 } 17768 // match: (Or64 (Const64 <t> [c1]) (And64 (Const64 [c2]) x)) 17769 // cond: ^(c1 | c2) == 0 17770 // result: (Or64 (Const64 <t> [c1]) x) 17771 for { 17772 _ = v.Args[1] 17773 v_0 := v.Args[0] 17774 if v_0.Op != OpConst64 { 17775 break 17776 } 17777 t := v_0.Type 17778 c1 := v_0.AuxInt 17779 v_1 := v.Args[1] 17780 if v_1.Op != OpAnd64 { 17781 break 17782 } 17783 _ = v_1.Args[1] 17784 v_1_0 := v_1.Args[0] 17785 if v_1_0.Op != OpConst64 { 17786 break 17787 } 17788 c2 := v_1_0.AuxInt 17789 x := v_1.Args[1] 17790 if !(^(c1 | c2) == 0) { 17791 break 17792 } 17793 v.reset(OpOr64) 17794 v0 := b.NewValue0(v.Pos, OpConst64, t) 17795 v0.AuxInt = c1 17796 v.AddArg(v0) 17797 v.AddArg(x) 17798 return true 17799 } 17800 // match: (Or64 (Or64 i:(Const64 <t>) z) x) 17801 // cond: (z.Op != OpConst64 && x.Op != OpConst64) 17802 // result: (Or64 i (Or64 <t> z x)) 17803 for { 17804 _ = v.Args[1] 17805 v_0 := v.Args[0] 17806 if v_0.Op != OpOr64 { 17807 break 17808 } 17809 _ = v_0.Args[1] 17810 i := v_0.Args[0] 17811 if i.Op != OpConst64 { 17812 break 17813 } 17814 t := i.Type 17815 z := v_0.Args[1] 17816 x := v.Args[1] 17817 if !(z.Op != OpConst64 && x.Op != OpConst64) { 17818 break 17819 } 17820 v.reset(OpOr64) 17821 v.AddArg(i) 17822 v0 := b.NewValue0(v.Pos, OpOr64, t) 17823 v0.AddArg(z) 17824 v0.AddArg(x) 17825 v.AddArg(v0) 17826 return true 17827 } 17828 // match: (Or64 (Or64 z i:(Const64 <t>)) x) 17829 // cond: (z.Op != OpConst64 && x.Op != OpConst64) 17830 // result: (Or64 i (Or64 <t> z x)) 17831 for { 17832 _ = v.Args[1] 17833 v_0 := v.Args[0] 17834 if v_0.Op != OpOr64 { 17835 break 17836 } 17837 _ = v_0.Args[1] 17838 z := v_0.Args[0] 17839 i := v_0.Args[1] 17840 if i.Op != OpConst64 { 17841 break 17842 } 17843 t := i.Type 17844 x := v.Args[1] 17845 if !(z.Op != OpConst64 && x.Op != OpConst64) { 17846 break 17847 } 17848 v.reset(OpOr64) 17849 v.AddArg(i) 17850 v0 := b.NewValue0(v.Pos, OpOr64, t) 17851 v0.AddArg(z) 17852 v0.AddArg(x) 17853 v.AddArg(v0) 17854 return true 17855 } 17856 // match: (Or64 x (Or64 i:(Const64 <t>) z)) 17857 // cond: (z.Op != OpConst64 && x.Op != OpConst64) 17858 // result: (Or64 i (Or64 <t> z x)) 17859 for { 17860 _ = v.Args[1] 17861 x := v.Args[0] 17862 v_1 := v.Args[1] 17863 if v_1.Op != OpOr64 { 17864 break 17865 } 17866 _ = v_1.Args[1] 17867 i := v_1.Args[0] 17868 if i.Op != OpConst64 { 17869 break 17870 } 17871 t := i.Type 17872 z := v_1.Args[1] 17873 if !(z.Op != OpConst64 && x.Op != OpConst64) { 17874 break 17875 } 17876 v.reset(OpOr64) 17877 v.AddArg(i) 17878 v0 := b.NewValue0(v.Pos, OpOr64, t) 17879 v0.AddArg(z) 17880 v0.AddArg(x) 17881 v.AddArg(v0) 17882 return true 17883 } 17884 // match: (Or64 x (Or64 z i:(Const64 <t>))) 17885 // cond: (z.Op != OpConst64 && x.Op != OpConst64) 17886 // result: (Or64 i (Or64 <t> z x)) 17887 for { 17888 _ = v.Args[1] 17889 x := v.Args[0] 17890 v_1 := 
v.Args[1] 17891 if v_1.Op != OpOr64 { 17892 break 17893 } 17894 _ = v_1.Args[1] 17895 z := v_1.Args[0] 17896 i := v_1.Args[1] 17897 if i.Op != OpConst64 { 17898 break 17899 } 17900 t := i.Type 17901 if !(z.Op != OpConst64 && x.Op != OpConst64) { 17902 break 17903 } 17904 v.reset(OpOr64) 17905 v.AddArg(i) 17906 v0 := b.NewValue0(v.Pos, OpOr64, t) 17907 v0.AddArg(z) 17908 v0.AddArg(x) 17909 v.AddArg(v0) 17910 return true 17911 } 17912 // match: (Or64 (Const64 <t> [c]) (Or64 (Const64 <t> [d]) x)) 17913 // cond: 17914 // result: (Or64 (Const64 <t> [c|d]) x) 17915 for { 17916 _ = v.Args[1] 17917 v_0 := v.Args[0] 17918 if v_0.Op != OpConst64 { 17919 break 17920 } 17921 t := v_0.Type 17922 c := v_0.AuxInt 17923 v_1 := v.Args[1] 17924 if v_1.Op != OpOr64 { 17925 break 17926 } 17927 _ = v_1.Args[1] 17928 v_1_0 := v_1.Args[0] 17929 if v_1_0.Op != OpConst64 { 17930 break 17931 } 17932 if v_1_0.Type != t { 17933 break 17934 } 17935 d := v_1_0.AuxInt 17936 x := v_1.Args[1] 17937 v.reset(OpOr64) 17938 v0 := b.NewValue0(v.Pos, OpConst64, t) 17939 v0.AuxInt = c | d 17940 v.AddArg(v0) 17941 v.AddArg(x) 17942 return true 17943 } 17944 return false 17945 } 17946 func rewriteValuegeneric_OpOr64_20(v *Value) bool { 17947 b := v.Block 17948 _ = b 17949 // match: (Or64 (Const64 <t> [c]) (Or64 x (Const64 <t> [d]))) 17950 // cond: 17951 // result: (Or64 (Const64 <t> [c|d]) x) 17952 for { 17953 _ = v.Args[1] 17954 v_0 := v.Args[0] 17955 if v_0.Op != OpConst64 { 17956 break 17957 } 17958 t := v_0.Type 17959 c := v_0.AuxInt 17960 v_1 := v.Args[1] 17961 if v_1.Op != OpOr64 { 17962 break 17963 } 17964 _ = v_1.Args[1] 17965 x := v_1.Args[0] 17966 v_1_1 := v_1.Args[1] 17967 if v_1_1.Op != OpConst64 { 17968 break 17969 } 17970 if v_1_1.Type != t { 17971 break 17972 } 17973 d := v_1_1.AuxInt 17974 v.reset(OpOr64) 17975 v0 := b.NewValue0(v.Pos, OpConst64, t) 17976 v0.AuxInt = c | d 17977 v.AddArg(v0) 17978 v.AddArg(x) 17979 return true 17980 } 17981 // match: (Or64 (Or64 (Const64 <t> [d]) x) (Const64 <t> [c])) 17982 // cond: 17983 // result: (Or64 (Const64 <t> [c|d]) x) 17984 for { 17985 _ = v.Args[1] 17986 v_0 := v.Args[0] 17987 if v_0.Op != OpOr64 { 17988 break 17989 } 17990 _ = v_0.Args[1] 17991 v_0_0 := v_0.Args[0] 17992 if v_0_0.Op != OpConst64 { 17993 break 17994 } 17995 t := v_0_0.Type 17996 d := v_0_0.AuxInt 17997 x := v_0.Args[1] 17998 v_1 := v.Args[1] 17999 if v_1.Op != OpConst64 { 18000 break 18001 } 18002 if v_1.Type != t { 18003 break 18004 } 18005 c := v_1.AuxInt 18006 v.reset(OpOr64) 18007 v0 := b.NewValue0(v.Pos, OpConst64, t) 18008 v0.AuxInt = c | d 18009 v.AddArg(v0) 18010 v.AddArg(x) 18011 return true 18012 } 18013 // match: (Or64 (Or64 x (Const64 <t> [d])) (Const64 <t> [c])) 18014 // cond: 18015 // result: (Or64 (Const64 <t> [c|d]) x) 18016 for { 18017 _ = v.Args[1] 18018 v_0 := v.Args[0] 18019 if v_0.Op != OpOr64 { 18020 break 18021 } 18022 _ = v_0.Args[1] 18023 x := v_0.Args[0] 18024 v_0_1 := v_0.Args[1] 18025 if v_0_1.Op != OpConst64 { 18026 break 18027 } 18028 t := v_0_1.Type 18029 d := v_0_1.AuxInt 18030 v_1 := v.Args[1] 18031 if v_1.Op != OpConst64 { 18032 break 18033 } 18034 if v_1.Type != t { 18035 break 18036 } 18037 c := v_1.AuxInt 18038 v.reset(OpOr64) 18039 v0 := b.NewValue0(v.Pos, OpConst64, t) 18040 v0.AuxInt = c | d 18041 v.AddArg(v0) 18042 v.AddArg(x) 18043 return true 18044 } 18045 return false 18046 } 18047 func rewriteValuegeneric_OpOr8_0(v *Value) bool { 18048 // match: (Or8 (Const8 [c]) (Const8 [d])) 18049 // cond: 18050 // result: (Const8 [int64(int8(c|d))]) 18051 for { 18052 _ = 
v.Args[1] 18053 v_0 := v.Args[0] 18054 if v_0.Op != OpConst8 { 18055 break 18056 } 18057 c := v_0.AuxInt 18058 v_1 := v.Args[1] 18059 if v_1.Op != OpConst8 { 18060 break 18061 } 18062 d := v_1.AuxInt 18063 v.reset(OpConst8) 18064 v.AuxInt = int64(int8(c | d)) 18065 return true 18066 } 18067 // match: (Or8 (Const8 [d]) (Const8 [c])) 18068 // cond: 18069 // result: (Const8 [int64(int8(c|d))]) 18070 for { 18071 _ = v.Args[1] 18072 v_0 := v.Args[0] 18073 if v_0.Op != OpConst8 { 18074 break 18075 } 18076 d := v_0.AuxInt 18077 v_1 := v.Args[1] 18078 if v_1.Op != OpConst8 { 18079 break 18080 } 18081 c := v_1.AuxInt 18082 v.reset(OpConst8) 18083 v.AuxInt = int64(int8(c | d)) 18084 return true 18085 } 18086 // match: (Or8 x x) 18087 // cond: 18088 // result: x 18089 for { 18090 _ = v.Args[1] 18091 x := v.Args[0] 18092 if x != v.Args[1] { 18093 break 18094 } 18095 v.reset(OpCopy) 18096 v.Type = x.Type 18097 v.AddArg(x) 18098 return true 18099 } 18100 // match: (Or8 (Const8 [0]) x) 18101 // cond: 18102 // result: x 18103 for { 18104 _ = v.Args[1] 18105 v_0 := v.Args[0] 18106 if v_0.Op != OpConst8 { 18107 break 18108 } 18109 if v_0.AuxInt != 0 { 18110 break 18111 } 18112 x := v.Args[1] 18113 v.reset(OpCopy) 18114 v.Type = x.Type 18115 v.AddArg(x) 18116 return true 18117 } 18118 // match: (Or8 x (Const8 [0])) 18119 // cond: 18120 // result: x 18121 for { 18122 _ = v.Args[1] 18123 x := v.Args[0] 18124 v_1 := v.Args[1] 18125 if v_1.Op != OpConst8 { 18126 break 18127 } 18128 if v_1.AuxInt != 0 { 18129 break 18130 } 18131 v.reset(OpCopy) 18132 v.Type = x.Type 18133 v.AddArg(x) 18134 return true 18135 } 18136 // match: (Or8 (Const8 [-1]) _) 18137 // cond: 18138 // result: (Const8 [-1]) 18139 for { 18140 _ = v.Args[1] 18141 v_0 := v.Args[0] 18142 if v_0.Op != OpConst8 { 18143 break 18144 } 18145 if v_0.AuxInt != -1 { 18146 break 18147 } 18148 v.reset(OpConst8) 18149 v.AuxInt = -1 18150 return true 18151 } 18152 // match: (Or8 _ (Const8 [-1])) 18153 // cond: 18154 // result: (Const8 [-1]) 18155 for { 18156 _ = v.Args[1] 18157 v_1 := v.Args[1] 18158 if v_1.Op != OpConst8 { 18159 break 18160 } 18161 if v_1.AuxInt != -1 { 18162 break 18163 } 18164 v.reset(OpConst8) 18165 v.AuxInt = -1 18166 return true 18167 } 18168 // match: (Or8 x (Or8 x y)) 18169 // cond: 18170 // result: (Or8 x y) 18171 for { 18172 _ = v.Args[1] 18173 x := v.Args[0] 18174 v_1 := v.Args[1] 18175 if v_1.Op != OpOr8 { 18176 break 18177 } 18178 _ = v_1.Args[1] 18179 if x != v_1.Args[0] { 18180 break 18181 } 18182 y := v_1.Args[1] 18183 v.reset(OpOr8) 18184 v.AddArg(x) 18185 v.AddArg(y) 18186 return true 18187 } 18188 // match: (Or8 x (Or8 y x)) 18189 // cond: 18190 // result: (Or8 x y) 18191 for { 18192 _ = v.Args[1] 18193 x := v.Args[0] 18194 v_1 := v.Args[1] 18195 if v_1.Op != OpOr8 { 18196 break 18197 } 18198 _ = v_1.Args[1] 18199 y := v_1.Args[0] 18200 if x != v_1.Args[1] { 18201 break 18202 } 18203 v.reset(OpOr8) 18204 v.AddArg(x) 18205 v.AddArg(y) 18206 return true 18207 } 18208 // match: (Or8 (Or8 x y) x) 18209 // cond: 18210 // result: (Or8 x y) 18211 for { 18212 _ = v.Args[1] 18213 v_0 := v.Args[0] 18214 if v_0.Op != OpOr8 { 18215 break 18216 } 18217 _ = v_0.Args[1] 18218 x := v_0.Args[0] 18219 y := v_0.Args[1] 18220 if x != v.Args[1] { 18221 break 18222 } 18223 v.reset(OpOr8) 18224 v.AddArg(x) 18225 v.AddArg(y) 18226 return true 18227 } 18228 return false 18229 } 18230 func rewriteValuegeneric_OpOr8_10(v *Value) bool { 18231 b := v.Block 18232 _ = b 18233 // match: (Or8 (Or8 y x) x) 18234 // cond: 18235 // result: (Or8 x y) 18236 for { 
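// The Or8 rules mirror the 16/32/64-bit forms above, with folded constants
// truncated to 8 bits via int64(int8(c|d)).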
18237 _ = v.Args[1] 18238 v_0 := v.Args[0] 18239 if v_0.Op != OpOr8 { 18240 break 18241 } 18242 _ = v_0.Args[1] 18243 y := v_0.Args[0] 18244 x := v_0.Args[1] 18245 if x != v.Args[1] { 18246 break 18247 } 18248 v.reset(OpOr8) 18249 v.AddArg(x) 18250 v.AddArg(y) 18251 return true 18252 } 18253 // match: (Or8 (And8 x (Const8 [c2])) (Const8 <t> [c1])) 18254 // cond: ^(c1 | c2) == 0 18255 // result: (Or8 (Const8 <t> [c1]) x) 18256 for { 18257 _ = v.Args[1] 18258 v_0 := v.Args[0] 18259 if v_0.Op != OpAnd8 { 18260 break 18261 } 18262 _ = v_0.Args[1] 18263 x := v_0.Args[0] 18264 v_0_1 := v_0.Args[1] 18265 if v_0_1.Op != OpConst8 { 18266 break 18267 } 18268 c2 := v_0_1.AuxInt 18269 v_1 := v.Args[1] 18270 if v_1.Op != OpConst8 { 18271 break 18272 } 18273 t := v_1.Type 18274 c1 := v_1.AuxInt 18275 if !(^(c1 | c2) == 0) { 18276 break 18277 } 18278 v.reset(OpOr8) 18279 v0 := b.NewValue0(v.Pos, OpConst8, t) 18280 v0.AuxInt = c1 18281 v.AddArg(v0) 18282 v.AddArg(x) 18283 return true 18284 } 18285 // match: (Or8 (And8 (Const8 [c2]) x) (Const8 <t> [c1])) 18286 // cond: ^(c1 | c2) == 0 18287 // result: (Or8 (Const8 <t> [c1]) x) 18288 for { 18289 _ = v.Args[1] 18290 v_0 := v.Args[0] 18291 if v_0.Op != OpAnd8 { 18292 break 18293 } 18294 _ = v_0.Args[1] 18295 v_0_0 := v_0.Args[0] 18296 if v_0_0.Op != OpConst8 { 18297 break 18298 } 18299 c2 := v_0_0.AuxInt 18300 x := v_0.Args[1] 18301 v_1 := v.Args[1] 18302 if v_1.Op != OpConst8 { 18303 break 18304 } 18305 t := v_1.Type 18306 c1 := v_1.AuxInt 18307 if !(^(c1 | c2) == 0) { 18308 break 18309 } 18310 v.reset(OpOr8) 18311 v0 := b.NewValue0(v.Pos, OpConst8, t) 18312 v0.AuxInt = c1 18313 v.AddArg(v0) 18314 v.AddArg(x) 18315 return true 18316 } 18317 // match: (Or8 (Const8 <t> [c1]) (And8 x (Const8 [c2]))) 18318 // cond: ^(c1 | c2) == 0 18319 // result: (Or8 (Const8 <t> [c1]) x) 18320 for { 18321 _ = v.Args[1] 18322 v_0 := v.Args[0] 18323 if v_0.Op != OpConst8 { 18324 break 18325 } 18326 t := v_0.Type 18327 c1 := v_0.AuxInt 18328 v_1 := v.Args[1] 18329 if v_1.Op != OpAnd8 { 18330 break 18331 } 18332 _ = v_1.Args[1] 18333 x := v_1.Args[0] 18334 v_1_1 := v_1.Args[1] 18335 if v_1_1.Op != OpConst8 { 18336 break 18337 } 18338 c2 := v_1_1.AuxInt 18339 if !(^(c1 | c2) == 0) { 18340 break 18341 } 18342 v.reset(OpOr8) 18343 v0 := b.NewValue0(v.Pos, OpConst8, t) 18344 v0.AuxInt = c1 18345 v.AddArg(v0) 18346 v.AddArg(x) 18347 return true 18348 } 18349 // match: (Or8 (Const8 <t> [c1]) (And8 (Const8 [c2]) x)) 18350 // cond: ^(c1 | c2) == 0 18351 // result: (Or8 (Const8 <t> [c1]) x) 18352 for { 18353 _ = v.Args[1] 18354 v_0 := v.Args[0] 18355 if v_0.Op != OpConst8 { 18356 break 18357 } 18358 t := v_0.Type 18359 c1 := v_0.AuxInt 18360 v_1 := v.Args[1] 18361 if v_1.Op != OpAnd8 { 18362 break 18363 } 18364 _ = v_1.Args[1] 18365 v_1_0 := v_1.Args[0] 18366 if v_1_0.Op != OpConst8 { 18367 break 18368 } 18369 c2 := v_1_0.AuxInt 18370 x := v_1.Args[1] 18371 if !(^(c1 | c2) == 0) { 18372 break 18373 } 18374 v.reset(OpOr8) 18375 v0 := b.NewValue0(v.Pos, OpConst8, t) 18376 v0.AuxInt = c1 18377 v.AddArg(v0) 18378 v.AddArg(x) 18379 return true 18380 } 18381 // match: (Or8 (Or8 i:(Const8 <t>) z) x) 18382 // cond: (z.Op != OpConst8 && x.Op != OpConst8) 18383 // result: (Or8 i (Or8 <t> z x)) 18384 for { 18385 _ = v.Args[1] 18386 v_0 := v.Args[0] 18387 if v_0.Op != OpOr8 { 18388 break 18389 } 18390 _ = v_0.Args[1] 18391 i := v_0.Args[0] 18392 if i.Op != OpConst8 { 18393 break 18394 } 18395 t := i.Type 18396 z := v_0.Args[1] 18397 x := v.Args[1] 18398 if !(z.Op != OpConst8 && x.Op != OpConst8) { 
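// The guard insists that neither z nor x is itself a constant: the rewrite only
// floats the constant i to the outer Or so the constant-merging rules below can
// combine it with another constant, and it leaves alone shapes that the plain
// constant-folding rules already handle.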
18399 break 18400 } 18401 v.reset(OpOr8) 18402 v.AddArg(i) 18403 v0 := b.NewValue0(v.Pos, OpOr8, t) 18404 v0.AddArg(z) 18405 v0.AddArg(x) 18406 v.AddArg(v0) 18407 return true 18408 } 18409 // match: (Or8 (Or8 z i:(Const8 <t>)) x) 18410 // cond: (z.Op != OpConst8 && x.Op != OpConst8) 18411 // result: (Or8 i (Or8 <t> z x)) 18412 for { 18413 _ = v.Args[1] 18414 v_0 := v.Args[0] 18415 if v_0.Op != OpOr8 { 18416 break 18417 } 18418 _ = v_0.Args[1] 18419 z := v_0.Args[0] 18420 i := v_0.Args[1] 18421 if i.Op != OpConst8 { 18422 break 18423 } 18424 t := i.Type 18425 x := v.Args[1] 18426 if !(z.Op != OpConst8 && x.Op != OpConst8) { 18427 break 18428 } 18429 v.reset(OpOr8) 18430 v.AddArg(i) 18431 v0 := b.NewValue0(v.Pos, OpOr8, t) 18432 v0.AddArg(z) 18433 v0.AddArg(x) 18434 v.AddArg(v0) 18435 return true 18436 } 18437 // match: (Or8 x (Or8 i:(Const8 <t>) z)) 18438 // cond: (z.Op != OpConst8 && x.Op != OpConst8) 18439 // result: (Or8 i (Or8 <t> z x)) 18440 for { 18441 _ = v.Args[1] 18442 x := v.Args[0] 18443 v_1 := v.Args[1] 18444 if v_1.Op != OpOr8 { 18445 break 18446 } 18447 _ = v_1.Args[1] 18448 i := v_1.Args[0] 18449 if i.Op != OpConst8 { 18450 break 18451 } 18452 t := i.Type 18453 z := v_1.Args[1] 18454 if !(z.Op != OpConst8 && x.Op != OpConst8) { 18455 break 18456 } 18457 v.reset(OpOr8) 18458 v.AddArg(i) 18459 v0 := b.NewValue0(v.Pos, OpOr8, t) 18460 v0.AddArg(z) 18461 v0.AddArg(x) 18462 v.AddArg(v0) 18463 return true 18464 } 18465 // match: (Or8 x (Or8 z i:(Const8 <t>))) 18466 // cond: (z.Op != OpConst8 && x.Op != OpConst8) 18467 // result: (Or8 i (Or8 <t> z x)) 18468 for { 18469 _ = v.Args[1] 18470 x := v.Args[0] 18471 v_1 := v.Args[1] 18472 if v_1.Op != OpOr8 { 18473 break 18474 } 18475 _ = v_1.Args[1] 18476 z := v_1.Args[0] 18477 i := v_1.Args[1] 18478 if i.Op != OpConst8 { 18479 break 18480 } 18481 t := i.Type 18482 if !(z.Op != OpConst8 && x.Op != OpConst8) { 18483 break 18484 } 18485 v.reset(OpOr8) 18486 v.AddArg(i) 18487 v0 := b.NewValue0(v.Pos, OpOr8, t) 18488 v0.AddArg(z) 18489 v0.AddArg(x) 18490 v.AddArg(v0) 18491 return true 18492 } 18493 // match: (Or8 (Const8 <t> [c]) (Or8 (Const8 <t> [d]) x)) 18494 // cond: 18495 // result: (Or8 (Const8 <t> [int64(int8(c|d))]) x) 18496 for { 18497 _ = v.Args[1] 18498 v_0 := v.Args[0] 18499 if v_0.Op != OpConst8 { 18500 break 18501 } 18502 t := v_0.Type 18503 c := v_0.AuxInt 18504 v_1 := v.Args[1] 18505 if v_1.Op != OpOr8 { 18506 break 18507 } 18508 _ = v_1.Args[1] 18509 v_1_0 := v_1.Args[0] 18510 if v_1_0.Op != OpConst8 { 18511 break 18512 } 18513 if v_1_0.Type != t { 18514 break 18515 } 18516 d := v_1_0.AuxInt 18517 x := v_1.Args[1] 18518 v.reset(OpOr8) 18519 v0 := b.NewValue0(v.Pos, OpConst8, t) 18520 v0.AuxInt = int64(int8(c | d)) 18521 v.AddArg(v0) 18522 v.AddArg(x) 18523 return true 18524 } 18525 return false 18526 } 18527 func rewriteValuegeneric_OpOr8_20(v *Value) bool { 18528 b := v.Block 18529 _ = b 18530 // match: (Or8 (Const8 <t> [c]) (Or8 x (Const8 <t> [d]))) 18531 // cond: 18532 // result: (Or8 (Const8 <t> [int64(int8(c|d))]) x) 18533 for { 18534 _ = v.Args[1] 18535 v_0 := v.Args[0] 18536 if v_0.Op != OpConst8 { 18537 break 18538 } 18539 t := v_0.Type 18540 c := v_0.AuxInt 18541 v_1 := v.Args[1] 18542 if v_1.Op != OpOr8 { 18543 break 18544 } 18545 _ = v_1.Args[1] 18546 x := v_1.Args[0] 18547 v_1_1 := v_1.Args[1] 18548 if v_1_1.Op != OpConst8 { 18549 break 18550 } 18551 if v_1_1.Type != t { 18552 break 18553 } 18554 d := v_1_1.AuxInt 18555 v.reset(OpOr8) 18556 v0 := b.NewValue0(v.Pos, OpConst8, t) 18557 v0.AuxInt = int64(int8(c | d)) 
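// AuxInt always holds the constant sign-extended to 64 bits, so the merged 8-bit
// value is truncated to int8 and widened back to int64 before being stored.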
18558 v.AddArg(v0) 18559 v.AddArg(x) 18560 return true 18561 } 18562 // match: (Or8 (Or8 (Const8 <t> [d]) x) (Const8 <t> [c])) 18563 // cond: 18564 // result: (Or8 (Const8 <t> [int64(int8(c|d))]) x) 18565 for { 18566 _ = v.Args[1] 18567 v_0 := v.Args[0] 18568 if v_0.Op != OpOr8 { 18569 break 18570 } 18571 _ = v_0.Args[1] 18572 v_0_0 := v_0.Args[0] 18573 if v_0_0.Op != OpConst8 { 18574 break 18575 } 18576 t := v_0_0.Type 18577 d := v_0_0.AuxInt 18578 x := v_0.Args[1] 18579 v_1 := v.Args[1] 18580 if v_1.Op != OpConst8 { 18581 break 18582 } 18583 if v_1.Type != t { 18584 break 18585 } 18586 c := v_1.AuxInt 18587 v.reset(OpOr8) 18588 v0 := b.NewValue0(v.Pos, OpConst8, t) 18589 v0.AuxInt = int64(int8(c | d)) 18590 v.AddArg(v0) 18591 v.AddArg(x) 18592 return true 18593 } 18594 // match: (Or8 (Or8 x (Const8 <t> [d])) (Const8 <t> [c])) 18595 // cond: 18596 // result: (Or8 (Const8 <t> [int64(int8(c|d))]) x) 18597 for { 18598 _ = v.Args[1] 18599 v_0 := v.Args[0] 18600 if v_0.Op != OpOr8 { 18601 break 18602 } 18603 _ = v_0.Args[1] 18604 x := v_0.Args[0] 18605 v_0_1 := v_0.Args[1] 18606 if v_0_1.Op != OpConst8 { 18607 break 18608 } 18609 t := v_0_1.Type 18610 d := v_0_1.AuxInt 18611 v_1 := v.Args[1] 18612 if v_1.Op != OpConst8 { 18613 break 18614 } 18615 if v_1.Type != t { 18616 break 18617 } 18618 c := v_1.AuxInt 18619 v.reset(OpOr8) 18620 v0 := b.NewValue0(v.Pos, OpConst8, t) 18621 v0.AuxInt = int64(int8(c | d)) 18622 v.AddArg(v0) 18623 v.AddArg(x) 18624 return true 18625 } 18626 return false 18627 } 18628 func rewriteValuegeneric_OpPhi_0(v *Value) bool { 18629 // match: (Phi (Const8 [c]) (Const8 [c])) 18630 // cond: 18631 // result: (Const8 [c]) 18632 for { 18633 _ = v.Args[1] 18634 v_0 := v.Args[0] 18635 if v_0.Op != OpConst8 { 18636 break 18637 } 18638 c := v_0.AuxInt 18639 v_1 := v.Args[1] 18640 if v_1.Op != OpConst8 { 18641 break 18642 } 18643 if v_1.AuxInt != c { 18644 break 18645 } 18646 if len(v.Args) != 2 { 18647 break 18648 } 18649 v.reset(OpConst8) 18650 v.AuxInt = c 18651 return true 18652 } 18653 // match: (Phi (Const16 [c]) (Const16 [c])) 18654 // cond: 18655 // result: (Const16 [c]) 18656 for { 18657 _ = v.Args[1] 18658 v_0 := v.Args[0] 18659 if v_0.Op != OpConst16 { 18660 break 18661 } 18662 c := v_0.AuxInt 18663 v_1 := v.Args[1] 18664 if v_1.Op != OpConst16 { 18665 break 18666 } 18667 if v_1.AuxInt != c { 18668 break 18669 } 18670 if len(v.Args) != 2 { 18671 break 18672 } 18673 v.reset(OpConst16) 18674 v.AuxInt = c 18675 return true 18676 } 18677 // match: (Phi (Const32 [c]) (Const32 [c])) 18678 // cond: 18679 // result: (Const32 [c]) 18680 for { 18681 _ = v.Args[1] 18682 v_0 := v.Args[0] 18683 if v_0.Op != OpConst32 { 18684 break 18685 } 18686 c := v_0.AuxInt 18687 v_1 := v.Args[1] 18688 if v_1.Op != OpConst32 { 18689 break 18690 } 18691 if v_1.AuxInt != c { 18692 break 18693 } 18694 if len(v.Args) != 2 { 18695 break 18696 } 18697 v.reset(OpConst32) 18698 v.AuxInt = c 18699 return true 18700 } 18701 // match: (Phi (Const64 [c]) (Const64 [c])) 18702 // cond: 18703 // result: (Const64 [c]) 18704 for { 18705 _ = v.Args[1] 18706 v_0 := v.Args[0] 18707 if v_0.Op != OpConst64 { 18708 break 18709 } 18710 c := v_0.AuxInt 18711 v_1 := v.Args[1] 18712 if v_1.Op != OpConst64 { 18713 break 18714 } 18715 if v_1.AuxInt != c { 18716 break 18717 } 18718 if len(v.Args) != 2 { 18719 break 18720 } 18721 v.reset(OpConst64) 18722 v.AuxInt = c 18723 return true 18724 } 18725 return false 18726 } 18727 func rewriteValuegeneric_OpPtrIndex_0(v *Value) bool { 18728 b := v.Block 18729 _ = b 18730 config := 
b.Func.Config 18731 _ = config 18732 typ := &b.Func.Config.Types 18733 _ = typ 18734 // match: (PtrIndex <t> ptr idx) 18735 // cond: config.PtrSize == 4 18736 // result: (AddPtr ptr (Mul32 <typ.Int> idx (Const32 <typ.Int> [t.ElemType().Size()]))) 18737 for { 18738 t := v.Type 18739 _ = v.Args[1] 18740 ptr := v.Args[0] 18741 idx := v.Args[1] 18742 if !(config.PtrSize == 4) { 18743 break 18744 } 18745 v.reset(OpAddPtr) 18746 v.AddArg(ptr) 18747 v0 := b.NewValue0(v.Pos, OpMul32, typ.Int) 18748 v0.AddArg(idx) 18749 v1 := b.NewValue0(v.Pos, OpConst32, typ.Int) 18750 v1.AuxInt = t.ElemType().Size() 18751 v0.AddArg(v1) 18752 v.AddArg(v0) 18753 return true 18754 } 18755 // match: (PtrIndex <t> ptr idx) 18756 // cond: config.PtrSize == 8 18757 // result: (AddPtr ptr (Mul64 <typ.Int> idx (Const64 <typ.Int> [t.ElemType().Size()]))) 18758 for { 18759 t := v.Type 18760 _ = v.Args[1] 18761 ptr := v.Args[0] 18762 idx := v.Args[1] 18763 if !(config.PtrSize == 8) { 18764 break 18765 } 18766 v.reset(OpAddPtr) 18767 v.AddArg(ptr) 18768 v0 := b.NewValue0(v.Pos, OpMul64, typ.Int) 18769 v0.AddArg(idx) 18770 v1 := b.NewValue0(v.Pos, OpConst64, typ.Int) 18771 v1.AuxInt = t.ElemType().Size() 18772 v0.AddArg(v1) 18773 v.AddArg(v0) 18774 return true 18775 } 18776 return false 18777 } 18778 func rewriteValuegeneric_OpRound32F_0(v *Value) bool { 18779 // match: (Round32F x:(Const32F)) 18780 // cond: 18781 // result: x 18782 for { 18783 x := v.Args[0] 18784 if x.Op != OpConst32F { 18785 break 18786 } 18787 v.reset(OpCopy) 18788 v.Type = x.Type 18789 v.AddArg(x) 18790 return true 18791 } 18792 return false 18793 } 18794 func rewriteValuegeneric_OpRound64F_0(v *Value) bool { 18795 // match: (Round64F x:(Const64F)) 18796 // cond: 18797 // result: x 18798 for { 18799 x := v.Args[0] 18800 if x.Op != OpConst64F { 18801 break 18802 } 18803 v.reset(OpCopy) 18804 v.Type = x.Type 18805 v.AddArg(x) 18806 return true 18807 } 18808 return false 18809 } 18810 func rewriteValuegeneric_OpRsh16Ux16_0(v *Value) bool { 18811 b := v.Block 18812 _ = b 18813 // match: (Rsh16Ux16 <t> x (Const16 [c])) 18814 // cond: 18815 // result: (Rsh16Ux64 x (Const64 <t> [int64(uint16(c))])) 18816 for { 18817 t := v.Type 18818 _ = v.Args[1] 18819 x := v.Args[0] 18820 v_1 := v.Args[1] 18821 if v_1.Op != OpConst16 { 18822 break 18823 } 18824 c := v_1.AuxInt 18825 v.reset(OpRsh16Ux64) 18826 v.AddArg(x) 18827 v0 := b.NewValue0(v.Pos, OpConst64, t) 18828 v0.AuxInt = int64(uint16(c)) 18829 v.AddArg(v0) 18830 return true 18831 } 18832 // match: (Rsh16Ux16 (Const16 [0]) _) 18833 // cond: 18834 // result: (Const16 [0]) 18835 for { 18836 _ = v.Args[1] 18837 v_0 := v.Args[0] 18838 if v_0.Op != OpConst16 { 18839 break 18840 } 18841 if v_0.AuxInt != 0 { 18842 break 18843 } 18844 v.reset(OpConst16) 18845 v.AuxInt = 0 18846 return true 18847 } 18848 return false 18849 } 18850 func rewriteValuegeneric_OpRsh16Ux32_0(v *Value) bool { 18851 b := v.Block 18852 _ = b 18853 // match: (Rsh16Ux32 <t> x (Const32 [c])) 18854 // cond: 18855 // result: (Rsh16Ux64 x (Const64 <t> [int64(uint32(c))])) 18856 for { 18857 t := v.Type 18858 _ = v.Args[1] 18859 x := v.Args[0] 18860 v_1 := v.Args[1] 18861 if v_1.Op != OpConst32 { 18862 break 18863 } 18864 c := v_1.AuxInt 18865 v.reset(OpRsh16Ux64) 18866 v.AddArg(x) 18867 v0 := b.NewValue0(v.Pos, OpConst64, t) 18868 v0.AuxInt = int64(uint32(c)) 18869 v.AddArg(v0) 18870 return true 18871 } 18872 // match: (Rsh16Ux32 (Const16 [0]) _) 18873 // cond: 18874 // result: (Const16 [0]) 18875 for { 18876 _ = v.Args[1] 18877 v_0 := v.Args[0] 18878 if 
v_0.Op != OpConst16 { 18879 break 18880 } 18881 if v_0.AuxInt != 0 { 18882 break 18883 } 18884 v.reset(OpConst16) 18885 v.AuxInt = 0 18886 return true 18887 } 18888 return false 18889 } 18890 func rewriteValuegeneric_OpRsh16Ux64_0(v *Value) bool { 18891 b := v.Block 18892 _ = b 18893 typ := &b.Func.Config.Types 18894 _ = typ 18895 // match: (Rsh16Ux64 (Const16 [c]) (Const64 [d])) 18896 // cond: 18897 // result: (Const16 [int64(int16(uint16(c) >> uint64(d)))]) 18898 for { 18899 _ = v.Args[1] 18900 v_0 := v.Args[0] 18901 if v_0.Op != OpConst16 { 18902 break 18903 } 18904 c := v_0.AuxInt 18905 v_1 := v.Args[1] 18906 if v_1.Op != OpConst64 { 18907 break 18908 } 18909 d := v_1.AuxInt 18910 v.reset(OpConst16) 18911 v.AuxInt = int64(int16(uint16(c) >> uint64(d))) 18912 return true 18913 } 18914 // match: (Rsh16Ux64 x (Const64 [0])) 18915 // cond: 18916 // result: x 18917 for { 18918 _ = v.Args[1] 18919 x := v.Args[0] 18920 v_1 := v.Args[1] 18921 if v_1.Op != OpConst64 { 18922 break 18923 } 18924 if v_1.AuxInt != 0 { 18925 break 18926 } 18927 v.reset(OpCopy) 18928 v.Type = x.Type 18929 v.AddArg(x) 18930 return true 18931 } 18932 // match: (Rsh16Ux64 (Const16 [0]) _) 18933 // cond: 18934 // result: (Const16 [0]) 18935 for { 18936 _ = v.Args[1] 18937 v_0 := v.Args[0] 18938 if v_0.Op != OpConst16 { 18939 break 18940 } 18941 if v_0.AuxInt != 0 { 18942 break 18943 } 18944 v.reset(OpConst16) 18945 v.AuxInt = 0 18946 return true 18947 } 18948 // match: (Rsh16Ux64 _ (Const64 [c])) 18949 // cond: uint64(c) >= 16 18950 // result: (Const16 [0]) 18951 for { 18952 _ = v.Args[1] 18953 v_1 := v.Args[1] 18954 if v_1.Op != OpConst64 { 18955 break 18956 } 18957 c := v_1.AuxInt 18958 if !(uint64(c) >= 16) { 18959 break 18960 } 18961 v.reset(OpConst16) 18962 v.AuxInt = 0 18963 return true 18964 } 18965 // match: (Rsh16Ux64 <t> (Rsh16Ux64 x (Const64 [c])) (Const64 [d])) 18966 // cond: !uaddOvf(c,d) 18967 // result: (Rsh16Ux64 x (Const64 <t> [c+d])) 18968 for { 18969 t := v.Type 18970 _ = v.Args[1] 18971 v_0 := v.Args[0] 18972 if v_0.Op != OpRsh16Ux64 { 18973 break 18974 } 18975 _ = v_0.Args[1] 18976 x := v_0.Args[0] 18977 v_0_1 := v_0.Args[1] 18978 if v_0_1.Op != OpConst64 { 18979 break 18980 } 18981 c := v_0_1.AuxInt 18982 v_1 := v.Args[1] 18983 if v_1.Op != OpConst64 { 18984 break 18985 } 18986 d := v_1.AuxInt 18987 if !(!uaddOvf(c, d)) { 18988 break 18989 } 18990 v.reset(OpRsh16Ux64) 18991 v.AddArg(x) 18992 v0 := b.NewValue0(v.Pos, OpConst64, t) 18993 v0.AuxInt = c + d 18994 v.AddArg(v0) 18995 return true 18996 } 18997 // match: (Rsh16Ux64 (Lsh16x64 (Rsh16Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) 18998 // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3) 18999 // result: (Rsh16Ux64 x (Const64 <typ.UInt64> [c1-c2+c3])) 19000 for { 19001 _ = v.Args[1] 19002 v_0 := v.Args[0] 19003 if v_0.Op != OpLsh16x64 { 19004 break 19005 } 19006 _ = v_0.Args[1] 19007 v_0_0 := v_0.Args[0] 19008 if v_0_0.Op != OpRsh16Ux64 { 19009 break 19010 } 19011 _ = v_0_0.Args[1] 19012 x := v_0_0.Args[0] 19013 v_0_0_1 := v_0_0.Args[1] 19014 if v_0_0_1.Op != OpConst64 { 19015 break 19016 } 19017 c1 := v_0_0_1.AuxInt 19018 v_0_1 := v_0.Args[1] 19019 if v_0_1.Op != OpConst64 { 19020 break 19021 } 19022 c2 := v_0_1.AuxInt 19023 v_1 := v.Args[1] 19024 if v_1.Op != OpConst64 { 19025 break 19026 } 19027 c3 := v_1.AuxInt 19028 if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { 19029 break 19030 } 19031 v.reset(OpRsh16Ux64) 19032 v.AddArg(x) 19033 v0 := b.NewValue0(v.Pos, 
OpConst64, typ.UInt64) 19034 v0.AuxInt = c1 - c2 + c3 19035 v.AddArg(v0) 19036 return true 19037 } 19038 // match: (Rsh16Ux64 (Lsh16x64 x (Const64 [8])) (Const64 [8])) 19039 // cond: 19040 // result: (ZeroExt8to16 (Trunc16to8 <typ.UInt8> x)) 19041 for { 19042 _ = v.Args[1] 19043 v_0 := v.Args[0] 19044 if v_0.Op != OpLsh16x64 { 19045 break 19046 } 19047 _ = v_0.Args[1] 19048 x := v_0.Args[0] 19049 v_0_1 := v_0.Args[1] 19050 if v_0_1.Op != OpConst64 { 19051 break 19052 } 19053 if v_0_1.AuxInt != 8 { 19054 break 19055 } 19056 v_1 := v.Args[1] 19057 if v_1.Op != OpConst64 { 19058 break 19059 } 19060 if v_1.AuxInt != 8 { 19061 break 19062 } 19063 v.reset(OpZeroExt8to16) 19064 v0 := b.NewValue0(v.Pos, OpTrunc16to8, typ.UInt8) 19065 v0.AddArg(x) 19066 v.AddArg(v0) 19067 return true 19068 } 19069 return false 19070 } 19071 func rewriteValuegeneric_OpRsh16Ux8_0(v *Value) bool { 19072 b := v.Block 19073 _ = b 19074 // match: (Rsh16Ux8 <t> x (Const8 [c])) 19075 // cond: 19076 // result: (Rsh16Ux64 x (Const64 <t> [int64(uint8(c))])) 19077 for { 19078 t := v.Type 19079 _ = v.Args[1] 19080 x := v.Args[0] 19081 v_1 := v.Args[1] 19082 if v_1.Op != OpConst8 { 19083 break 19084 } 19085 c := v_1.AuxInt 19086 v.reset(OpRsh16Ux64) 19087 v.AddArg(x) 19088 v0 := b.NewValue0(v.Pos, OpConst64, t) 19089 v0.AuxInt = int64(uint8(c)) 19090 v.AddArg(v0) 19091 return true 19092 } 19093 // match: (Rsh16Ux8 (Const16 [0]) _) 19094 // cond: 19095 // result: (Const16 [0]) 19096 for { 19097 _ = v.Args[1] 19098 v_0 := v.Args[0] 19099 if v_0.Op != OpConst16 { 19100 break 19101 } 19102 if v_0.AuxInt != 0 { 19103 break 19104 } 19105 v.reset(OpConst16) 19106 v.AuxInt = 0 19107 return true 19108 } 19109 return false 19110 } 19111 func rewriteValuegeneric_OpRsh16x16_0(v *Value) bool { 19112 b := v.Block 19113 _ = b 19114 // match: (Rsh16x16 <t> x (Const16 [c])) 19115 // cond: 19116 // result: (Rsh16x64 x (Const64 <t> [int64(uint16(c))])) 19117 for { 19118 t := v.Type 19119 _ = v.Args[1] 19120 x := v.Args[0] 19121 v_1 := v.Args[1] 19122 if v_1.Op != OpConst16 { 19123 break 19124 } 19125 c := v_1.AuxInt 19126 v.reset(OpRsh16x64) 19127 v.AddArg(x) 19128 v0 := b.NewValue0(v.Pos, OpConst64, t) 19129 v0.AuxInt = int64(uint16(c)) 19130 v.AddArg(v0) 19131 return true 19132 } 19133 // match: (Rsh16x16 (Const16 [0]) _) 19134 // cond: 19135 // result: (Const16 [0]) 19136 for { 19137 _ = v.Args[1] 19138 v_0 := v.Args[0] 19139 if v_0.Op != OpConst16 { 19140 break 19141 } 19142 if v_0.AuxInt != 0 { 19143 break 19144 } 19145 v.reset(OpConst16) 19146 v.AuxInt = 0 19147 return true 19148 } 19149 return false 19150 } 19151 func rewriteValuegeneric_OpRsh16x32_0(v *Value) bool { 19152 b := v.Block 19153 _ = b 19154 // match: (Rsh16x32 <t> x (Const32 [c])) 19155 // cond: 19156 // result: (Rsh16x64 x (Const64 <t> [int64(uint32(c))])) 19157 for { 19158 t := v.Type 19159 _ = v.Args[1] 19160 x := v.Args[0] 19161 v_1 := v.Args[1] 19162 if v_1.Op != OpConst32 { 19163 break 19164 } 19165 c := v_1.AuxInt 19166 v.reset(OpRsh16x64) 19167 v.AddArg(x) 19168 v0 := b.NewValue0(v.Pos, OpConst64, t) 19169 v0.AuxInt = int64(uint32(c)) 19170 v.AddArg(v0) 19171 return true 19172 } 19173 // match: (Rsh16x32 (Const16 [0]) _) 19174 // cond: 19175 // result: (Const16 [0]) 19176 for { 19177 _ = v.Args[1] 19178 v_0 := v.Args[0] 19179 if v_0.Op != OpConst16 { 19180 break 19181 } 19182 if v_0.AuxInt != 0 { 19183 break 19184 } 19185 v.reset(OpConst16) 19186 v.AuxInt = 0 19187 return true 19188 } 19189 return false 19190 } 19191 func rewriteValuegeneric_OpRsh16x64_0(v *Value) bool 
{ 19192 b := v.Block 19193 _ = b 19194 typ := &b.Func.Config.Types 19195 _ = typ 19196 // match: (Rsh16x64 (Const16 [c]) (Const64 [d])) 19197 // cond: 19198 // result: (Const16 [int64(int16(c) >> uint64(d))]) 19199 for { 19200 _ = v.Args[1] 19201 v_0 := v.Args[0] 19202 if v_0.Op != OpConst16 { 19203 break 19204 } 19205 c := v_0.AuxInt 19206 v_1 := v.Args[1] 19207 if v_1.Op != OpConst64 { 19208 break 19209 } 19210 d := v_1.AuxInt 19211 v.reset(OpConst16) 19212 v.AuxInt = int64(int16(c) >> uint64(d)) 19213 return true 19214 } 19215 // match: (Rsh16x64 x (Const64 [0])) 19216 // cond: 19217 // result: x 19218 for { 19219 _ = v.Args[1] 19220 x := v.Args[0] 19221 v_1 := v.Args[1] 19222 if v_1.Op != OpConst64 { 19223 break 19224 } 19225 if v_1.AuxInt != 0 { 19226 break 19227 } 19228 v.reset(OpCopy) 19229 v.Type = x.Type 19230 v.AddArg(x) 19231 return true 19232 } 19233 // match: (Rsh16x64 (Const16 [0]) _) 19234 // cond: 19235 // result: (Const16 [0]) 19236 for { 19237 _ = v.Args[1] 19238 v_0 := v.Args[0] 19239 if v_0.Op != OpConst16 { 19240 break 19241 } 19242 if v_0.AuxInt != 0 { 19243 break 19244 } 19245 v.reset(OpConst16) 19246 v.AuxInt = 0 19247 return true 19248 } 19249 // match: (Rsh16x64 <t> (Rsh16x64 x (Const64 [c])) (Const64 [d])) 19250 // cond: !uaddOvf(c,d) 19251 // result: (Rsh16x64 x (Const64 <t> [c+d])) 19252 for { 19253 t := v.Type 19254 _ = v.Args[1] 19255 v_0 := v.Args[0] 19256 if v_0.Op != OpRsh16x64 { 19257 break 19258 } 19259 _ = v_0.Args[1] 19260 x := v_0.Args[0] 19261 v_0_1 := v_0.Args[1] 19262 if v_0_1.Op != OpConst64 { 19263 break 19264 } 19265 c := v_0_1.AuxInt 19266 v_1 := v.Args[1] 19267 if v_1.Op != OpConst64 { 19268 break 19269 } 19270 d := v_1.AuxInt 19271 if !(!uaddOvf(c, d)) { 19272 break 19273 } 19274 v.reset(OpRsh16x64) 19275 v.AddArg(x) 19276 v0 := b.NewValue0(v.Pos, OpConst64, t) 19277 v0.AuxInt = c + d 19278 v.AddArg(v0) 19279 return true 19280 } 19281 // match: (Rsh16x64 (Lsh16x64 x (Const64 [8])) (Const64 [8])) 19282 // cond: 19283 // result: (SignExt8to16 (Trunc16to8 <typ.Int8> x)) 19284 for { 19285 _ = v.Args[1] 19286 v_0 := v.Args[0] 19287 if v_0.Op != OpLsh16x64 { 19288 break 19289 } 19290 _ = v_0.Args[1] 19291 x := v_0.Args[0] 19292 v_0_1 := v_0.Args[1] 19293 if v_0_1.Op != OpConst64 { 19294 break 19295 } 19296 if v_0_1.AuxInt != 8 { 19297 break 19298 } 19299 v_1 := v.Args[1] 19300 if v_1.Op != OpConst64 { 19301 break 19302 } 19303 if v_1.AuxInt != 8 { 19304 break 19305 } 19306 v.reset(OpSignExt8to16) 19307 v0 := b.NewValue0(v.Pos, OpTrunc16to8, typ.Int8) 19308 v0.AddArg(x) 19309 v.AddArg(v0) 19310 return true 19311 } 19312 return false 19313 } 19314 func rewriteValuegeneric_OpRsh16x8_0(v *Value) bool { 19315 b := v.Block 19316 _ = b 19317 // match: (Rsh16x8 <t> x (Const8 [c])) 19318 // cond: 19319 // result: (Rsh16x64 x (Const64 <t> [int64(uint8(c))])) 19320 for { 19321 t := v.Type 19322 _ = v.Args[1] 19323 x := v.Args[0] 19324 v_1 := v.Args[1] 19325 if v_1.Op != OpConst8 { 19326 break 19327 } 19328 c := v_1.AuxInt 19329 v.reset(OpRsh16x64) 19330 v.AddArg(x) 19331 v0 := b.NewValue0(v.Pos, OpConst64, t) 19332 v0.AuxInt = int64(uint8(c)) 19333 v.AddArg(v0) 19334 return true 19335 } 19336 // match: (Rsh16x8 (Const16 [0]) _) 19337 // cond: 19338 // result: (Const16 [0]) 19339 for { 19340 _ = v.Args[1] 19341 v_0 := v.Args[0] 19342 if v_0.Op != OpConst16 { 19343 break 19344 } 19345 if v_0.AuxInt != 0 { 19346 break 19347 } 19348 v.reset(OpConst16) 19349 v.AuxInt = 0 19350 return true 19351 } 19352 return false 19353 } 19354 func 
rewriteValuegeneric_OpRsh32Ux16_0(v *Value) bool { 19355 b := v.Block 19356 _ = b 19357 // match: (Rsh32Ux16 <t> x (Const16 [c])) 19358 // cond: 19359 // result: (Rsh32Ux64 x (Const64 <t> [int64(uint16(c))])) 19360 for { 19361 t := v.Type 19362 _ = v.Args[1] 19363 x := v.Args[0] 19364 v_1 := v.Args[1] 19365 if v_1.Op != OpConst16 { 19366 break 19367 } 19368 c := v_1.AuxInt 19369 v.reset(OpRsh32Ux64) 19370 v.AddArg(x) 19371 v0 := b.NewValue0(v.Pos, OpConst64, t) 19372 v0.AuxInt = int64(uint16(c)) 19373 v.AddArg(v0) 19374 return true 19375 } 19376 // match: (Rsh32Ux16 (Const32 [0]) _) 19377 // cond: 19378 // result: (Const32 [0]) 19379 for { 19380 _ = v.Args[1] 19381 v_0 := v.Args[0] 19382 if v_0.Op != OpConst32 { 19383 break 19384 } 19385 if v_0.AuxInt != 0 { 19386 break 19387 } 19388 v.reset(OpConst32) 19389 v.AuxInt = 0 19390 return true 19391 } 19392 return false 19393 } 19394 func rewriteValuegeneric_OpRsh32Ux32_0(v *Value) bool { 19395 b := v.Block 19396 _ = b 19397 // match: (Rsh32Ux32 <t> x (Const32 [c])) 19398 // cond: 19399 // result: (Rsh32Ux64 x (Const64 <t> [int64(uint32(c))])) 19400 for { 19401 t := v.Type 19402 _ = v.Args[1] 19403 x := v.Args[0] 19404 v_1 := v.Args[1] 19405 if v_1.Op != OpConst32 { 19406 break 19407 } 19408 c := v_1.AuxInt 19409 v.reset(OpRsh32Ux64) 19410 v.AddArg(x) 19411 v0 := b.NewValue0(v.Pos, OpConst64, t) 19412 v0.AuxInt = int64(uint32(c)) 19413 v.AddArg(v0) 19414 return true 19415 } 19416 // match: (Rsh32Ux32 (Const32 [0]) _) 19417 // cond: 19418 // result: (Const32 [0]) 19419 for { 19420 _ = v.Args[1] 19421 v_0 := v.Args[0] 19422 if v_0.Op != OpConst32 { 19423 break 19424 } 19425 if v_0.AuxInt != 0 { 19426 break 19427 } 19428 v.reset(OpConst32) 19429 v.AuxInt = 0 19430 return true 19431 } 19432 return false 19433 } 19434 func rewriteValuegeneric_OpRsh32Ux64_0(v *Value) bool { 19435 b := v.Block 19436 _ = b 19437 typ := &b.Func.Config.Types 19438 _ = typ 19439 // match: (Rsh32Ux64 (Const32 [c]) (Const64 [d])) 19440 // cond: 19441 // result: (Const32 [int64(int32(uint32(c) >> uint64(d)))]) 19442 for { 19443 _ = v.Args[1] 19444 v_0 := v.Args[0] 19445 if v_0.Op != OpConst32 { 19446 break 19447 } 19448 c := v_0.AuxInt 19449 v_1 := v.Args[1] 19450 if v_1.Op != OpConst64 { 19451 break 19452 } 19453 d := v_1.AuxInt 19454 v.reset(OpConst32) 19455 v.AuxInt = int64(int32(uint32(c) >> uint64(d))) 19456 return true 19457 } 19458 // match: (Rsh32Ux64 x (Const64 [0])) 19459 // cond: 19460 // result: x 19461 for { 19462 _ = v.Args[1] 19463 x := v.Args[0] 19464 v_1 := v.Args[1] 19465 if v_1.Op != OpConst64 { 19466 break 19467 } 19468 if v_1.AuxInt != 0 { 19469 break 19470 } 19471 v.reset(OpCopy) 19472 v.Type = x.Type 19473 v.AddArg(x) 19474 return true 19475 } 19476 // match: (Rsh32Ux64 (Const32 [0]) _) 19477 // cond: 19478 // result: (Const32 [0]) 19479 for { 19480 _ = v.Args[1] 19481 v_0 := v.Args[0] 19482 if v_0.Op != OpConst32 { 19483 break 19484 } 19485 if v_0.AuxInt != 0 { 19486 break 19487 } 19488 v.reset(OpConst32) 19489 v.AuxInt = 0 19490 return true 19491 } 19492 // match: (Rsh32Ux64 _ (Const64 [c])) 19493 // cond: uint64(c) >= 32 19494 // result: (Const32 [0]) 19495 for { 19496 _ = v.Args[1] 19497 v_1 := v.Args[1] 19498 if v_1.Op != OpConst64 { 19499 break 19500 } 19501 c := v_1.AuxInt 19502 if !(uint64(c) >= 32) { 19503 break 19504 } 19505 v.reset(OpConst32) 19506 v.AuxInt = 0 19507 return true 19508 } 19509 // match: (Rsh32Ux64 <t> (Rsh32Ux64 x (Const64 [c])) (Const64 [d])) 19510 // cond: !uaddOvf(c,d) 19511 // result: (Rsh32Ux64 x (Const64 <t> [c+d])) 
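// Illustrative note, not generated from gen/generic.rules: two unsigned right shifts by
// constants combine into one shift by their sum, e.g. (x >> 3) >> 5 becomes x >> 8.
// The !uaddOvf(c,d) guard only rejects a sum that wraps around as an unsigned 64-bit
// addition; a combined count of 32 or more is still handled, since the earlier rule in
// this function then folds the whole shift to the constant 0.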
19512 for { 19513 t := v.Type 19514 _ = v.Args[1] 19515 v_0 := v.Args[0] 19516 if v_0.Op != OpRsh32Ux64 { 19517 break 19518 } 19519 _ = v_0.Args[1] 19520 x := v_0.Args[0] 19521 v_0_1 := v_0.Args[1] 19522 if v_0_1.Op != OpConst64 { 19523 break 19524 } 19525 c := v_0_1.AuxInt 19526 v_1 := v.Args[1] 19527 if v_1.Op != OpConst64 { 19528 break 19529 } 19530 d := v_1.AuxInt 19531 if !(!uaddOvf(c, d)) { 19532 break 19533 } 19534 v.reset(OpRsh32Ux64) 19535 v.AddArg(x) 19536 v0 := b.NewValue0(v.Pos, OpConst64, t) 19537 v0.AuxInt = c + d 19538 v.AddArg(v0) 19539 return true 19540 } 19541 // match: (Rsh32Ux64 (Lsh32x64 (Rsh32Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) 19542 // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3) 19543 // result: (Rsh32Ux64 x (Const64 <typ.UInt64> [c1-c2+c3])) 19544 for { 19545 _ = v.Args[1] 19546 v_0 := v.Args[0] 19547 if v_0.Op != OpLsh32x64 { 19548 break 19549 } 19550 _ = v_0.Args[1] 19551 v_0_0 := v_0.Args[0] 19552 if v_0_0.Op != OpRsh32Ux64 { 19553 break 19554 } 19555 _ = v_0_0.Args[1] 19556 x := v_0_0.Args[0] 19557 v_0_0_1 := v_0_0.Args[1] 19558 if v_0_0_1.Op != OpConst64 { 19559 break 19560 } 19561 c1 := v_0_0_1.AuxInt 19562 v_0_1 := v_0.Args[1] 19563 if v_0_1.Op != OpConst64 { 19564 break 19565 } 19566 c2 := v_0_1.AuxInt 19567 v_1 := v.Args[1] 19568 if v_1.Op != OpConst64 { 19569 break 19570 } 19571 c3 := v_1.AuxInt 19572 if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { 19573 break 19574 } 19575 v.reset(OpRsh32Ux64) 19576 v.AddArg(x) 19577 v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 19578 v0.AuxInt = c1 - c2 + c3 19579 v.AddArg(v0) 19580 return true 19581 } 19582 // match: (Rsh32Ux64 (Lsh32x64 x (Const64 [24])) (Const64 [24])) 19583 // cond: 19584 // result: (ZeroExt8to32 (Trunc32to8 <typ.UInt8> x)) 19585 for { 19586 _ = v.Args[1] 19587 v_0 := v.Args[0] 19588 if v_0.Op != OpLsh32x64 { 19589 break 19590 } 19591 _ = v_0.Args[1] 19592 x := v_0.Args[0] 19593 v_0_1 := v_0.Args[1] 19594 if v_0_1.Op != OpConst64 { 19595 break 19596 } 19597 if v_0_1.AuxInt != 24 { 19598 break 19599 } 19600 v_1 := v.Args[1] 19601 if v_1.Op != OpConst64 { 19602 break 19603 } 19604 if v_1.AuxInt != 24 { 19605 break 19606 } 19607 v.reset(OpZeroExt8to32) 19608 v0 := b.NewValue0(v.Pos, OpTrunc32to8, typ.UInt8) 19609 v0.AddArg(x) 19610 v.AddArg(v0) 19611 return true 19612 } 19613 // match: (Rsh32Ux64 (Lsh32x64 x (Const64 [16])) (Const64 [16])) 19614 // cond: 19615 // result: (ZeroExt16to32 (Trunc32to16 <typ.UInt16> x)) 19616 for { 19617 _ = v.Args[1] 19618 v_0 := v.Args[0] 19619 if v_0.Op != OpLsh32x64 { 19620 break 19621 } 19622 _ = v_0.Args[1] 19623 x := v_0.Args[0] 19624 v_0_1 := v_0.Args[1] 19625 if v_0_1.Op != OpConst64 { 19626 break 19627 } 19628 if v_0_1.AuxInt != 16 { 19629 break 19630 } 19631 v_1 := v.Args[1] 19632 if v_1.Op != OpConst64 { 19633 break 19634 } 19635 if v_1.AuxInt != 16 { 19636 break 19637 } 19638 v.reset(OpZeroExt16to32) 19639 v0 := b.NewValue0(v.Pos, OpTrunc32to16, typ.UInt16) 19640 v0.AddArg(x) 19641 v.AddArg(v0) 19642 return true 19643 } 19644 return false 19645 } 19646 func rewriteValuegeneric_OpRsh32Ux8_0(v *Value) bool { 19647 b := v.Block 19648 _ = b 19649 // match: (Rsh32Ux8 <t> x (Const8 [c])) 19650 // cond: 19651 // result: (Rsh32Ux64 x (Const64 <t> [int64(uint8(c))])) 19652 for { 19653 t := v.Type 19654 _ = v.Args[1] 19655 x := v.Args[0] 19656 v_1 := v.Args[1] 19657 if v_1.Op != OpConst8 { 19658 break 19659 } 19660 c := v_1.AuxInt 19661 v.reset(OpRsh32Ux64) 19662 v.AddArg(x) 
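// Illustrative note, not generated from gen/generic.rules: the 8-bit shift count is
// zero-extended into a 64-bit constant below, so later rules only ever have to deal
// with shifts whose count is a 64-bit value.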
19663 v0 := b.NewValue0(v.Pos, OpConst64, t) 19664 v0.AuxInt = int64(uint8(c)) 19665 v.AddArg(v0) 19666 return true 19667 } 19668 // match: (Rsh32Ux8 (Const32 [0]) _) 19669 // cond: 19670 // result: (Const32 [0]) 19671 for { 19672 _ = v.Args[1] 19673 v_0 := v.Args[0] 19674 if v_0.Op != OpConst32 { 19675 break 19676 } 19677 if v_0.AuxInt != 0 { 19678 break 19679 } 19680 v.reset(OpConst32) 19681 v.AuxInt = 0 19682 return true 19683 } 19684 return false 19685 } 19686 func rewriteValuegeneric_OpRsh32x16_0(v *Value) bool { 19687 b := v.Block 19688 _ = b 19689 // match: (Rsh32x16 <t> x (Const16 [c])) 19690 // cond: 19691 // result: (Rsh32x64 x (Const64 <t> [int64(uint16(c))])) 19692 for { 19693 t := v.Type 19694 _ = v.Args[1] 19695 x := v.Args[0] 19696 v_1 := v.Args[1] 19697 if v_1.Op != OpConst16 { 19698 break 19699 } 19700 c := v_1.AuxInt 19701 v.reset(OpRsh32x64) 19702 v.AddArg(x) 19703 v0 := b.NewValue0(v.Pos, OpConst64, t) 19704 v0.AuxInt = int64(uint16(c)) 19705 v.AddArg(v0) 19706 return true 19707 } 19708 // match: (Rsh32x16 (Const32 [0]) _) 19709 // cond: 19710 // result: (Const32 [0]) 19711 for { 19712 _ = v.Args[1] 19713 v_0 := v.Args[0] 19714 if v_0.Op != OpConst32 { 19715 break 19716 } 19717 if v_0.AuxInt != 0 { 19718 break 19719 } 19720 v.reset(OpConst32) 19721 v.AuxInt = 0 19722 return true 19723 } 19724 return false 19725 } 19726 func rewriteValuegeneric_OpRsh32x32_0(v *Value) bool { 19727 b := v.Block 19728 _ = b 19729 // match: (Rsh32x32 <t> x (Const32 [c])) 19730 // cond: 19731 // result: (Rsh32x64 x (Const64 <t> [int64(uint32(c))])) 19732 for { 19733 t := v.Type 19734 _ = v.Args[1] 19735 x := v.Args[0] 19736 v_1 := v.Args[1] 19737 if v_1.Op != OpConst32 { 19738 break 19739 } 19740 c := v_1.AuxInt 19741 v.reset(OpRsh32x64) 19742 v.AddArg(x) 19743 v0 := b.NewValue0(v.Pos, OpConst64, t) 19744 v0.AuxInt = int64(uint32(c)) 19745 v.AddArg(v0) 19746 return true 19747 } 19748 // match: (Rsh32x32 (Const32 [0]) _) 19749 // cond: 19750 // result: (Const32 [0]) 19751 for { 19752 _ = v.Args[1] 19753 v_0 := v.Args[0] 19754 if v_0.Op != OpConst32 { 19755 break 19756 } 19757 if v_0.AuxInt != 0 { 19758 break 19759 } 19760 v.reset(OpConst32) 19761 v.AuxInt = 0 19762 return true 19763 } 19764 return false 19765 } 19766 func rewriteValuegeneric_OpRsh32x64_0(v *Value) bool { 19767 b := v.Block 19768 _ = b 19769 typ := &b.Func.Config.Types 19770 _ = typ 19771 // match: (Rsh32x64 (Const32 [c]) (Const64 [d])) 19772 // cond: 19773 // result: (Const32 [int64(int32(c) >> uint64(d))]) 19774 for { 19775 _ = v.Args[1] 19776 v_0 := v.Args[0] 19777 if v_0.Op != OpConst32 { 19778 break 19779 } 19780 c := v_0.AuxInt 19781 v_1 := v.Args[1] 19782 if v_1.Op != OpConst64 { 19783 break 19784 } 19785 d := v_1.AuxInt 19786 v.reset(OpConst32) 19787 v.AuxInt = int64(int32(c) >> uint64(d)) 19788 return true 19789 } 19790 // match: (Rsh32x64 x (Const64 [0])) 19791 // cond: 19792 // result: x 19793 for { 19794 _ = v.Args[1] 19795 x := v.Args[0] 19796 v_1 := v.Args[1] 19797 if v_1.Op != OpConst64 { 19798 break 19799 } 19800 if v_1.AuxInt != 0 { 19801 break 19802 } 19803 v.reset(OpCopy) 19804 v.Type = x.Type 19805 v.AddArg(x) 19806 return true 19807 } 19808 // match: (Rsh32x64 (Const32 [0]) _) 19809 // cond: 19810 // result: (Const32 [0]) 19811 for { 19812 _ = v.Args[1] 19813 v_0 := v.Args[0] 19814 if v_0.Op != OpConst32 { 19815 break 19816 } 19817 if v_0.AuxInt != 0 { 19818 break 19819 } 19820 v.reset(OpConst32) 19821 v.AuxInt = 0 19822 return true 19823 } 19824 // match: (Rsh32x64 <t> (Rsh32x64 x (Const64 [c])) (Const64 
[d])) 19825 // cond: !uaddOvf(c,d) 19826 // result: (Rsh32x64 x (Const64 <t> [c+d])) 19827 for { 19828 t := v.Type 19829 _ = v.Args[1] 19830 v_0 := v.Args[0] 19831 if v_0.Op != OpRsh32x64 { 19832 break 19833 } 19834 _ = v_0.Args[1] 19835 x := v_0.Args[0] 19836 v_0_1 := v_0.Args[1] 19837 if v_0_1.Op != OpConst64 { 19838 break 19839 } 19840 c := v_0_1.AuxInt 19841 v_1 := v.Args[1] 19842 if v_1.Op != OpConst64 { 19843 break 19844 } 19845 d := v_1.AuxInt 19846 if !(!uaddOvf(c, d)) { 19847 break 19848 } 19849 v.reset(OpRsh32x64) 19850 v.AddArg(x) 19851 v0 := b.NewValue0(v.Pos, OpConst64, t) 19852 v0.AuxInt = c + d 19853 v.AddArg(v0) 19854 return true 19855 } 19856 // match: (Rsh32x64 (Lsh32x64 x (Const64 [24])) (Const64 [24])) 19857 // cond: 19858 // result: (SignExt8to32 (Trunc32to8 <typ.Int8> x)) 19859 for { 19860 _ = v.Args[1] 19861 v_0 := v.Args[0] 19862 if v_0.Op != OpLsh32x64 { 19863 break 19864 } 19865 _ = v_0.Args[1] 19866 x := v_0.Args[0] 19867 v_0_1 := v_0.Args[1] 19868 if v_0_1.Op != OpConst64 { 19869 break 19870 } 19871 if v_0_1.AuxInt != 24 { 19872 break 19873 } 19874 v_1 := v.Args[1] 19875 if v_1.Op != OpConst64 { 19876 break 19877 } 19878 if v_1.AuxInt != 24 { 19879 break 19880 } 19881 v.reset(OpSignExt8to32) 19882 v0 := b.NewValue0(v.Pos, OpTrunc32to8, typ.Int8) 19883 v0.AddArg(x) 19884 v.AddArg(v0) 19885 return true 19886 } 19887 // match: (Rsh32x64 (Lsh32x64 x (Const64 [16])) (Const64 [16])) 19888 // cond: 19889 // result: (SignExt16to32 (Trunc32to16 <typ.Int16> x)) 19890 for { 19891 _ = v.Args[1] 19892 v_0 := v.Args[0] 19893 if v_0.Op != OpLsh32x64 { 19894 break 19895 } 19896 _ = v_0.Args[1] 19897 x := v_0.Args[0] 19898 v_0_1 := v_0.Args[1] 19899 if v_0_1.Op != OpConst64 { 19900 break 19901 } 19902 if v_0_1.AuxInt != 16 { 19903 break 19904 } 19905 v_1 := v.Args[1] 19906 if v_1.Op != OpConst64 { 19907 break 19908 } 19909 if v_1.AuxInt != 16 { 19910 break 19911 } 19912 v.reset(OpSignExt16to32) 19913 v0 := b.NewValue0(v.Pos, OpTrunc32to16, typ.Int16) 19914 v0.AddArg(x) 19915 v.AddArg(v0) 19916 return true 19917 } 19918 return false 19919 } 19920 func rewriteValuegeneric_OpRsh32x8_0(v *Value) bool { 19921 b := v.Block 19922 _ = b 19923 // match: (Rsh32x8 <t> x (Const8 [c])) 19924 // cond: 19925 // result: (Rsh32x64 x (Const64 <t> [int64(uint8(c))])) 19926 for { 19927 t := v.Type 19928 _ = v.Args[1] 19929 x := v.Args[0] 19930 v_1 := v.Args[1] 19931 if v_1.Op != OpConst8 { 19932 break 19933 } 19934 c := v_1.AuxInt 19935 v.reset(OpRsh32x64) 19936 v.AddArg(x) 19937 v0 := b.NewValue0(v.Pos, OpConst64, t) 19938 v0.AuxInt = int64(uint8(c)) 19939 v.AddArg(v0) 19940 return true 19941 } 19942 // match: (Rsh32x8 (Const32 [0]) _) 19943 // cond: 19944 // result: (Const32 [0]) 19945 for { 19946 _ = v.Args[1] 19947 v_0 := v.Args[0] 19948 if v_0.Op != OpConst32 { 19949 break 19950 } 19951 if v_0.AuxInt != 0 { 19952 break 19953 } 19954 v.reset(OpConst32) 19955 v.AuxInt = 0 19956 return true 19957 } 19958 return false 19959 } 19960 func rewriteValuegeneric_OpRsh64Ux16_0(v *Value) bool { 19961 b := v.Block 19962 _ = b 19963 // match: (Rsh64Ux16 <t> x (Const16 [c])) 19964 // cond: 19965 // result: (Rsh64Ux64 x (Const64 <t> [int64(uint16(c))])) 19966 for { 19967 t := v.Type 19968 _ = v.Args[1] 19969 x := v.Args[0] 19970 v_1 := v.Args[1] 19971 if v_1.Op != OpConst16 { 19972 break 19973 } 19974 c := v_1.AuxInt 19975 v.reset(OpRsh64Ux64) 19976 v.AddArg(x) 19977 v0 := b.NewValue0(v.Pos, OpConst64, t) 19978 v0.AuxInt = int64(uint16(c)) 19979 v.AddArg(v0) 19980 return true 19981 } 19982 // match: 
(Rsh64Ux16 (Const64 [0]) _) 19983 // cond: 19984 // result: (Const64 [0]) 19985 for { 19986 _ = v.Args[1] 19987 v_0 := v.Args[0] 19988 if v_0.Op != OpConst64 { 19989 break 19990 } 19991 if v_0.AuxInt != 0 { 19992 break 19993 } 19994 v.reset(OpConst64) 19995 v.AuxInt = 0 19996 return true 19997 } 19998 return false 19999 } 20000 func rewriteValuegeneric_OpRsh64Ux32_0(v *Value) bool { 20001 b := v.Block 20002 _ = b 20003 // match: (Rsh64Ux32 <t> x (Const32 [c])) 20004 // cond: 20005 // result: (Rsh64Ux64 x (Const64 <t> [int64(uint32(c))])) 20006 for { 20007 t := v.Type 20008 _ = v.Args[1] 20009 x := v.Args[0] 20010 v_1 := v.Args[1] 20011 if v_1.Op != OpConst32 { 20012 break 20013 } 20014 c := v_1.AuxInt 20015 v.reset(OpRsh64Ux64) 20016 v.AddArg(x) 20017 v0 := b.NewValue0(v.Pos, OpConst64, t) 20018 v0.AuxInt = int64(uint32(c)) 20019 v.AddArg(v0) 20020 return true 20021 } 20022 // match: (Rsh64Ux32 (Const64 [0]) _) 20023 // cond: 20024 // result: (Const64 [0]) 20025 for { 20026 _ = v.Args[1] 20027 v_0 := v.Args[0] 20028 if v_0.Op != OpConst64 { 20029 break 20030 } 20031 if v_0.AuxInt != 0 { 20032 break 20033 } 20034 v.reset(OpConst64) 20035 v.AuxInt = 0 20036 return true 20037 } 20038 return false 20039 } 20040 func rewriteValuegeneric_OpRsh64Ux64_0(v *Value) bool { 20041 b := v.Block 20042 _ = b 20043 typ := &b.Func.Config.Types 20044 _ = typ 20045 // match: (Rsh64Ux64 (Const64 [c]) (Const64 [d])) 20046 // cond: 20047 // result: (Const64 [int64(uint64(c) >> uint64(d))]) 20048 for { 20049 _ = v.Args[1] 20050 v_0 := v.Args[0] 20051 if v_0.Op != OpConst64 { 20052 break 20053 } 20054 c := v_0.AuxInt 20055 v_1 := v.Args[1] 20056 if v_1.Op != OpConst64 { 20057 break 20058 } 20059 d := v_1.AuxInt 20060 v.reset(OpConst64) 20061 v.AuxInt = int64(uint64(c) >> uint64(d)) 20062 return true 20063 } 20064 // match: (Rsh64Ux64 x (Const64 [0])) 20065 // cond: 20066 // result: x 20067 for { 20068 _ = v.Args[1] 20069 x := v.Args[0] 20070 v_1 := v.Args[1] 20071 if v_1.Op != OpConst64 { 20072 break 20073 } 20074 if v_1.AuxInt != 0 { 20075 break 20076 } 20077 v.reset(OpCopy) 20078 v.Type = x.Type 20079 v.AddArg(x) 20080 return true 20081 } 20082 // match: (Rsh64Ux64 (Const64 [0]) _) 20083 // cond: 20084 // result: (Const64 [0]) 20085 for { 20086 _ = v.Args[1] 20087 v_0 := v.Args[0] 20088 if v_0.Op != OpConst64 { 20089 break 20090 } 20091 if v_0.AuxInt != 0 { 20092 break 20093 } 20094 v.reset(OpConst64) 20095 v.AuxInt = 0 20096 return true 20097 } 20098 // match: (Rsh64Ux64 _ (Const64 [c])) 20099 // cond: uint64(c) >= 64 20100 // result: (Const64 [0]) 20101 for { 20102 _ = v.Args[1] 20103 v_1 := v.Args[1] 20104 if v_1.Op != OpConst64 { 20105 break 20106 } 20107 c := v_1.AuxInt 20108 if !(uint64(c) >= 64) { 20109 break 20110 } 20111 v.reset(OpConst64) 20112 v.AuxInt = 0 20113 return true 20114 } 20115 // match: (Rsh64Ux64 <t> (Rsh64Ux64 x (Const64 [c])) (Const64 [d])) 20116 // cond: !uaddOvf(c,d) 20117 // result: (Rsh64Ux64 x (Const64 <t> [c+d])) 20118 for { 20119 t := v.Type 20120 _ = v.Args[1] 20121 v_0 := v.Args[0] 20122 if v_0.Op != OpRsh64Ux64 { 20123 break 20124 } 20125 _ = v_0.Args[1] 20126 x := v_0.Args[0] 20127 v_0_1 := v_0.Args[1] 20128 if v_0_1.Op != OpConst64 { 20129 break 20130 } 20131 c := v_0_1.AuxInt 20132 v_1 := v.Args[1] 20133 if v_1.Op != OpConst64 { 20134 break 20135 } 20136 d := v_1.AuxInt 20137 if !(!uaddOvf(c, d)) { 20138 break 20139 } 20140 v.reset(OpRsh64Ux64) 20141 v.AddArg(x) 20142 v0 := b.NewValue0(v.Pos, OpConst64, t) 20143 v0.AuxInt = c + d 20144 v.AddArg(v0) 20145 return true 20146 } 
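// Illustrative note, not generated from gen/generic.rules: the next rule matches
// ((x >> c1) << c2) >> c3 with constant shift counts, the shape left behind by a
// bit-field extract. When c1 >= c2 the left shift cannot drop any set bits, and
// c3 >= c2 keeps the net movement a right shift, so the whole expression collapses
// to a single unsigned right shift by c1-c2+c3; for example ((x >> 8) << 4) >> 12
// is the same as x >> 16 for an unsigned 64-bit x.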
20147 // match: (Rsh64Ux64 (Lsh64x64 (Rsh64Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) 20148 // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3) 20149 // result: (Rsh64Ux64 x (Const64 <typ.UInt64> [c1-c2+c3])) 20150 for { 20151 _ = v.Args[1] 20152 v_0 := v.Args[0] 20153 if v_0.Op != OpLsh64x64 { 20154 break 20155 } 20156 _ = v_0.Args[1] 20157 v_0_0 := v_0.Args[0] 20158 if v_0_0.Op != OpRsh64Ux64 { 20159 break 20160 } 20161 _ = v_0_0.Args[1] 20162 x := v_0_0.Args[0] 20163 v_0_0_1 := v_0_0.Args[1] 20164 if v_0_0_1.Op != OpConst64 { 20165 break 20166 } 20167 c1 := v_0_0_1.AuxInt 20168 v_0_1 := v_0.Args[1] 20169 if v_0_1.Op != OpConst64 { 20170 break 20171 } 20172 c2 := v_0_1.AuxInt 20173 v_1 := v.Args[1] 20174 if v_1.Op != OpConst64 { 20175 break 20176 } 20177 c3 := v_1.AuxInt 20178 if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { 20179 break 20180 } 20181 v.reset(OpRsh64Ux64) 20182 v.AddArg(x) 20183 v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 20184 v0.AuxInt = c1 - c2 + c3 20185 v.AddArg(v0) 20186 return true 20187 } 20188 // match: (Rsh64Ux64 (Lsh64x64 x (Const64 [56])) (Const64 [56])) 20189 // cond: 20190 // result: (ZeroExt8to64 (Trunc64to8 <typ.UInt8> x)) 20191 for { 20192 _ = v.Args[1] 20193 v_0 := v.Args[0] 20194 if v_0.Op != OpLsh64x64 { 20195 break 20196 } 20197 _ = v_0.Args[1] 20198 x := v_0.Args[0] 20199 v_0_1 := v_0.Args[1] 20200 if v_0_1.Op != OpConst64 { 20201 break 20202 } 20203 if v_0_1.AuxInt != 56 { 20204 break 20205 } 20206 v_1 := v.Args[1] 20207 if v_1.Op != OpConst64 { 20208 break 20209 } 20210 if v_1.AuxInt != 56 { 20211 break 20212 } 20213 v.reset(OpZeroExt8to64) 20214 v0 := b.NewValue0(v.Pos, OpTrunc64to8, typ.UInt8) 20215 v0.AddArg(x) 20216 v.AddArg(v0) 20217 return true 20218 } 20219 // match: (Rsh64Ux64 (Lsh64x64 x (Const64 [48])) (Const64 [48])) 20220 // cond: 20221 // result: (ZeroExt16to64 (Trunc64to16 <typ.UInt16> x)) 20222 for { 20223 _ = v.Args[1] 20224 v_0 := v.Args[0] 20225 if v_0.Op != OpLsh64x64 { 20226 break 20227 } 20228 _ = v_0.Args[1] 20229 x := v_0.Args[0] 20230 v_0_1 := v_0.Args[1] 20231 if v_0_1.Op != OpConst64 { 20232 break 20233 } 20234 if v_0_1.AuxInt != 48 { 20235 break 20236 } 20237 v_1 := v.Args[1] 20238 if v_1.Op != OpConst64 { 20239 break 20240 } 20241 if v_1.AuxInt != 48 { 20242 break 20243 } 20244 v.reset(OpZeroExt16to64) 20245 v0 := b.NewValue0(v.Pos, OpTrunc64to16, typ.UInt16) 20246 v0.AddArg(x) 20247 v.AddArg(v0) 20248 return true 20249 } 20250 // match: (Rsh64Ux64 (Lsh64x64 x (Const64 [32])) (Const64 [32])) 20251 // cond: 20252 // result: (ZeroExt32to64 (Trunc64to32 <typ.UInt32> x)) 20253 for { 20254 _ = v.Args[1] 20255 v_0 := v.Args[0] 20256 if v_0.Op != OpLsh64x64 { 20257 break 20258 } 20259 _ = v_0.Args[1] 20260 x := v_0.Args[0] 20261 v_0_1 := v_0.Args[1] 20262 if v_0_1.Op != OpConst64 { 20263 break 20264 } 20265 if v_0_1.AuxInt != 32 { 20266 break 20267 } 20268 v_1 := v.Args[1] 20269 if v_1.Op != OpConst64 { 20270 break 20271 } 20272 if v_1.AuxInt != 32 { 20273 break 20274 } 20275 v.reset(OpZeroExt32to64) 20276 v0 := b.NewValue0(v.Pos, OpTrunc64to32, typ.UInt32) 20277 v0.AddArg(x) 20278 v.AddArg(v0) 20279 return true 20280 } 20281 return false 20282 } 20283 func rewriteValuegeneric_OpRsh64Ux8_0(v *Value) bool { 20284 b := v.Block 20285 _ = b 20286 // match: (Rsh64Ux8 <t> x (Const8 [c])) 20287 // cond: 20288 // result: (Rsh64Ux64 x (Const64 <t> [int64(uint8(c))])) 20289 for { 20290 t := v.Type 20291 _ = v.Args[1] 20292 x := v.Args[0] 20293 v_1 := 
v.Args[1] 20294 if v_1.Op != OpConst8 { 20295 break 20296 } 20297 c := v_1.AuxInt 20298 v.reset(OpRsh64Ux64) 20299 v.AddArg(x) 20300 v0 := b.NewValue0(v.Pos, OpConst64, t) 20301 v0.AuxInt = int64(uint8(c)) 20302 v.AddArg(v0) 20303 return true 20304 } 20305 // match: (Rsh64Ux8 (Const64 [0]) _) 20306 // cond: 20307 // result: (Const64 [0]) 20308 for { 20309 _ = v.Args[1] 20310 v_0 := v.Args[0] 20311 if v_0.Op != OpConst64 { 20312 break 20313 } 20314 if v_0.AuxInt != 0 { 20315 break 20316 } 20317 v.reset(OpConst64) 20318 v.AuxInt = 0 20319 return true 20320 } 20321 return false 20322 } 20323 func rewriteValuegeneric_OpRsh64x16_0(v *Value) bool { 20324 b := v.Block 20325 _ = b 20326 // match: (Rsh64x16 <t> x (Const16 [c])) 20327 // cond: 20328 // result: (Rsh64x64 x (Const64 <t> [int64(uint16(c))])) 20329 for { 20330 t := v.Type 20331 _ = v.Args[1] 20332 x := v.Args[0] 20333 v_1 := v.Args[1] 20334 if v_1.Op != OpConst16 { 20335 break 20336 } 20337 c := v_1.AuxInt 20338 v.reset(OpRsh64x64) 20339 v.AddArg(x) 20340 v0 := b.NewValue0(v.Pos, OpConst64, t) 20341 v0.AuxInt = int64(uint16(c)) 20342 v.AddArg(v0) 20343 return true 20344 } 20345 // match: (Rsh64x16 (Const64 [0]) _) 20346 // cond: 20347 // result: (Const64 [0]) 20348 for { 20349 _ = v.Args[1] 20350 v_0 := v.Args[0] 20351 if v_0.Op != OpConst64 { 20352 break 20353 } 20354 if v_0.AuxInt != 0 { 20355 break 20356 } 20357 v.reset(OpConst64) 20358 v.AuxInt = 0 20359 return true 20360 } 20361 return false 20362 } 20363 func rewriteValuegeneric_OpRsh64x32_0(v *Value) bool { 20364 b := v.Block 20365 _ = b 20366 // match: (Rsh64x32 <t> x (Const32 [c])) 20367 // cond: 20368 // result: (Rsh64x64 x (Const64 <t> [int64(uint32(c))])) 20369 for { 20370 t := v.Type 20371 _ = v.Args[1] 20372 x := v.Args[0] 20373 v_1 := v.Args[1] 20374 if v_1.Op != OpConst32 { 20375 break 20376 } 20377 c := v_1.AuxInt 20378 v.reset(OpRsh64x64) 20379 v.AddArg(x) 20380 v0 := b.NewValue0(v.Pos, OpConst64, t) 20381 v0.AuxInt = int64(uint32(c)) 20382 v.AddArg(v0) 20383 return true 20384 } 20385 // match: (Rsh64x32 (Const64 [0]) _) 20386 // cond: 20387 // result: (Const64 [0]) 20388 for { 20389 _ = v.Args[1] 20390 v_0 := v.Args[0] 20391 if v_0.Op != OpConst64 { 20392 break 20393 } 20394 if v_0.AuxInt != 0 { 20395 break 20396 } 20397 v.reset(OpConst64) 20398 v.AuxInt = 0 20399 return true 20400 } 20401 return false 20402 } 20403 func rewriteValuegeneric_OpRsh64x64_0(v *Value) bool { 20404 b := v.Block 20405 _ = b 20406 typ := &b.Func.Config.Types 20407 _ = typ 20408 // match: (Rsh64x64 (Const64 [c]) (Const64 [d])) 20409 // cond: 20410 // result: (Const64 [c >> uint64(d)]) 20411 for { 20412 _ = v.Args[1] 20413 v_0 := v.Args[0] 20414 if v_0.Op != OpConst64 { 20415 break 20416 } 20417 c := v_0.AuxInt 20418 v_1 := v.Args[1] 20419 if v_1.Op != OpConst64 { 20420 break 20421 } 20422 d := v_1.AuxInt 20423 v.reset(OpConst64) 20424 v.AuxInt = c >> uint64(d) 20425 return true 20426 } 20427 // match: (Rsh64x64 x (Const64 [0])) 20428 // cond: 20429 // result: x 20430 for { 20431 _ = v.Args[1] 20432 x := v.Args[0] 20433 v_1 := v.Args[1] 20434 if v_1.Op != OpConst64 { 20435 break 20436 } 20437 if v_1.AuxInt != 0 { 20438 break 20439 } 20440 v.reset(OpCopy) 20441 v.Type = x.Type 20442 v.AddArg(x) 20443 return true 20444 } 20445 // match: (Rsh64x64 (Const64 [0]) _) 20446 // cond: 20447 // result: (Const64 [0]) 20448 for { 20449 _ = v.Args[1] 20450 v_0 := v.Args[0] 20451 if v_0.Op != OpConst64 { 20452 break 20453 } 20454 if v_0.AuxInt != 0 { 20455 break 20456 } 20457 v.reset(OpConst64) 20458 
v.AuxInt = 0 20459 return true 20460 } 20461 // match: (Rsh64x64 <t> (Rsh64x64 x (Const64 [c])) (Const64 [d])) 20462 // cond: !uaddOvf(c,d) 20463 // result: (Rsh64x64 x (Const64 <t> [c+d])) 20464 for { 20465 t := v.Type 20466 _ = v.Args[1] 20467 v_0 := v.Args[0] 20468 if v_0.Op != OpRsh64x64 { 20469 break 20470 } 20471 _ = v_0.Args[1] 20472 x := v_0.Args[0] 20473 v_0_1 := v_0.Args[1] 20474 if v_0_1.Op != OpConst64 { 20475 break 20476 } 20477 c := v_0_1.AuxInt 20478 v_1 := v.Args[1] 20479 if v_1.Op != OpConst64 { 20480 break 20481 } 20482 d := v_1.AuxInt 20483 if !(!uaddOvf(c, d)) { 20484 break 20485 } 20486 v.reset(OpRsh64x64) 20487 v.AddArg(x) 20488 v0 := b.NewValue0(v.Pos, OpConst64, t) 20489 v0.AuxInt = c + d 20490 v.AddArg(v0) 20491 return true 20492 } 20493 // match: (Rsh64x64 (Lsh64x64 x (Const64 [56])) (Const64 [56])) 20494 // cond: 20495 // result: (SignExt8to64 (Trunc64to8 <typ.Int8> x)) 20496 for { 20497 _ = v.Args[1] 20498 v_0 := v.Args[0] 20499 if v_0.Op != OpLsh64x64 { 20500 break 20501 } 20502 _ = v_0.Args[1] 20503 x := v_0.Args[0] 20504 v_0_1 := v_0.Args[1] 20505 if v_0_1.Op != OpConst64 { 20506 break 20507 } 20508 if v_0_1.AuxInt != 56 { 20509 break 20510 } 20511 v_1 := v.Args[1] 20512 if v_1.Op != OpConst64 { 20513 break 20514 } 20515 if v_1.AuxInt != 56 { 20516 break 20517 } 20518 v.reset(OpSignExt8to64) 20519 v0 := b.NewValue0(v.Pos, OpTrunc64to8, typ.Int8) 20520 v0.AddArg(x) 20521 v.AddArg(v0) 20522 return true 20523 } 20524 // match: (Rsh64x64 (Lsh64x64 x (Const64 [48])) (Const64 [48])) 20525 // cond: 20526 // result: (SignExt16to64 (Trunc64to16 <typ.Int16> x)) 20527 for { 20528 _ = v.Args[1] 20529 v_0 := v.Args[0] 20530 if v_0.Op != OpLsh64x64 { 20531 break 20532 } 20533 _ = v_0.Args[1] 20534 x := v_0.Args[0] 20535 v_0_1 := v_0.Args[1] 20536 if v_0_1.Op != OpConst64 { 20537 break 20538 } 20539 if v_0_1.AuxInt != 48 { 20540 break 20541 } 20542 v_1 := v.Args[1] 20543 if v_1.Op != OpConst64 { 20544 break 20545 } 20546 if v_1.AuxInt != 48 { 20547 break 20548 } 20549 v.reset(OpSignExt16to64) 20550 v0 := b.NewValue0(v.Pos, OpTrunc64to16, typ.Int16) 20551 v0.AddArg(x) 20552 v.AddArg(v0) 20553 return true 20554 } 20555 // match: (Rsh64x64 (Lsh64x64 x (Const64 [32])) (Const64 [32])) 20556 // cond: 20557 // result: (SignExt32to64 (Trunc64to32 <typ.Int32> x)) 20558 for { 20559 _ = v.Args[1] 20560 v_0 := v.Args[0] 20561 if v_0.Op != OpLsh64x64 { 20562 break 20563 } 20564 _ = v_0.Args[1] 20565 x := v_0.Args[0] 20566 v_0_1 := v_0.Args[1] 20567 if v_0_1.Op != OpConst64 { 20568 break 20569 } 20570 if v_0_1.AuxInt != 32 { 20571 break 20572 } 20573 v_1 := v.Args[1] 20574 if v_1.Op != OpConst64 { 20575 break 20576 } 20577 if v_1.AuxInt != 32 { 20578 break 20579 } 20580 v.reset(OpSignExt32to64) 20581 v0 := b.NewValue0(v.Pos, OpTrunc64to32, typ.Int32) 20582 v0.AddArg(x) 20583 v.AddArg(v0) 20584 return true 20585 } 20586 return false 20587 } 20588 func rewriteValuegeneric_OpRsh64x8_0(v *Value) bool { 20589 b := v.Block 20590 _ = b 20591 // match: (Rsh64x8 <t> x (Const8 [c])) 20592 // cond: 20593 // result: (Rsh64x64 x (Const64 <t> [int64(uint8(c))])) 20594 for { 20595 t := v.Type 20596 _ = v.Args[1] 20597 x := v.Args[0] 20598 v_1 := v.Args[1] 20599 if v_1.Op != OpConst8 { 20600 break 20601 } 20602 c := v_1.AuxInt 20603 v.reset(OpRsh64x64) 20604 v.AddArg(x) 20605 v0 := b.NewValue0(v.Pos, OpConst64, t) 20606 v0.AuxInt = int64(uint8(c)) 20607 v.AddArg(v0) 20608 return true 20609 } 20610 // match: (Rsh64x8 (Const64 [0]) _) 20611 // cond: 20612 // result: (Const64 [0]) 20613 for { 20614 _ = 
v.Args[1] 20615 v_0 := v.Args[0] 20616 if v_0.Op != OpConst64 { 20617 break 20618 } 20619 if v_0.AuxInt != 0 { 20620 break 20621 } 20622 v.reset(OpConst64) 20623 v.AuxInt = 0 20624 return true 20625 } 20626 return false 20627 } 20628 func rewriteValuegeneric_OpRsh8Ux16_0(v *Value) bool { 20629 b := v.Block 20630 _ = b 20631 // match: (Rsh8Ux16 <t> x (Const16 [c])) 20632 // cond: 20633 // result: (Rsh8Ux64 x (Const64 <t> [int64(uint16(c))])) 20634 for { 20635 t := v.Type 20636 _ = v.Args[1] 20637 x := v.Args[0] 20638 v_1 := v.Args[1] 20639 if v_1.Op != OpConst16 { 20640 break 20641 } 20642 c := v_1.AuxInt 20643 v.reset(OpRsh8Ux64) 20644 v.AddArg(x) 20645 v0 := b.NewValue0(v.Pos, OpConst64, t) 20646 v0.AuxInt = int64(uint16(c)) 20647 v.AddArg(v0) 20648 return true 20649 } 20650 // match: (Rsh8Ux16 (Const8 [0]) _) 20651 // cond: 20652 // result: (Const8 [0]) 20653 for { 20654 _ = v.Args[1] 20655 v_0 := v.Args[0] 20656 if v_0.Op != OpConst8 { 20657 break 20658 } 20659 if v_0.AuxInt != 0 { 20660 break 20661 } 20662 v.reset(OpConst8) 20663 v.AuxInt = 0 20664 return true 20665 } 20666 return false 20667 } 20668 func rewriteValuegeneric_OpRsh8Ux32_0(v *Value) bool { 20669 b := v.Block 20670 _ = b 20671 // match: (Rsh8Ux32 <t> x (Const32 [c])) 20672 // cond: 20673 // result: (Rsh8Ux64 x (Const64 <t> [int64(uint32(c))])) 20674 for { 20675 t := v.Type 20676 _ = v.Args[1] 20677 x := v.Args[0] 20678 v_1 := v.Args[1] 20679 if v_1.Op != OpConst32 { 20680 break 20681 } 20682 c := v_1.AuxInt 20683 v.reset(OpRsh8Ux64) 20684 v.AddArg(x) 20685 v0 := b.NewValue0(v.Pos, OpConst64, t) 20686 v0.AuxInt = int64(uint32(c)) 20687 v.AddArg(v0) 20688 return true 20689 } 20690 // match: (Rsh8Ux32 (Const8 [0]) _) 20691 // cond: 20692 // result: (Const8 [0]) 20693 for { 20694 _ = v.Args[1] 20695 v_0 := v.Args[0] 20696 if v_0.Op != OpConst8 { 20697 break 20698 } 20699 if v_0.AuxInt != 0 { 20700 break 20701 } 20702 v.reset(OpConst8) 20703 v.AuxInt = 0 20704 return true 20705 } 20706 return false 20707 } 20708 func rewriteValuegeneric_OpRsh8Ux64_0(v *Value) bool { 20709 b := v.Block 20710 _ = b 20711 typ := &b.Func.Config.Types 20712 _ = typ 20713 // match: (Rsh8Ux64 (Const8 [c]) (Const64 [d])) 20714 // cond: 20715 // result: (Const8 [int64(int8(uint8(c) >> uint64(d)))]) 20716 for { 20717 _ = v.Args[1] 20718 v_0 := v.Args[0] 20719 if v_0.Op != OpConst8 { 20720 break 20721 } 20722 c := v_0.AuxInt 20723 v_1 := v.Args[1] 20724 if v_1.Op != OpConst64 { 20725 break 20726 } 20727 d := v_1.AuxInt 20728 v.reset(OpConst8) 20729 v.AuxInt = int64(int8(uint8(c) >> uint64(d))) 20730 return true 20731 } 20732 // match: (Rsh8Ux64 x (Const64 [0])) 20733 // cond: 20734 // result: x 20735 for { 20736 _ = v.Args[1] 20737 x := v.Args[0] 20738 v_1 := v.Args[1] 20739 if v_1.Op != OpConst64 { 20740 break 20741 } 20742 if v_1.AuxInt != 0 { 20743 break 20744 } 20745 v.reset(OpCopy) 20746 v.Type = x.Type 20747 v.AddArg(x) 20748 return true 20749 } 20750 // match: (Rsh8Ux64 (Const8 [0]) _) 20751 // cond: 20752 // result: (Const8 [0]) 20753 for { 20754 _ = v.Args[1] 20755 v_0 := v.Args[0] 20756 if v_0.Op != OpConst8 { 20757 break 20758 } 20759 if v_0.AuxInt != 0 { 20760 break 20761 } 20762 v.reset(OpConst8) 20763 v.AuxInt = 0 20764 return true 20765 } 20766 // match: (Rsh8Ux64 _ (Const64 [c])) 20767 // cond: uint64(c) >= 8 20768 // result: (Const8 [0]) 20769 for { 20770 _ = v.Args[1] 20771 v_1 := v.Args[1] 20772 if v_1.Op != OpConst64 { 20773 break 20774 } 20775 c := v_1.AuxInt 20776 if !(uint64(c) >= 8) { 20777 break 20778 } 20779 v.reset(OpConst8) 
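// Illustrative note, not generated from gen/generic.rules: an unsigned shift by 8 or
// more clears every bit of an 8-bit value, so the shifted operand is never examined
// and the result is simply the constant 0.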
20780 v.AuxInt = 0 20781 return true 20782 } 20783 // match: (Rsh8Ux64 <t> (Rsh8Ux64 x (Const64 [c])) (Const64 [d])) 20784 // cond: !uaddOvf(c,d) 20785 // result: (Rsh8Ux64 x (Const64 <t> [c+d])) 20786 for { 20787 t := v.Type 20788 _ = v.Args[1] 20789 v_0 := v.Args[0] 20790 if v_0.Op != OpRsh8Ux64 { 20791 break 20792 } 20793 _ = v_0.Args[1] 20794 x := v_0.Args[0] 20795 v_0_1 := v_0.Args[1] 20796 if v_0_1.Op != OpConst64 { 20797 break 20798 } 20799 c := v_0_1.AuxInt 20800 v_1 := v.Args[1] 20801 if v_1.Op != OpConst64 { 20802 break 20803 } 20804 d := v_1.AuxInt 20805 if !(!uaddOvf(c, d)) { 20806 break 20807 } 20808 v.reset(OpRsh8Ux64) 20809 v.AddArg(x) 20810 v0 := b.NewValue0(v.Pos, OpConst64, t) 20811 v0.AuxInt = c + d 20812 v.AddArg(v0) 20813 return true 20814 } 20815 // match: (Rsh8Ux64 (Lsh8x64 (Rsh8Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) 20816 // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3) 20817 // result: (Rsh8Ux64 x (Const64 <typ.UInt64> [c1-c2+c3])) 20818 for { 20819 _ = v.Args[1] 20820 v_0 := v.Args[0] 20821 if v_0.Op != OpLsh8x64 { 20822 break 20823 } 20824 _ = v_0.Args[1] 20825 v_0_0 := v_0.Args[0] 20826 if v_0_0.Op != OpRsh8Ux64 { 20827 break 20828 } 20829 _ = v_0_0.Args[1] 20830 x := v_0_0.Args[0] 20831 v_0_0_1 := v_0_0.Args[1] 20832 if v_0_0_1.Op != OpConst64 { 20833 break 20834 } 20835 c1 := v_0_0_1.AuxInt 20836 v_0_1 := v_0.Args[1] 20837 if v_0_1.Op != OpConst64 { 20838 break 20839 } 20840 c2 := v_0_1.AuxInt 20841 v_1 := v.Args[1] 20842 if v_1.Op != OpConst64 { 20843 break 20844 } 20845 c3 := v_1.AuxInt 20846 if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { 20847 break 20848 } 20849 v.reset(OpRsh8Ux64) 20850 v.AddArg(x) 20851 v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 20852 v0.AuxInt = c1 - c2 + c3 20853 v.AddArg(v0) 20854 return true 20855 } 20856 return false 20857 } 20858 func rewriteValuegeneric_OpRsh8Ux8_0(v *Value) bool { 20859 b := v.Block 20860 _ = b 20861 // match: (Rsh8Ux8 <t> x (Const8 [c])) 20862 // cond: 20863 // result: (Rsh8Ux64 x (Const64 <t> [int64(uint8(c))])) 20864 for { 20865 t := v.Type 20866 _ = v.Args[1] 20867 x := v.Args[0] 20868 v_1 := v.Args[1] 20869 if v_1.Op != OpConst8 { 20870 break 20871 } 20872 c := v_1.AuxInt 20873 v.reset(OpRsh8Ux64) 20874 v.AddArg(x) 20875 v0 := b.NewValue0(v.Pos, OpConst64, t) 20876 v0.AuxInt = int64(uint8(c)) 20877 v.AddArg(v0) 20878 return true 20879 } 20880 // match: (Rsh8Ux8 (Const8 [0]) _) 20881 // cond: 20882 // result: (Const8 [0]) 20883 for { 20884 _ = v.Args[1] 20885 v_0 := v.Args[0] 20886 if v_0.Op != OpConst8 { 20887 break 20888 } 20889 if v_0.AuxInt != 0 { 20890 break 20891 } 20892 v.reset(OpConst8) 20893 v.AuxInt = 0 20894 return true 20895 } 20896 return false 20897 } 20898 func rewriteValuegeneric_OpRsh8x16_0(v *Value) bool { 20899 b := v.Block 20900 _ = b 20901 // match: (Rsh8x16 <t> x (Const16 [c])) 20902 // cond: 20903 // result: (Rsh8x64 x (Const64 <t> [int64(uint16(c))])) 20904 for { 20905 t := v.Type 20906 _ = v.Args[1] 20907 x := v.Args[0] 20908 v_1 := v.Args[1] 20909 if v_1.Op != OpConst16 { 20910 break 20911 } 20912 c := v_1.AuxInt 20913 v.reset(OpRsh8x64) 20914 v.AddArg(x) 20915 v0 := b.NewValue0(v.Pos, OpConst64, t) 20916 v0.AuxInt = int64(uint16(c)) 20917 v.AddArg(v0) 20918 return true 20919 } 20920 // match: (Rsh8x16 (Const8 [0]) _) 20921 // cond: 20922 // result: (Const8 [0]) 20923 for { 20924 _ = v.Args[1] 20925 v_0 := v.Args[0] 20926 if v_0.Op != OpConst8 { 20927 break 20928 } 20929 if v_0.AuxInt != 0 { 
20930 break 20931 } 20932 v.reset(OpConst8) 20933 v.AuxInt = 0 20934 return true 20935 } 20936 return false 20937 } 20938 func rewriteValuegeneric_OpRsh8x32_0(v *Value) bool { 20939 b := v.Block 20940 _ = b 20941 // match: (Rsh8x32 <t> x (Const32 [c])) 20942 // cond: 20943 // result: (Rsh8x64 x (Const64 <t> [int64(uint32(c))])) 20944 for { 20945 t := v.Type 20946 _ = v.Args[1] 20947 x := v.Args[0] 20948 v_1 := v.Args[1] 20949 if v_1.Op != OpConst32 { 20950 break 20951 } 20952 c := v_1.AuxInt 20953 v.reset(OpRsh8x64) 20954 v.AddArg(x) 20955 v0 := b.NewValue0(v.Pos, OpConst64, t) 20956 v0.AuxInt = int64(uint32(c)) 20957 v.AddArg(v0) 20958 return true 20959 } 20960 // match: (Rsh8x32 (Const8 [0]) _) 20961 // cond: 20962 // result: (Const8 [0]) 20963 for { 20964 _ = v.Args[1] 20965 v_0 := v.Args[0] 20966 if v_0.Op != OpConst8 { 20967 break 20968 } 20969 if v_0.AuxInt != 0 { 20970 break 20971 } 20972 v.reset(OpConst8) 20973 v.AuxInt = 0 20974 return true 20975 } 20976 return false 20977 } 20978 func rewriteValuegeneric_OpRsh8x64_0(v *Value) bool { 20979 b := v.Block 20980 _ = b 20981 // match: (Rsh8x64 (Const8 [c]) (Const64 [d])) 20982 // cond: 20983 // result: (Const8 [int64(int8(c) >> uint64(d))]) 20984 for { 20985 _ = v.Args[1] 20986 v_0 := v.Args[0] 20987 if v_0.Op != OpConst8 { 20988 break 20989 } 20990 c := v_0.AuxInt 20991 v_1 := v.Args[1] 20992 if v_1.Op != OpConst64 { 20993 break 20994 } 20995 d := v_1.AuxInt 20996 v.reset(OpConst8) 20997 v.AuxInt = int64(int8(c) >> uint64(d)) 20998 return true 20999 } 21000 // match: (Rsh8x64 x (Const64 [0])) 21001 // cond: 21002 // result: x 21003 for { 21004 _ = v.Args[1] 21005 x := v.Args[0] 21006 v_1 := v.Args[1] 21007 if v_1.Op != OpConst64 { 21008 break 21009 } 21010 if v_1.AuxInt != 0 { 21011 break 21012 } 21013 v.reset(OpCopy) 21014 v.Type = x.Type 21015 v.AddArg(x) 21016 return true 21017 } 21018 // match: (Rsh8x64 (Const8 [0]) _) 21019 // cond: 21020 // result: (Const8 [0]) 21021 for { 21022 _ = v.Args[1] 21023 v_0 := v.Args[0] 21024 if v_0.Op != OpConst8 { 21025 break 21026 } 21027 if v_0.AuxInt != 0 { 21028 break 21029 } 21030 v.reset(OpConst8) 21031 v.AuxInt = 0 21032 return true 21033 } 21034 // match: (Rsh8x64 <t> (Rsh8x64 x (Const64 [c])) (Const64 [d])) 21035 // cond: !uaddOvf(c,d) 21036 // result: (Rsh8x64 x (Const64 <t> [c+d])) 21037 for { 21038 t := v.Type 21039 _ = v.Args[1] 21040 v_0 := v.Args[0] 21041 if v_0.Op != OpRsh8x64 { 21042 break 21043 } 21044 _ = v_0.Args[1] 21045 x := v_0.Args[0] 21046 v_0_1 := v_0.Args[1] 21047 if v_0_1.Op != OpConst64 { 21048 break 21049 } 21050 c := v_0_1.AuxInt 21051 v_1 := v.Args[1] 21052 if v_1.Op != OpConst64 { 21053 break 21054 } 21055 d := v_1.AuxInt 21056 if !(!uaddOvf(c, d)) { 21057 break 21058 } 21059 v.reset(OpRsh8x64) 21060 v.AddArg(x) 21061 v0 := b.NewValue0(v.Pos, OpConst64, t) 21062 v0.AuxInt = c + d 21063 v.AddArg(v0) 21064 return true 21065 } 21066 return false 21067 } 21068 func rewriteValuegeneric_OpRsh8x8_0(v *Value) bool { 21069 b := v.Block 21070 _ = b 21071 // match: (Rsh8x8 <t> x (Const8 [c])) 21072 // cond: 21073 // result: (Rsh8x64 x (Const64 <t> [int64(uint8(c))])) 21074 for { 21075 t := v.Type 21076 _ = v.Args[1] 21077 x := v.Args[0] 21078 v_1 := v.Args[1] 21079 if v_1.Op != OpConst8 { 21080 break 21081 } 21082 c := v_1.AuxInt 21083 v.reset(OpRsh8x64) 21084 v.AddArg(x) 21085 v0 := b.NewValue0(v.Pos, OpConst64, t) 21086 v0.AuxInt = int64(uint8(c)) 21087 v.AddArg(v0) 21088 return true 21089 } 21090 // match: (Rsh8x8 (Const8 [0]) _) 21091 // cond: 21092 // result: (Const8 [0]) 
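// Illustrative note, not generated from gen/generic.rules: an arithmetic right shift of
// the constant 0 is still 0 whatever the count, so the count operand is ignored; the
// same zero rule is emitted for every operand width and shift-count type in this family.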
21093 for { 21094 _ = v.Args[1] 21095 v_0 := v.Args[0] 21096 if v_0.Op != OpConst8 { 21097 break 21098 } 21099 if v_0.AuxInt != 0 { 21100 break 21101 } 21102 v.reset(OpConst8) 21103 v.AuxInt = 0 21104 return true 21105 } 21106 return false 21107 } 21108 func rewriteValuegeneric_OpSignExt16to32_0(v *Value) bool { 21109 // match: (SignExt16to32 (Const16 [c])) 21110 // cond: 21111 // result: (Const32 [int64( int16(c))]) 21112 for { 21113 v_0 := v.Args[0] 21114 if v_0.Op != OpConst16 { 21115 break 21116 } 21117 c := v_0.AuxInt 21118 v.reset(OpConst32) 21119 v.AuxInt = int64(int16(c)) 21120 return true 21121 } 21122 // match: (SignExt16to32 (Trunc32to16 x:(Rsh32x64 _ (Const64 [s])))) 21123 // cond: s >= 16 21124 // result: x 21125 for { 21126 v_0 := v.Args[0] 21127 if v_0.Op != OpTrunc32to16 { 21128 break 21129 } 21130 x := v_0.Args[0] 21131 if x.Op != OpRsh32x64 { 21132 break 21133 } 21134 _ = x.Args[1] 21135 x_1 := x.Args[1] 21136 if x_1.Op != OpConst64 { 21137 break 21138 } 21139 s := x_1.AuxInt 21140 if !(s >= 16) { 21141 break 21142 } 21143 v.reset(OpCopy) 21144 v.Type = x.Type 21145 v.AddArg(x) 21146 return true 21147 } 21148 return false 21149 } 21150 func rewriteValuegeneric_OpSignExt16to64_0(v *Value) bool { 21151 // match: (SignExt16to64 (Const16 [c])) 21152 // cond: 21153 // result: (Const64 [int64( int16(c))]) 21154 for { 21155 v_0 := v.Args[0] 21156 if v_0.Op != OpConst16 { 21157 break 21158 } 21159 c := v_0.AuxInt 21160 v.reset(OpConst64) 21161 v.AuxInt = int64(int16(c)) 21162 return true 21163 } 21164 // match: (SignExt16to64 (Trunc64to16 x:(Rsh64x64 _ (Const64 [s])))) 21165 // cond: s >= 48 21166 // result: x 21167 for { 21168 v_0 := v.Args[0] 21169 if v_0.Op != OpTrunc64to16 { 21170 break 21171 } 21172 x := v_0.Args[0] 21173 if x.Op != OpRsh64x64 { 21174 break 21175 } 21176 _ = x.Args[1] 21177 x_1 := x.Args[1] 21178 if x_1.Op != OpConst64 { 21179 break 21180 } 21181 s := x_1.AuxInt 21182 if !(s >= 48) { 21183 break 21184 } 21185 v.reset(OpCopy) 21186 v.Type = x.Type 21187 v.AddArg(x) 21188 return true 21189 } 21190 return false 21191 } 21192 func rewriteValuegeneric_OpSignExt32to64_0(v *Value) bool { 21193 // match: (SignExt32to64 (Const32 [c])) 21194 // cond: 21195 // result: (Const64 [int64( int32(c))]) 21196 for { 21197 v_0 := v.Args[0] 21198 if v_0.Op != OpConst32 { 21199 break 21200 } 21201 c := v_0.AuxInt 21202 v.reset(OpConst64) 21203 v.AuxInt = int64(int32(c)) 21204 return true 21205 } 21206 // match: (SignExt32to64 (Trunc64to32 x:(Rsh64x64 _ (Const64 [s])))) 21207 // cond: s >= 32 21208 // result: x 21209 for { 21210 v_0 := v.Args[0] 21211 if v_0.Op != OpTrunc64to32 { 21212 break 21213 } 21214 x := v_0.Args[0] 21215 if x.Op != OpRsh64x64 { 21216 break 21217 } 21218 _ = x.Args[1] 21219 x_1 := x.Args[1] 21220 if x_1.Op != OpConst64 { 21221 break 21222 } 21223 s := x_1.AuxInt 21224 if !(s >= 32) { 21225 break 21226 } 21227 v.reset(OpCopy) 21228 v.Type = x.Type 21229 v.AddArg(x) 21230 return true 21231 } 21232 return false 21233 } 21234 func rewriteValuegeneric_OpSignExt8to16_0(v *Value) bool { 21235 // match: (SignExt8to16 (Const8 [c])) 21236 // cond: 21237 // result: (Const16 [int64( int8(c))]) 21238 for { 21239 v_0 := v.Args[0] 21240 if v_0.Op != OpConst8 { 21241 break 21242 } 21243 c := v_0.AuxInt 21244 v.reset(OpConst16) 21245 v.AuxInt = int64(int8(c)) 21246 return true 21247 } 21248 // match: (SignExt8to16 (Trunc16to8 x:(Rsh16x64 _ (Const64 [s])))) 21249 // cond: s >= 8 21250 // result: x 21251 for { 21252 v_0 := v.Args[0] 21253 if v_0.Op != OpTrunc16to8 { 21254 
break 21255 } 21256 x := v_0.Args[0] 21257 if x.Op != OpRsh16x64 { 21258 break 21259 } 21260 _ = x.Args[1] 21261 x_1 := x.Args[1] 21262 if x_1.Op != OpConst64 { 21263 break 21264 } 21265 s := x_1.AuxInt 21266 if !(s >= 8) { 21267 break 21268 } 21269 v.reset(OpCopy) 21270 v.Type = x.Type 21271 v.AddArg(x) 21272 return true 21273 } 21274 return false 21275 } 21276 func rewriteValuegeneric_OpSignExt8to32_0(v *Value) bool { 21277 // match: (SignExt8to32 (Const8 [c])) 21278 // cond: 21279 // result: (Const32 [int64( int8(c))]) 21280 for { 21281 v_0 := v.Args[0] 21282 if v_0.Op != OpConst8 { 21283 break 21284 } 21285 c := v_0.AuxInt 21286 v.reset(OpConst32) 21287 v.AuxInt = int64(int8(c)) 21288 return true 21289 } 21290 // match: (SignExt8to32 (Trunc32to8 x:(Rsh32x64 _ (Const64 [s])))) 21291 // cond: s >= 24 21292 // result: x 21293 for { 21294 v_0 := v.Args[0] 21295 if v_0.Op != OpTrunc32to8 { 21296 break 21297 } 21298 x := v_0.Args[0] 21299 if x.Op != OpRsh32x64 { 21300 break 21301 } 21302 _ = x.Args[1] 21303 x_1 := x.Args[1] 21304 if x_1.Op != OpConst64 { 21305 break 21306 } 21307 s := x_1.AuxInt 21308 if !(s >= 24) { 21309 break 21310 } 21311 v.reset(OpCopy) 21312 v.Type = x.Type 21313 v.AddArg(x) 21314 return true 21315 } 21316 return false 21317 } 21318 func rewriteValuegeneric_OpSignExt8to64_0(v *Value) bool { 21319 // match: (SignExt8to64 (Const8 [c])) 21320 // cond: 21321 // result: (Const64 [int64( int8(c))]) 21322 for { 21323 v_0 := v.Args[0] 21324 if v_0.Op != OpConst8 { 21325 break 21326 } 21327 c := v_0.AuxInt 21328 v.reset(OpConst64) 21329 v.AuxInt = int64(int8(c)) 21330 return true 21331 } 21332 // match: (SignExt8to64 (Trunc64to8 x:(Rsh64x64 _ (Const64 [s])))) 21333 // cond: s >= 56 21334 // result: x 21335 for { 21336 v_0 := v.Args[0] 21337 if v_0.Op != OpTrunc64to8 { 21338 break 21339 } 21340 x := v_0.Args[0] 21341 if x.Op != OpRsh64x64 { 21342 break 21343 } 21344 _ = x.Args[1] 21345 x_1 := x.Args[1] 21346 if x_1.Op != OpConst64 { 21347 break 21348 } 21349 s := x_1.AuxInt 21350 if !(s >= 56) { 21351 break 21352 } 21353 v.reset(OpCopy) 21354 v.Type = x.Type 21355 v.AddArg(x) 21356 return true 21357 } 21358 return false 21359 } 21360 func rewriteValuegeneric_OpSliceCap_0(v *Value) bool { 21361 // match: (SliceCap (SliceMake _ _ (Const64 <t> [c]))) 21362 // cond: 21363 // result: (Const64 <t> [c]) 21364 for { 21365 v_0 := v.Args[0] 21366 if v_0.Op != OpSliceMake { 21367 break 21368 } 21369 _ = v_0.Args[2] 21370 v_0_2 := v_0.Args[2] 21371 if v_0_2.Op != OpConst64 { 21372 break 21373 } 21374 t := v_0_2.Type 21375 c := v_0_2.AuxInt 21376 v.reset(OpConst64) 21377 v.Type = t 21378 v.AuxInt = c 21379 return true 21380 } 21381 // match: (SliceCap (SliceMake _ _ (Const32 <t> [c]))) 21382 // cond: 21383 // result: (Const32 <t> [c]) 21384 for { 21385 v_0 := v.Args[0] 21386 if v_0.Op != OpSliceMake { 21387 break 21388 } 21389 _ = v_0.Args[2] 21390 v_0_2 := v_0.Args[2] 21391 if v_0_2.Op != OpConst32 { 21392 break 21393 } 21394 t := v_0_2.Type 21395 c := v_0_2.AuxInt 21396 v.reset(OpConst32) 21397 v.Type = t 21398 v.AuxInt = c 21399 return true 21400 } 21401 // match: (SliceCap (SliceMake _ _ (SliceCap x))) 21402 // cond: 21403 // result: (SliceCap x) 21404 for { 21405 v_0 := v.Args[0] 21406 if v_0.Op != OpSliceMake { 21407 break 21408 } 21409 _ = v_0.Args[2] 21410 v_0_2 := v_0.Args[2] 21411 if v_0_2.Op != OpSliceCap { 21412 break 21413 } 21414 x := v_0_2.Args[0] 21415 v.reset(OpSliceCap) 21416 v.AddArg(x) 21417 return true 21418 } 21419 // match: (SliceCap (SliceMake _ _ (SliceLen x))) 21420 
	// cond:
	// result: (SliceLen x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpSliceMake {
			break
		}
		_ = v_0.Args[2]
		v_0_2 := v_0.Args[2]
		if v_0_2.Op != OpSliceLen {
			break
		}
		x := v_0_2.Args[0]
		v.reset(OpSliceLen)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValuegeneric_OpSliceLen_0(v *Value) bool {
	// match: (SliceLen (SliceMake _ (Const64 <t> [c]) _))
	// cond:
	// result: (Const64 <t> [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpSliceMake {
			break
		}
		_ = v_0.Args[2]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		t := v_0_1.Type
		c := v_0_1.AuxInt
		v.reset(OpConst64)
		v.Type = t
		v.AuxInt = c
		return true
	}
	// match: (SliceLen (SliceMake _ (Const32 <t> [c]) _))
	// cond:
	// result: (Const32 <t> [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpSliceMake {
			break
		}
		_ = v_0.Args[2]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst32 {
			break
		}
		t := v_0_1.Type
		c := v_0_1.AuxInt
		v.reset(OpConst32)
		v.Type = t
		v.AuxInt = c
		return true
	}
	// match: (SliceLen (SliceMake _ (SliceLen x) _))
	// cond:
	// result: (SliceLen x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpSliceMake {
			break
		}
		_ = v_0.Args[2]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpSliceLen {
			break
		}
		x := v_0_1.Args[0]
		v.reset(OpSliceLen)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValuegeneric_OpSlicePtr_0(v *Value) bool {
	// match: (SlicePtr (SliceMake (SlicePtr x) _ _))
	// cond:
	// result: (SlicePtr x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpSliceMake {
			break
		}
		_ = v_0.Args[2]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpSlicePtr {
			break
		}
		x := v_0_0.Args[0]
		v.reset(OpSlicePtr)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValuegeneric_OpSlicemask_0(v *Value) bool {
	// match: (Slicemask (Const32 [x]))
	// cond: x > 0
	// result: (Const32 [-1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		x := v_0.AuxInt
		if !(x > 0) {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = -1
		return true
	}
	// match: (Slicemask (Const32 [0]))
	// cond:
	// result: (Const32 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = 0
		return true
	}
	// match: (Slicemask (Const64 [x]))
	// cond: x > 0
	// result: (Const64 [-1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		x := v_0.AuxInt
		if !(x > 0) {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = -1
		return true
	}
	// match: (Slicemask (Const64 [0]))
	// cond:
	// result: (Const64 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	return false
}
func rewriteValuegeneric_OpSqrt_0(v *Value) bool {
	// match: (Sqrt
(Const64F [c])) 21588 // cond: 21589 // result: (Const64F [f2i(math.Sqrt(i2f(c)))]) 21590 for { 21591 v_0 := v.Args[0] 21592 if v_0.Op != OpConst64F { 21593 break 21594 } 21595 c := v_0.AuxInt 21596 v.reset(OpConst64F) 21597 v.AuxInt = f2i(math.Sqrt(i2f(c))) 21598 return true 21599 } 21600 return false 21601 } 21602 func rewriteValuegeneric_OpStore_0(v *Value) bool { 21603 b := v.Block 21604 _ = b 21605 fe := b.Func.fe 21606 _ = fe 21607 // match: (Store {t1} p1 (Load <t2> p2 mem) mem) 21608 // cond: isSamePtr(p1, p2) && t2.Size() == t1.(*types.Type).Size() 21609 // result: mem 21610 for { 21611 t1 := v.Aux 21612 _ = v.Args[2] 21613 p1 := v.Args[0] 21614 v_1 := v.Args[1] 21615 if v_1.Op != OpLoad { 21616 break 21617 } 21618 t2 := v_1.Type 21619 _ = v_1.Args[1] 21620 p2 := v_1.Args[0] 21621 mem := v_1.Args[1] 21622 if mem != v.Args[2] { 21623 break 21624 } 21625 if !(isSamePtr(p1, p2) && t2.Size() == t1.(*types.Type).Size()) { 21626 break 21627 } 21628 v.reset(OpCopy) 21629 v.Type = mem.Type 21630 v.AddArg(mem) 21631 return true 21632 } 21633 // match: (Store {t1} (OffPtr [o1] p1) (Load <t2> (OffPtr [o1] p2) oldmem) mem:(Store {t3} (OffPtr [o3] p3) _ oldmem)) 21634 // cond: isSamePtr(p1, p2) && isSamePtr(p1, p3) && t2.Size() == t1.(*types.Type).Size() && !overlap(o1, t2.Size(), o3, t3.(*types.Type).Size()) 21635 // result: mem 21636 for { 21637 t1 := v.Aux 21638 _ = v.Args[2] 21639 v_0 := v.Args[0] 21640 if v_0.Op != OpOffPtr { 21641 break 21642 } 21643 o1 := v_0.AuxInt 21644 p1 := v_0.Args[0] 21645 v_1 := v.Args[1] 21646 if v_1.Op != OpLoad { 21647 break 21648 } 21649 t2 := v_1.Type 21650 _ = v_1.Args[1] 21651 v_1_0 := v_1.Args[0] 21652 if v_1_0.Op != OpOffPtr { 21653 break 21654 } 21655 if v_1_0.AuxInt != o1 { 21656 break 21657 } 21658 p2 := v_1_0.Args[0] 21659 oldmem := v_1.Args[1] 21660 mem := v.Args[2] 21661 if mem.Op != OpStore { 21662 break 21663 } 21664 t3 := mem.Aux 21665 _ = mem.Args[2] 21666 mem_0 := mem.Args[0] 21667 if mem_0.Op != OpOffPtr { 21668 break 21669 } 21670 o3 := mem_0.AuxInt 21671 p3 := mem_0.Args[0] 21672 if oldmem != mem.Args[2] { 21673 break 21674 } 21675 if !(isSamePtr(p1, p2) && isSamePtr(p1, p3) && t2.Size() == t1.(*types.Type).Size() && !overlap(o1, t2.Size(), o3, t3.(*types.Type).Size())) { 21676 break 21677 } 21678 v.reset(OpCopy) 21679 v.Type = mem.Type 21680 v.AddArg(mem) 21681 return true 21682 } 21683 // match: (Store {t1} (OffPtr [o1] p1) (Load <t2> (OffPtr [o1] p2) oldmem) mem:(Store {t3} (OffPtr [o3] p3) _ (Store {t4} (OffPtr [o4] p4) _ oldmem))) 21684 // cond: isSamePtr(p1, p2) && isSamePtr(p1, p3) && isSamePtr(p1, p4) && t2.Size() == t1.(*types.Type).Size() && !overlap(o1, t2.Size(), o3, t3.(*types.Type).Size()) && !overlap(o1, t2.Size(), o4, t4.(*types.Type).Size()) 21685 // result: mem 21686 for { 21687 t1 := v.Aux 21688 _ = v.Args[2] 21689 v_0 := v.Args[0] 21690 if v_0.Op != OpOffPtr { 21691 break 21692 } 21693 o1 := v_0.AuxInt 21694 p1 := v_0.Args[0] 21695 v_1 := v.Args[1] 21696 if v_1.Op != OpLoad { 21697 break 21698 } 21699 t2 := v_1.Type 21700 _ = v_1.Args[1] 21701 v_1_0 := v_1.Args[0] 21702 if v_1_0.Op != OpOffPtr { 21703 break 21704 } 21705 if v_1_0.AuxInt != o1 { 21706 break 21707 } 21708 p2 := v_1_0.Args[0] 21709 oldmem := v_1.Args[1] 21710 mem := v.Args[2] 21711 if mem.Op != OpStore { 21712 break 21713 } 21714 t3 := mem.Aux 21715 _ = mem.Args[2] 21716 mem_0 := mem.Args[0] 21717 if mem_0.Op != OpOffPtr { 21718 break 21719 } 21720 o3 := mem_0.AuxInt 21721 p3 := mem_0.Args[0] 21722 mem_2 := mem.Args[2] 21723 if mem_2.Op != OpStore { 21724 
break 21725 } 21726 t4 := mem_2.Aux 21727 _ = mem_2.Args[2] 21728 mem_2_0 := mem_2.Args[0] 21729 if mem_2_0.Op != OpOffPtr { 21730 break 21731 } 21732 o4 := mem_2_0.AuxInt 21733 p4 := mem_2_0.Args[0] 21734 if oldmem != mem_2.Args[2] { 21735 break 21736 } 21737 if !(isSamePtr(p1, p2) && isSamePtr(p1, p3) && isSamePtr(p1, p4) && t2.Size() == t1.(*types.Type).Size() && !overlap(o1, t2.Size(), o3, t3.(*types.Type).Size()) && !overlap(o1, t2.Size(), o4, t4.(*types.Type).Size())) { 21738 break 21739 } 21740 v.reset(OpCopy) 21741 v.Type = mem.Type 21742 v.AddArg(mem) 21743 return true 21744 } 21745 // match: (Store {t1} (OffPtr [o1] p1) (Load <t2> (OffPtr [o1] p2) oldmem) mem:(Store {t3} (OffPtr [o3] p3) _ (Store {t4} (OffPtr [o4] p4) _ (Store {t5} (OffPtr [o5] p5) _ oldmem)))) 21746 // cond: isSamePtr(p1, p2) && isSamePtr(p1, p3) && isSamePtr(p1, p4) && isSamePtr(p1, p5) && t2.Size() == t1.(*types.Type).Size() && !overlap(o1, t2.Size(), o3, t3.(*types.Type).Size()) && !overlap(o1, t2.Size(), o4, t4.(*types.Type).Size()) && !overlap(o1, t2.Size(), o5, t5.(*types.Type).Size()) 21747 // result: mem 21748 for { 21749 t1 := v.Aux 21750 _ = v.Args[2] 21751 v_0 := v.Args[0] 21752 if v_0.Op != OpOffPtr { 21753 break 21754 } 21755 o1 := v_0.AuxInt 21756 p1 := v_0.Args[0] 21757 v_1 := v.Args[1] 21758 if v_1.Op != OpLoad { 21759 break 21760 } 21761 t2 := v_1.Type 21762 _ = v_1.Args[1] 21763 v_1_0 := v_1.Args[0] 21764 if v_1_0.Op != OpOffPtr { 21765 break 21766 } 21767 if v_1_0.AuxInt != o1 { 21768 break 21769 } 21770 p2 := v_1_0.Args[0] 21771 oldmem := v_1.Args[1] 21772 mem := v.Args[2] 21773 if mem.Op != OpStore { 21774 break 21775 } 21776 t3 := mem.Aux 21777 _ = mem.Args[2] 21778 mem_0 := mem.Args[0] 21779 if mem_0.Op != OpOffPtr { 21780 break 21781 } 21782 o3 := mem_0.AuxInt 21783 p3 := mem_0.Args[0] 21784 mem_2 := mem.Args[2] 21785 if mem_2.Op != OpStore { 21786 break 21787 } 21788 t4 := mem_2.Aux 21789 _ = mem_2.Args[2] 21790 mem_2_0 := mem_2.Args[0] 21791 if mem_2_0.Op != OpOffPtr { 21792 break 21793 } 21794 o4 := mem_2_0.AuxInt 21795 p4 := mem_2_0.Args[0] 21796 mem_2_2 := mem_2.Args[2] 21797 if mem_2_2.Op != OpStore { 21798 break 21799 } 21800 t5 := mem_2_2.Aux 21801 _ = mem_2_2.Args[2] 21802 mem_2_2_0 := mem_2_2.Args[0] 21803 if mem_2_2_0.Op != OpOffPtr { 21804 break 21805 } 21806 o5 := mem_2_2_0.AuxInt 21807 p5 := mem_2_2_0.Args[0] 21808 if oldmem != mem_2_2.Args[2] { 21809 break 21810 } 21811 if !(isSamePtr(p1, p2) && isSamePtr(p1, p3) && isSamePtr(p1, p4) && isSamePtr(p1, p5) && t2.Size() == t1.(*types.Type).Size() && !overlap(o1, t2.Size(), o3, t3.(*types.Type).Size()) && !overlap(o1, t2.Size(), o4, t4.(*types.Type).Size()) && !overlap(o1, t2.Size(), o5, t5.(*types.Type).Size())) { 21812 break 21813 } 21814 v.reset(OpCopy) 21815 v.Type = mem.Type 21816 v.AddArg(mem) 21817 return true 21818 } 21819 // match: (Store _ (StructMake0) mem) 21820 // cond: 21821 // result: mem 21822 for { 21823 _ = v.Args[2] 21824 v_1 := v.Args[1] 21825 if v_1.Op != OpStructMake0 { 21826 break 21827 } 21828 mem := v.Args[2] 21829 v.reset(OpCopy) 21830 v.Type = mem.Type 21831 v.AddArg(mem) 21832 return true 21833 } 21834 // match: (Store dst (StructMake1 <t> f0) mem) 21835 // cond: 21836 // result: (Store {t.FieldType(0)} (OffPtr <t.FieldType(0).PtrTo()> [0] dst) f0 mem) 21837 for { 21838 _ = v.Args[2] 21839 dst := v.Args[0] 21840 v_1 := v.Args[1] 21841 if v_1.Op != OpStructMake1 { 21842 break 21843 } 21844 t := v_1.Type 21845 f0 := v_1.Args[0] 21846 mem := v.Args[2] 21847 v.reset(OpStore) 21848 v.Aux = t.FieldType(0) 
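		// Store of a single-field struct: store just that field. The OffPtr
		// built next addresses field 0 of dst (offset 0), and the store's
		// type annotation (v.Aux) becomes the field's type.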
		v0 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo())
		v0.AuxInt = 0
		v0.AddArg(dst)
		v.AddArg(v0)
		v.AddArg(f0)
		v.AddArg(mem)
		return true
	}
	// match: (Store dst (StructMake2 <t> f0 f1) mem)
	// cond:
	// result: (Store {t.FieldType(1)} (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst) f1 (Store {t.FieldType(0)} (OffPtr <t.FieldType(0).PtrTo()> [0] dst) f0 mem))
	for {
		_ = v.Args[2]
		dst := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpStructMake2 {
			break
		}
		t := v_1.Type
		_ = v_1.Args[1]
		f0 := v_1.Args[0]
		f1 := v_1.Args[1]
		mem := v.Args[2]
		v.reset(OpStore)
		v.Aux = t.FieldType(1)
		v0 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo())
		v0.AuxInt = t.FieldOff(1)
		v0.AddArg(dst)
		v.AddArg(v0)
		v.AddArg(f1)
		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
		v1.Aux = t.FieldType(0)
		v2 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo())
		v2.AuxInt = 0
		v2.AddArg(dst)
		v1.AddArg(v2)
		v1.AddArg(f0)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Store dst (StructMake3 <t> f0 f1 f2) mem)
	// cond:
	// result: (Store {t.FieldType(2)} (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] dst) f2 (Store {t.FieldType(1)} (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst) f1 (Store {t.FieldType(0)} (OffPtr <t.FieldType(0).PtrTo()> [0] dst) f0 mem)))
	for {
		_ = v.Args[2]
		dst := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpStructMake3 {
			break
		}
		t := v_1.Type
		_ = v_1.Args[2]
		f0 := v_1.Args[0]
		f1 := v_1.Args[1]
		f2 := v_1.Args[2]
		mem := v.Args[2]
		v.reset(OpStore)
		v.Aux = t.FieldType(2)
		v0 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(2).PtrTo())
		v0.AuxInt = t.FieldOff(2)
		v0.AddArg(dst)
		v.AddArg(v0)
		v.AddArg(f2)
		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
		v1.Aux = t.FieldType(1)
		v2 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo())
		v2.AuxInt = t.FieldOff(1)
		v2.AddArg(dst)
		v1.AddArg(v2)
		v1.AddArg(f1)
		v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
		v3.Aux = t.FieldType(0)
		v4 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo())
		v4.AuxInt = 0
		v4.AddArg(dst)
		v3.AddArg(v4)
		v3.AddArg(f0)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Store dst (StructMake4 <t> f0 f1 f2 f3) mem)
	// cond:
	// result: (Store {t.FieldType(3)} (OffPtr <t.FieldType(3).PtrTo()> [t.FieldOff(3)] dst) f3 (Store {t.FieldType(2)} (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] dst) f2 (Store {t.FieldType(1)} (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst) f1 (Store {t.FieldType(0)} (OffPtr <t.FieldType(0).PtrTo()> [0] dst) f0 mem))))
	for {
		_ = v.Args[2]
		dst := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpStructMake4 {
			break
		}
		t := v_1.Type
		_ = v_1.Args[3]
		f0 := v_1.Args[0]
		f1 := v_1.Args[1]
		f2 := v_1.Args[2]
		f3 := v_1.Args[3]
		mem := v.Args[2]
		v.reset(OpStore)
		v.Aux = t.FieldType(3)
		v0 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(3).PtrTo())
		v0.AuxInt = t.FieldOff(3)
		v0.AddArg(dst)
		v.AddArg(v0)
		v.AddArg(f3)
		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
		v1.Aux = t.FieldType(2)
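		// v1 is the nested store for field 2; the remaining fields are handled
		// by further nested stores built below, ending with field 0 at offset 0
		// and the incoming memory mem as the innermost argument.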
21958 v2 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(2).PtrTo()) 21959 v2.AuxInt = t.FieldOff(2) 21960 v2.AddArg(dst) 21961 v1.AddArg(v2) 21962 v1.AddArg(f2) 21963 v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem) 21964 v3.Aux = t.FieldType(1) 21965 v4 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo()) 21966 v4.AuxInt = t.FieldOff(1) 21967 v4.AddArg(dst) 21968 v3.AddArg(v4) 21969 v3.AddArg(f1) 21970 v5 := b.NewValue0(v.Pos, OpStore, types.TypeMem) 21971 v5.Aux = t.FieldType(0) 21972 v6 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo()) 21973 v6.AuxInt = 0 21974 v6.AddArg(dst) 21975 v5.AddArg(v6) 21976 v5.AddArg(f0) 21977 v5.AddArg(mem) 21978 v3.AddArg(v5) 21979 v1.AddArg(v3) 21980 v.AddArg(v1) 21981 return true 21982 } 21983 // match: (Store {t} dst (Load src mem) mem) 21984 // cond: !fe.CanSSA(t.(*types.Type)) 21985 // result: (Move {t} [t.(*types.Type).Size()] dst src mem) 21986 for { 21987 t := v.Aux 21988 _ = v.Args[2] 21989 dst := v.Args[0] 21990 v_1 := v.Args[1] 21991 if v_1.Op != OpLoad { 21992 break 21993 } 21994 _ = v_1.Args[1] 21995 src := v_1.Args[0] 21996 mem := v_1.Args[1] 21997 if mem != v.Args[2] { 21998 break 21999 } 22000 if !(!fe.CanSSA(t.(*types.Type))) { 22001 break 22002 } 22003 v.reset(OpMove) 22004 v.AuxInt = t.(*types.Type).Size() 22005 v.Aux = t 22006 v.AddArg(dst) 22007 v.AddArg(src) 22008 v.AddArg(mem) 22009 return true 22010 } 22011 return false 22012 } 22013 func rewriteValuegeneric_OpStore_10(v *Value) bool { 22014 b := v.Block 22015 _ = b 22016 config := b.Func.Config 22017 _ = config 22018 fe := b.Func.fe 22019 _ = fe 22020 // match: (Store {t} dst (Load src mem) (VarDef {x} mem)) 22021 // cond: !fe.CanSSA(t.(*types.Type)) 22022 // result: (Move {t} [t.(*types.Type).Size()] dst src (VarDef {x} mem)) 22023 for { 22024 t := v.Aux 22025 _ = v.Args[2] 22026 dst := v.Args[0] 22027 v_1 := v.Args[1] 22028 if v_1.Op != OpLoad { 22029 break 22030 } 22031 _ = v_1.Args[1] 22032 src := v_1.Args[0] 22033 mem := v_1.Args[1] 22034 v_2 := v.Args[2] 22035 if v_2.Op != OpVarDef { 22036 break 22037 } 22038 x := v_2.Aux 22039 if mem != v_2.Args[0] { 22040 break 22041 } 22042 if !(!fe.CanSSA(t.(*types.Type))) { 22043 break 22044 } 22045 v.reset(OpMove) 22046 v.AuxInt = t.(*types.Type).Size() 22047 v.Aux = t 22048 v.AddArg(dst) 22049 v.AddArg(src) 22050 v0 := b.NewValue0(v.Pos, OpVarDef, types.TypeMem) 22051 v0.Aux = x 22052 v0.AddArg(mem) 22053 v.AddArg(v0) 22054 return true 22055 } 22056 // match: (Store _ (ArrayMake0) mem) 22057 // cond: 22058 // result: mem 22059 for { 22060 _ = v.Args[2] 22061 v_1 := v.Args[1] 22062 if v_1.Op != OpArrayMake0 { 22063 break 22064 } 22065 mem := v.Args[2] 22066 v.reset(OpCopy) 22067 v.Type = mem.Type 22068 v.AddArg(mem) 22069 return true 22070 } 22071 // match: (Store dst (ArrayMake1 e) mem) 22072 // cond: 22073 // result: (Store {e.Type} dst e mem) 22074 for { 22075 _ = v.Args[2] 22076 dst := v.Args[0] 22077 v_1 := v.Args[1] 22078 if v_1.Op != OpArrayMake1 { 22079 break 22080 } 22081 e := v_1.Args[0] 22082 mem := v.Args[2] 22083 v.reset(OpStore) 22084 v.Aux = e.Type 22085 v.AddArg(dst) 22086 v.AddArg(e) 22087 v.AddArg(mem) 22088 return true 22089 } 22090 // match: (Store (Load (OffPtr [c] (SP)) mem) x mem) 22091 // cond: isConstZero(x) && mem.Op == OpStaticCall && isSameSym(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize() + config.RegSize 22092 // result: mem 22093 for { 22094 _ = v.Args[2] 22095 v_0 := v.Args[0] 22096 if v_0.Op != OpLoad { 22097 break 22098 } 22099 _ = v_0.Args[1] 22100 v_0_0 := v_0.Args[0] 22101 if 
v_0_0.Op != OpOffPtr { 22102 break 22103 } 22104 c := v_0_0.AuxInt 22105 v_0_0_0 := v_0_0.Args[0] 22106 if v_0_0_0.Op != OpSP { 22107 break 22108 } 22109 mem := v_0.Args[1] 22110 x := v.Args[1] 22111 if mem != v.Args[2] { 22112 break 22113 } 22114 if !(isConstZero(x) && mem.Op == OpStaticCall && isSameSym(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize()+config.RegSize) { 22115 break 22116 } 22117 v.reset(OpCopy) 22118 v.Type = mem.Type 22119 v.AddArg(mem) 22120 return true 22121 } 22122 // match: (Store (OffPtr (Load (OffPtr [c] (SP)) mem)) x mem) 22123 // cond: isConstZero(x) && mem.Op == OpStaticCall && isSameSym(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize() + config.RegSize 22124 // result: mem 22125 for { 22126 _ = v.Args[2] 22127 v_0 := v.Args[0] 22128 if v_0.Op != OpOffPtr { 22129 break 22130 } 22131 v_0_0 := v_0.Args[0] 22132 if v_0_0.Op != OpLoad { 22133 break 22134 } 22135 _ = v_0_0.Args[1] 22136 v_0_0_0 := v_0_0.Args[0] 22137 if v_0_0_0.Op != OpOffPtr { 22138 break 22139 } 22140 c := v_0_0_0.AuxInt 22141 v_0_0_0_0 := v_0_0_0.Args[0] 22142 if v_0_0_0_0.Op != OpSP { 22143 break 22144 } 22145 mem := v_0_0.Args[1] 22146 x := v.Args[1] 22147 if mem != v.Args[2] { 22148 break 22149 } 22150 if !(isConstZero(x) && mem.Op == OpStaticCall && isSameSym(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize()+config.RegSize) { 22151 break 22152 } 22153 v.reset(OpCopy) 22154 v.Type = mem.Type 22155 v.AddArg(mem) 22156 return true 22157 } 22158 return false 22159 } 22160 func rewriteValuegeneric_OpStringLen_0(v *Value) bool { 22161 // match: (StringLen (StringMake _ (Const64 <t> [c]))) 22162 // cond: 22163 // result: (Const64 <t> [c]) 22164 for { 22165 v_0 := v.Args[0] 22166 if v_0.Op != OpStringMake { 22167 break 22168 } 22169 _ = v_0.Args[1] 22170 v_0_1 := v_0.Args[1] 22171 if v_0_1.Op != OpConst64 { 22172 break 22173 } 22174 t := v_0_1.Type 22175 c := v_0_1.AuxInt 22176 v.reset(OpConst64) 22177 v.Type = t 22178 v.AuxInt = c 22179 return true 22180 } 22181 return false 22182 } 22183 func rewriteValuegeneric_OpStringPtr_0(v *Value) bool { 22184 // match: (StringPtr (StringMake (Const64 <t> [c]) _)) 22185 // cond: 22186 // result: (Const64 <t> [c]) 22187 for { 22188 v_0 := v.Args[0] 22189 if v_0.Op != OpStringMake { 22190 break 22191 } 22192 _ = v_0.Args[1] 22193 v_0_0 := v_0.Args[0] 22194 if v_0_0.Op != OpConst64 { 22195 break 22196 } 22197 t := v_0_0.Type 22198 c := v_0_0.AuxInt 22199 v.reset(OpConst64) 22200 v.Type = t 22201 v.AuxInt = c 22202 return true 22203 } 22204 return false 22205 } 22206 func rewriteValuegeneric_OpStructSelect_0(v *Value) bool { 22207 // match: (StructSelect (StructMake1 x)) 22208 // cond: 22209 // result: x 22210 for { 22211 v_0 := v.Args[0] 22212 if v_0.Op != OpStructMake1 { 22213 break 22214 } 22215 x := v_0.Args[0] 22216 v.reset(OpCopy) 22217 v.Type = x.Type 22218 v.AddArg(x) 22219 return true 22220 } 22221 // match: (StructSelect [0] (StructMake2 x _)) 22222 // cond: 22223 // result: x 22224 for { 22225 if v.AuxInt != 0 { 22226 break 22227 } 22228 v_0 := v.Args[0] 22229 if v_0.Op != OpStructMake2 { 22230 break 22231 } 22232 _ = v_0.Args[1] 22233 x := v_0.Args[0] 22234 v.reset(OpCopy) 22235 v.Type = x.Type 22236 v.AddArg(x) 22237 return true 22238 } 22239 // match: (StructSelect [1] (StructMake2 _ x)) 22240 // cond: 22241 // result: x 22242 for { 22243 if v.AuxInt != 1 { 22244 break 22245 } 22246 v_0 := v.Args[0] 22247 if v_0.Op != OpStructMake2 { 22248 break 22249 } 22250 _ = v_0.Args[1] 22251 x := v_0.Args[1] 
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (StructSelect [0] (StructMake3 x _ _))
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		v_0 := v.Args[0]
		if v_0.Op != OpStructMake3 {
			break
		}
		_ = v_0.Args[2]
		x := v_0.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (StructSelect [1] (StructMake3 _ x _))
	// cond:
	// result: x
	for {
		if v.AuxInt != 1 {
			break
		}
		v_0 := v.Args[0]
		if v_0.Op != OpStructMake3 {
			break
		}
		_ = v_0.Args[2]
		x := v_0.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (StructSelect [2] (StructMake3 _ _ x))
	// cond:
	// result: x
	for {
		if v.AuxInt != 2 {
			break
		}
		v_0 := v.Args[0]
		if v_0.Op != OpStructMake3 {
			break
		}
		_ = v_0.Args[2]
		x := v_0.Args[2]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (StructSelect [0] (StructMake4 x _ _ _))
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		v_0 := v.Args[0]
		if v_0.Op != OpStructMake4 {
			break
		}
		_ = v_0.Args[3]
		x := v_0.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (StructSelect [1] (StructMake4 _ x _ _))
	// cond:
	// result: x
	for {
		if v.AuxInt != 1 {
			break
		}
		v_0 := v.Args[0]
		if v_0.Op != OpStructMake4 {
			break
		}
		_ = v_0.Args[3]
		x := v_0.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (StructSelect [2] (StructMake4 _ _ x _))
	// cond:
	// result: x
	for {
		if v.AuxInt != 2 {
			break
		}
		v_0 := v.Args[0]
		if v_0.Op != OpStructMake4 {
			break
		}
		_ = v_0.Args[3]
		x := v_0.Args[2]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (StructSelect [3] (StructMake4 _ _ _ x))
	// cond:
	// result: x
	for {
		if v.AuxInt != 3 {
			break
		}
		v_0 := v.Args[0]
		if v_0.Op != OpStructMake4 {
			break
		}
		_ = v_0.Args[3]
		x := v_0.Args[3]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValuegeneric_OpStructSelect_10(v *Value) bool {
	b := v.Block
	_ = b
	fe := b.Func.fe
	_ = fe
	// match: (StructSelect [i] x:(Load <t> ptr mem))
	// cond: !fe.CanSSA(t)
	// result: @x.Block (Load <v.Type> (OffPtr <v.Type.PtrTo()> [t.FieldOff(int(i))] ptr) mem)
	for {
		i := v.AuxInt
		x := v.Args[0]
		if x.Op != OpLoad {
			break
		}
		t := x.Type
		_ = x.Args[1]
		ptr := x.Args[0]
		mem := x.Args[1]
		if !(!fe.CanSSA(t)) {
			break
		}
		b = x.Block
		v0 := b.NewValue0(v.Pos, OpLoad, v.Type)
		v.reset(OpCopy)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpOffPtr, v.Type.PtrTo())
		v1.AuxInt = t.FieldOff(int(i))
		v1.AddArg(ptr)
		v0.AddArg(v1)
		v0.AddArg(mem)
		return true
	}
	// match: (StructSelect [0] x:(IData _))
	// cond:
	// result: x
	for {
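		// StructSelect [0] of an IData is the IData value itself, so this
		// rule simply forwards x (via OpCopy), just like the StructMake
		// cases above.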
if v.AuxInt != 0 { 22422 break 22423 } 22424 x := v.Args[0] 22425 if x.Op != OpIData { 22426 break 22427 } 22428 v.reset(OpCopy) 22429 v.Type = x.Type 22430 v.AddArg(x) 22431 return true 22432 } 22433 return false 22434 } 22435 func rewriteValuegeneric_OpSub16_0(v *Value) bool { 22436 b := v.Block 22437 _ = b 22438 // match: (Sub16 (Const16 [c]) (Const16 [d])) 22439 // cond: 22440 // result: (Const16 [int64(int16(c-d))]) 22441 for { 22442 _ = v.Args[1] 22443 v_0 := v.Args[0] 22444 if v_0.Op != OpConst16 { 22445 break 22446 } 22447 c := v_0.AuxInt 22448 v_1 := v.Args[1] 22449 if v_1.Op != OpConst16 { 22450 break 22451 } 22452 d := v_1.AuxInt 22453 v.reset(OpConst16) 22454 v.AuxInt = int64(int16(c - d)) 22455 return true 22456 } 22457 // match: (Sub16 x (Const16 <t> [c])) 22458 // cond: x.Op != OpConst16 22459 // result: (Add16 (Const16 <t> [int64(int16(-c))]) x) 22460 for { 22461 _ = v.Args[1] 22462 x := v.Args[0] 22463 v_1 := v.Args[1] 22464 if v_1.Op != OpConst16 { 22465 break 22466 } 22467 t := v_1.Type 22468 c := v_1.AuxInt 22469 if !(x.Op != OpConst16) { 22470 break 22471 } 22472 v.reset(OpAdd16) 22473 v0 := b.NewValue0(v.Pos, OpConst16, t) 22474 v0.AuxInt = int64(int16(-c)) 22475 v.AddArg(v0) 22476 v.AddArg(x) 22477 return true 22478 } 22479 // match: (Sub16 x x) 22480 // cond: 22481 // result: (Const16 [0]) 22482 for { 22483 _ = v.Args[1] 22484 x := v.Args[0] 22485 if x != v.Args[1] { 22486 break 22487 } 22488 v.reset(OpConst16) 22489 v.AuxInt = 0 22490 return true 22491 } 22492 // match: (Sub16 (Add16 x y) x) 22493 // cond: 22494 // result: y 22495 for { 22496 _ = v.Args[1] 22497 v_0 := v.Args[0] 22498 if v_0.Op != OpAdd16 { 22499 break 22500 } 22501 _ = v_0.Args[1] 22502 x := v_0.Args[0] 22503 y := v_0.Args[1] 22504 if x != v.Args[1] { 22505 break 22506 } 22507 v.reset(OpCopy) 22508 v.Type = y.Type 22509 v.AddArg(y) 22510 return true 22511 } 22512 // match: (Sub16 (Add16 y x) x) 22513 // cond: 22514 // result: y 22515 for { 22516 _ = v.Args[1] 22517 v_0 := v.Args[0] 22518 if v_0.Op != OpAdd16 { 22519 break 22520 } 22521 _ = v_0.Args[1] 22522 y := v_0.Args[0] 22523 x := v_0.Args[1] 22524 if x != v.Args[1] { 22525 break 22526 } 22527 v.reset(OpCopy) 22528 v.Type = y.Type 22529 v.AddArg(y) 22530 return true 22531 } 22532 // match: (Sub16 (Add16 x y) y) 22533 // cond: 22534 // result: x 22535 for { 22536 _ = v.Args[1] 22537 v_0 := v.Args[0] 22538 if v_0.Op != OpAdd16 { 22539 break 22540 } 22541 _ = v_0.Args[1] 22542 x := v_0.Args[0] 22543 y := v_0.Args[1] 22544 if y != v.Args[1] { 22545 break 22546 } 22547 v.reset(OpCopy) 22548 v.Type = x.Type 22549 v.AddArg(x) 22550 return true 22551 } 22552 // match: (Sub16 (Add16 y x) y) 22553 // cond: 22554 // result: x 22555 for { 22556 _ = v.Args[1] 22557 v_0 := v.Args[0] 22558 if v_0.Op != OpAdd16 { 22559 break 22560 } 22561 _ = v_0.Args[1] 22562 y := v_0.Args[0] 22563 x := v_0.Args[1] 22564 if y != v.Args[1] { 22565 break 22566 } 22567 v.reset(OpCopy) 22568 v.Type = x.Type 22569 v.AddArg(x) 22570 return true 22571 } 22572 // match: (Sub16 x (Sub16 i:(Const16 <t>) z)) 22573 // cond: (z.Op != OpConst16 && x.Op != OpConst16) 22574 // result: (Sub16 (Add16 <t> x z) i) 22575 for { 22576 _ = v.Args[1] 22577 x := v.Args[0] 22578 v_1 := v.Args[1] 22579 if v_1.Op != OpSub16 { 22580 break 22581 } 22582 _ = v_1.Args[1] 22583 i := v_1.Args[0] 22584 if i.Op != OpConst16 { 22585 break 22586 } 22587 t := i.Type 22588 z := v_1.Args[1] 22589 if !(z.Op != OpConst16 && x.Op != OpConst16) { 22590 break 22591 } 22592 v.reset(OpSub16) 22593 v0 := b.NewValue0(v.Pos, 
OpAdd16, t) 22594 v0.AddArg(x) 22595 v0.AddArg(z) 22596 v.AddArg(v0) 22597 v.AddArg(i) 22598 return true 22599 } 22600 // match: (Sub16 x (Sub16 z i:(Const16 <t>))) 22601 // cond: (z.Op != OpConst16 && x.Op != OpConst16) 22602 // result: (Add16 i (Sub16 <t> x z)) 22603 for { 22604 _ = v.Args[1] 22605 x := v.Args[0] 22606 v_1 := v.Args[1] 22607 if v_1.Op != OpSub16 { 22608 break 22609 } 22610 _ = v_1.Args[1] 22611 z := v_1.Args[0] 22612 i := v_1.Args[1] 22613 if i.Op != OpConst16 { 22614 break 22615 } 22616 t := i.Type 22617 if !(z.Op != OpConst16 && x.Op != OpConst16) { 22618 break 22619 } 22620 v.reset(OpAdd16) 22621 v.AddArg(i) 22622 v0 := b.NewValue0(v.Pos, OpSub16, t) 22623 v0.AddArg(x) 22624 v0.AddArg(z) 22625 v.AddArg(v0) 22626 return true 22627 } 22628 // match: (Sub16 (Const16 <t> [c]) (Sub16 x (Const16 <t> [d]))) 22629 // cond: 22630 // result: (Sub16 (Const16 <t> [int64(int16(c+d))]) x) 22631 for { 22632 _ = v.Args[1] 22633 v_0 := v.Args[0] 22634 if v_0.Op != OpConst16 { 22635 break 22636 } 22637 t := v_0.Type 22638 c := v_0.AuxInt 22639 v_1 := v.Args[1] 22640 if v_1.Op != OpSub16 { 22641 break 22642 } 22643 _ = v_1.Args[1] 22644 x := v_1.Args[0] 22645 v_1_1 := v_1.Args[1] 22646 if v_1_1.Op != OpConst16 { 22647 break 22648 } 22649 if v_1_1.Type != t { 22650 break 22651 } 22652 d := v_1_1.AuxInt 22653 v.reset(OpSub16) 22654 v0 := b.NewValue0(v.Pos, OpConst16, t) 22655 v0.AuxInt = int64(int16(c + d)) 22656 v.AddArg(v0) 22657 v.AddArg(x) 22658 return true 22659 } 22660 return false 22661 } 22662 func rewriteValuegeneric_OpSub16_10(v *Value) bool { 22663 b := v.Block 22664 _ = b 22665 // match: (Sub16 (Const16 <t> [c]) (Sub16 (Const16 <t> [d]) x)) 22666 // cond: 22667 // result: (Add16 (Const16 <t> [int64(int16(c-d))]) x) 22668 for { 22669 _ = v.Args[1] 22670 v_0 := v.Args[0] 22671 if v_0.Op != OpConst16 { 22672 break 22673 } 22674 t := v_0.Type 22675 c := v_0.AuxInt 22676 v_1 := v.Args[1] 22677 if v_1.Op != OpSub16 { 22678 break 22679 } 22680 _ = v_1.Args[1] 22681 v_1_0 := v_1.Args[0] 22682 if v_1_0.Op != OpConst16 { 22683 break 22684 } 22685 if v_1_0.Type != t { 22686 break 22687 } 22688 d := v_1_0.AuxInt 22689 x := v_1.Args[1] 22690 v.reset(OpAdd16) 22691 v0 := b.NewValue0(v.Pos, OpConst16, t) 22692 v0.AuxInt = int64(int16(c - d)) 22693 v.AddArg(v0) 22694 v.AddArg(x) 22695 return true 22696 } 22697 return false 22698 } 22699 func rewriteValuegeneric_OpSub32_0(v *Value) bool { 22700 b := v.Block 22701 _ = b 22702 // match: (Sub32 (Const32 [c]) (Const32 [d])) 22703 // cond: 22704 // result: (Const32 [int64(int32(c-d))]) 22705 for { 22706 _ = v.Args[1] 22707 v_0 := v.Args[0] 22708 if v_0.Op != OpConst32 { 22709 break 22710 } 22711 c := v_0.AuxInt 22712 v_1 := v.Args[1] 22713 if v_1.Op != OpConst32 { 22714 break 22715 } 22716 d := v_1.AuxInt 22717 v.reset(OpConst32) 22718 v.AuxInt = int64(int32(c - d)) 22719 return true 22720 } 22721 // match: (Sub32 x (Const32 <t> [c])) 22722 // cond: x.Op != OpConst32 22723 // result: (Add32 (Const32 <t> [int64(int32(-c))]) x) 22724 for { 22725 _ = v.Args[1] 22726 x := v.Args[0] 22727 v_1 := v.Args[1] 22728 if v_1.Op != OpConst32 { 22729 break 22730 } 22731 t := v_1.Type 22732 c := v_1.AuxInt 22733 if !(x.Op != OpConst32) { 22734 break 22735 } 22736 v.reset(OpAdd32) 22737 v0 := b.NewValue0(v.Pos, OpConst32, t) 22738 v0.AuxInt = int64(int32(-c)) 22739 v.AddArg(v0) 22740 v.AddArg(x) 22741 return true 22742 } 22743 // match: (Sub32 x x) 22744 // cond: 22745 // result: (Const32 [0]) 22746 for { 22747 _ = v.Args[1] 22748 x := v.Args[0] 22749 if x != 
v.Args[1] { 22750 break 22751 } 22752 v.reset(OpConst32) 22753 v.AuxInt = 0 22754 return true 22755 } 22756 // match: (Sub32 (Add32 x y) x) 22757 // cond: 22758 // result: y 22759 for { 22760 _ = v.Args[1] 22761 v_0 := v.Args[0] 22762 if v_0.Op != OpAdd32 { 22763 break 22764 } 22765 _ = v_0.Args[1] 22766 x := v_0.Args[0] 22767 y := v_0.Args[1] 22768 if x != v.Args[1] { 22769 break 22770 } 22771 v.reset(OpCopy) 22772 v.Type = y.Type 22773 v.AddArg(y) 22774 return true 22775 } 22776 // match: (Sub32 (Add32 y x) x) 22777 // cond: 22778 // result: y 22779 for { 22780 _ = v.Args[1] 22781 v_0 := v.Args[0] 22782 if v_0.Op != OpAdd32 { 22783 break 22784 } 22785 _ = v_0.Args[1] 22786 y := v_0.Args[0] 22787 x := v_0.Args[1] 22788 if x != v.Args[1] { 22789 break 22790 } 22791 v.reset(OpCopy) 22792 v.Type = y.Type 22793 v.AddArg(y) 22794 return true 22795 } 22796 // match: (Sub32 (Add32 x y) y) 22797 // cond: 22798 // result: x 22799 for { 22800 _ = v.Args[1] 22801 v_0 := v.Args[0] 22802 if v_0.Op != OpAdd32 { 22803 break 22804 } 22805 _ = v_0.Args[1] 22806 x := v_0.Args[0] 22807 y := v_0.Args[1] 22808 if y != v.Args[1] { 22809 break 22810 } 22811 v.reset(OpCopy) 22812 v.Type = x.Type 22813 v.AddArg(x) 22814 return true 22815 } 22816 // match: (Sub32 (Add32 y x) y) 22817 // cond: 22818 // result: x 22819 for { 22820 _ = v.Args[1] 22821 v_0 := v.Args[0] 22822 if v_0.Op != OpAdd32 { 22823 break 22824 } 22825 _ = v_0.Args[1] 22826 y := v_0.Args[0] 22827 x := v_0.Args[1] 22828 if y != v.Args[1] { 22829 break 22830 } 22831 v.reset(OpCopy) 22832 v.Type = x.Type 22833 v.AddArg(x) 22834 return true 22835 } 22836 // match: (Sub32 x (Sub32 i:(Const32 <t>) z)) 22837 // cond: (z.Op != OpConst32 && x.Op != OpConst32) 22838 // result: (Sub32 (Add32 <t> x z) i) 22839 for { 22840 _ = v.Args[1] 22841 x := v.Args[0] 22842 v_1 := v.Args[1] 22843 if v_1.Op != OpSub32 { 22844 break 22845 } 22846 _ = v_1.Args[1] 22847 i := v_1.Args[0] 22848 if i.Op != OpConst32 { 22849 break 22850 } 22851 t := i.Type 22852 z := v_1.Args[1] 22853 if !(z.Op != OpConst32 && x.Op != OpConst32) { 22854 break 22855 } 22856 v.reset(OpSub32) 22857 v0 := b.NewValue0(v.Pos, OpAdd32, t) 22858 v0.AddArg(x) 22859 v0.AddArg(z) 22860 v.AddArg(v0) 22861 v.AddArg(i) 22862 return true 22863 } 22864 // match: (Sub32 x (Sub32 z i:(Const32 <t>))) 22865 // cond: (z.Op != OpConst32 && x.Op != OpConst32) 22866 // result: (Add32 i (Sub32 <t> x z)) 22867 for { 22868 _ = v.Args[1] 22869 x := v.Args[0] 22870 v_1 := v.Args[1] 22871 if v_1.Op != OpSub32 { 22872 break 22873 } 22874 _ = v_1.Args[1] 22875 z := v_1.Args[0] 22876 i := v_1.Args[1] 22877 if i.Op != OpConst32 { 22878 break 22879 } 22880 t := i.Type 22881 if !(z.Op != OpConst32 && x.Op != OpConst32) { 22882 break 22883 } 22884 v.reset(OpAdd32) 22885 v.AddArg(i) 22886 v0 := b.NewValue0(v.Pos, OpSub32, t) 22887 v0.AddArg(x) 22888 v0.AddArg(z) 22889 v.AddArg(v0) 22890 return true 22891 } 22892 // match: (Sub32 (Const32 <t> [c]) (Sub32 x (Const32 <t> [d]))) 22893 // cond: 22894 // result: (Sub32 (Const32 <t> [int64(int32(c+d))]) x) 22895 for { 22896 _ = v.Args[1] 22897 v_0 := v.Args[0] 22898 if v_0.Op != OpConst32 { 22899 break 22900 } 22901 t := v_0.Type 22902 c := v_0.AuxInt 22903 v_1 := v.Args[1] 22904 if v_1.Op != OpSub32 { 22905 break 22906 } 22907 _ = v_1.Args[1] 22908 x := v_1.Args[0] 22909 v_1_1 := v_1.Args[1] 22910 if v_1_1.Op != OpConst32 { 22911 break 22912 } 22913 if v_1_1.Type != t { 22914 break 22915 } 22916 d := v_1_1.AuxInt 22917 v.reset(OpSub32) 22918 v0 := b.NewValue0(v.Pos, OpConst32, t) 22919 
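		// v0 is the combined constant c+d: c - (x - d) == (c + d) - x, so the
		// nested subtraction collapses into a single Sub32 from one constant.
		// Illustrative example: (Sub32 (Const32 [10]) (Sub32 x (Const32 [3])))
		// becomes (Sub32 (Const32 [13]) x).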
v0.AuxInt = int64(int32(c + d)) 22920 v.AddArg(v0) 22921 v.AddArg(x) 22922 return true 22923 } 22924 return false 22925 } 22926 func rewriteValuegeneric_OpSub32_10(v *Value) bool { 22927 b := v.Block 22928 _ = b 22929 // match: (Sub32 (Const32 <t> [c]) (Sub32 (Const32 <t> [d]) x)) 22930 // cond: 22931 // result: (Add32 (Const32 <t> [int64(int32(c-d))]) x) 22932 for { 22933 _ = v.Args[1] 22934 v_0 := v.Args[0] 22935 if v_0.Op != OpConst32 { 22936 break 22937 } 22938 t := v_0.Type 22939 c := v_0.AuxInt 22940 v_1 := v.Args[1] 22941 if v_1.Op != OpSub32 { 22942 break 22943 } 22944 _ = v_1.Args[1] 22945 v_1_0 := v_1.Args[0] 22946 if v_1_0.Op != OpConst32 { 22947 break 22948 } 22949 if v_1_0.Type != t { 22950 break 22951 } 22952 d := v_1_0.AuxInt 22953 x := v_1.Args[1] 22954 v.reset(OpAdd32) 22955 v0 := b.NewValue0(v.Pos, OpConst32, t) 22956 v0.AuxInt = int64(int32(c - d)) 22957 v.AddArg(v0) 22958 v.AddArg(x) 22959 return true 22960 } 22961 return false 22962 } 22963 func rewriteValuegeneric_OpSub32F_0(v *Value) bool { 22964 // match: (Sub32F (Const32F [c]) (Const32F [d])) 22965 // cond: 22966 // result: (Const32F [f2i(float64(i2f32(c) - i2f32(d)))]) 22967 for { 22968 _ = v.Args[1] 22969 v_0 := v.Args[0] 22970 if v_0.Op != OpConst32F { 22971 break 22972 } 22973 c := v_0.AuxInt 22974 v_1 := v.Args[1] 22975 if v_1.Op != OpConst32F { 22976 break 22977 } 22978 d := v_1.AuxInt 22979 v.reset(OpConst32F) 22980 v.AuxInt = f2i(float64(i2f32(c) - i2f32(d))) 22981 return true 22982 } 22983 // match: (Sub32F x (Const32F [0])) 22984 // cond: 22985 // result: x 22986 for { 22987 _ = v.Args[1] 22988 x := v.Args[0] 22989 v_1 := v.Args[1] 22990 if v_1.Op != OpConst32F { 22991 break 22992 } 22993 if v_1.AuxInt != 0 { 22994 break 22995 } 22996 v.reset(OpCopy) 22997 v.Type = x.Type 22998 v.AddArg(x) 22999 return true 23000 } 23001 return false 23002 } 23003 func rewriteValuegeneric_OpSub64_0(v *Value) bool { 23004 b := v.Block 23005 _ = b 23006 // match: (Sub64 (Const64 [c]) (Const64 [d])) 23007 // cond: 23008 // result: (Const64 [c-d]) 23009 for { 23010 _ = v.Args[1] 23011 v_0 := v.Args[0] 23012 if v_0.Op != OpConst64 { 23013 break 23014 } 23015 c := v_0.AuxInt 23016 v_1 := v.Args[1] 23017 if v_1.Op != OpConst64 { 23018 break 23019 } 23020 d := v_1.AuxInt 23021 v.reset(OpConst64) 23022 v.AuxInt = c - d 23023 return true 23024 } 23025 // match: (Sub64 x (Const64 <t> [c])) 23026 // cond: x.Op != OpConst64 23027 // result: (Add64 (Const64 <t> [-c]) x) 23028 for { 23029 _ = v.Args[1] 23030 x := v.Args[0] 23031 v_1 := v.Args[1] 23032 if v_1.Op != OpConst64 { 23033 break 23034 } 23035 t := v_1.Type 23036 c := v_1.AuxInt 23037 if !(x.Op != OpConst64) { 23038 break 23039 } 23040 v.reset(OpAdd64) 23041 v0 := b.NewValue0(v.Pos, OpConst64, t) 23042 v0.AuxInt = -c 23043 v.AddArg(v0) 23044 v.AddArg(x) 23045 return true 23046 } 23047 // match: (Sub64 x x) 23048 // cond: 23049 // result: (Const64 [0]) 23050 for { 23051 _ = v.Args[1] 23052 x := v.Args[0] 23053 if x != v.Args[1] { 23054 break 23055 } 23056 v.reset(OpConst64) 23057 v.AuxInt = 0 23058 return true 23059 } 23060 // match: (Sub64 (Add64 x y) x) 23061 // cond: 23062 // result: y 23063 for { 23064 _ = v.Args[1] 23065 v_0 := v.Args[0] 23066 if v_0.Op != OpAdd64 { 23067 break 23068 } 23069 _ = v_0.Args[1] 23070 x := v_0.Args[0] 23071 y := v_0.Args[1] 23072 if x != v.Args[1] { 23073 break 23074 } 23075 v.reset(OpCopy) 23076 v.Type = y.Type 23077 v.AddArg(y) 23078 return true 23079 } 23080 // match: (Sub64 (Add64 y x) x) 23081 // cond: 23082 // result: y 23083 for { 23084 _ 
= v.Args[1] 23085 v_0 := v.Args[0] 23086 if v_0.Op != OpAdd64 { 23087 break 23088 } 23089 _ = v_0.Args[1] 23090 y := v_0.Args[0] 23091 x := v_0.Args[1] 23092 if x != v.Args[1] { 23093 break 23094 } 23095 v.reset(OpCopy) 23096 v.Type = y.Type 23097 v.AddArg(y) 23098 return true 23099 } 23100 // match: (Sub64 (Add64 x y) y) 23101 // cond: 23102 // result: x 23103 for { 23104 _ = v.Args[1] 23105 v_0 := v.Args[0] 23106 if v_0.Op != OpAdd64 { 23107 break 23108 } 23109 _ = v_0.Args[1] 23110 x := v_0.Args[0] 23111 y := v_0.Args[1] 23112 if y != v.Args[1] { 23113 break 23114 } 23115 v.reset(OpCopy) 23116 v.Type = x.Type 23117 v.AddArg(x) 23118 return true 23119 } 23120 // match: (Sub64 (Add64 y x) y) 23121 // cond: 23122 // result: x 23123 for { 23124 _ = v.Args[1] 23125 v_0 := v.Args[0] 23126 if v_0.Op != OpAdd64 { 23127 break 23128 } 23129 _ = v_0.Args[1] 23130 y := v_0.Args[0] 23131 x := v_0.Args[1] 23132 if y != v.Args[1] { 23133 break 23134 } 23135 v.reset(OpCopy) 23136 v.Type = x.Type 23137 v.AddArg(x) 23138 return true 23139 } 23140 // match: (Sub64 x (Sub64 i:(Const64 <t>) z)) 23141 // cond: (z.Op != OpConst64 && x.Op != OpConst64) 23142 // result: (Sub64 (Add64 <t> x z) i) 23143 for { 23144 _ = v.Args[1] 23145 x := v.Args[0] 23146 v_1 := v.Args[1] 23147 if v_1.Op != OpSub64 { 23148 break 23149 } 23150 _ = v_1.Args[1] 23151 i := v_1.Args[0] 23152 if i.Op != OpConst64 { 23153 break 23154 } 23155 t := i.Type 23156 z := v_1.Args[1] 23157 if !(z.Op != OpConst64 && x.Op != OpConst64) { 23158 break 23159 } 23160 v.reset(OpSub64) 23161 v0 := b.NewValue0(v.Pos, OpAdd64, t) 23162 v0.AddArg(x) 23163 v0.AddArg(z) 23164 v.AddArg(v0) 23165 v.AddArg(i) 23166 return true 23167 } 23168 // match: (Sub64 x (Sub64 z i:(Const64 <t>))) 23169 // cond: (z.Op != OpConst64 && x.Op != OpConst64) 23170 // result: (Add64 i (Sub64 <t> x z)) 23171 for { 23172 _ = v.Args[1] 23173 x := v.Args[0] 23174 v_1 := v.Args[1] 23175 if v_1.Op != OpSub64 { 23176 break 23177 } 23178 _ = v_1.Args[1] 23179 z := v_1.Args[0] 23180 i := v_1.Args[1] 23181 if i.Op != OpConst64 { 23182 break 23183 } 23184 t := i.Type 23185 if !(z.Op != OpConst64 && x.Op != OpConst64) { 23186 break 23187 } 23188 v.reset(OpAdd64) 23189 v.AddArg(i) 23190 v0 := b.NewValue0(v.Pos, OpSub64, t) 23191 v0.AddArg(x) 23192 v0.AddArg(z) 23193 v.AddArg(v0) 23194 return true 23195 } 23196 // match: (Sub64 (Const64 <t> [c]) (Sub64 x (Const64 <t> [d]))) 23197 // cond: 23198 // result: (Sub64 (Const64 <t> [c+d]) x) 23199 for { 23200 _ = v.Args[1] 23201 v_0 := v.Args[0] 23202 if v_0.Op != OpConst64 { 23203 break 23204 } 23205 t := v_0.Type 23206 c := v_0.AuxInt 23207 v_1 := v.Args[1] 23208 if v_1.Op != OpSub64 { 23209 break 23210 } 23211 _ = v_1.Args[1] 23212 x := v_1.Args[0] 23213 v_1_1 := v_1.Args[1] 23214 if v_1_1.Op != OpConst64 { 23215 break 23216 } 23217 if v_1_1.Type != t { 23218 break 23219 } 23220 d := v_1_1.AuxInt 23221 v.reset(OpSub64) 23222 v0 := b.NewValue0(v.Pos, OpConst64, t) 23223 v0.AuxInt = c + d 23224 v.AddArg(v0) 23225 v.AddArg(x) 23226 return true 23227 } 23228 return false 23229 } 23230 func rewriteValuegeneric_OpSub64_10(v *Value) bool { 23231 b := v.Block 23232 _ = b 23233 // match: (Sub64 (Const64 <t> [c]) (Sub64 (Const64 <t> [d]) x)) 23234 // cond: 23235 // result: (Add64 (Const64 <t> [c-d]) x) 23236 for { 23237 _ = v.Args[1] 23238 v_0 := v.Args[0] 23239 if v_0.Op != OpConst64 { 23240 break 23241 } 23242 t := v_0.Type 23243 c := v_0.AuxInt 23244 v_1 := v.Args[1] 23245 if v_1.Op != OpSub64 { 23246 break 23247 } 23248 _ = v_1.Args[1] 23249 v_1_0 := 
v_1.Args[0] 23250 if v_1_0.Op != OpConst64 { 23251 break 23252 } 23253 if v_1_0.Type != t { 23254 break 23255 } 23256 d := v_1_0.AuxInt 23257 x := v_1.Args[1] 23258 v.reset(OpAdd64) 23259 v0 := b.NewValue0(v.Pos, OpConst64, t) 23260 v0.AuxInt = c - d 23261 v.AddArg(v0) 23262 v.AddArg(x) 23263 return true 23264 } 23265 return false 23266 } 23267 func rewriteValuegeneric_OpSub64F_0(v *Value) bool { 23268 // match: (Sub64F (Const64F [c]) (Const64F [d])) 23269 // cond: 23270 // result: (Const64F [f2i(i2f(c) - i2f(d))]) 23271 for { 23272 _ = v.Args[1] 23273 v_0 := v.Args[0] 23274 if v_0.Op != OpConst64F { 23275 break 23276 } 23277 c := v_0.AuxInt 23278 v_1 := v.Args[1] 23279 if v_1.Op != OpConst64F { 23280 break 23281 } 23282 d := v_1.AuxInt 23283 v.reset(OpConst64F) 23284 v.AuxInt = f2i(i2f(c) - i2f(d)) 23285 return true 23286 } 23287 // match: (Sub64F x (Const64F [0])) 23288 // cond: 23289 // result: x 23290 for { 23291 _ = v.Args[1] 23292 x := v.Args[0] 23293 v_1 := v.Args[1] 23294 if v_1.Op != OpConst64F { 23295 break 23296 } 23297 if v_1.AuxInt != 0 { 23298 break 23299 } 23300 v.reset(OpCopy) 23301 v.Type = x.Type 23302 v.AddArg(x) 23303 return true 23304 } 23305 return false 23306 } 23307 func rewriteValuegeneric_OpSub8_0(v *Value) bool { 23308 b := v.Block 23309 _ = b 23310 // match: (Sub8 (Const8 [c]) (Const8 [d])) 23311 // cond: 23312 // result: (Const8 [int64(int8(c-d))]) 23313 for { 23314 _ = v.Args[1] 23315 v_0 := v.Args[0] 23316 if v_0.Op != OpConst8 { 23317 break 23318 } 23319 c := v_0.AuxInt 23320 v_1 := v.Args[1] 23321 if v_1.Op != OpConst8 { 23322 break 23323 } 23324 d := v_1.AuxInt 23325 v.reset(OpConst8) 23326 v.AuxInt = int64(int8(c - d)) 23327 return true 23328 } 23329 // match: (Sub8 x (Const8 <t> [c])) 23330 // cond: x.Op != OpConst8 23331 // result: (Add8 (Const8 <t> [int64(int8(-c))]) x) 23332 for { 23333 _ = v.Args[1] 23334 x := v.Args[0] 23335 v_1 := v.Args[1] 23336 if v_1.Op != OpConst8 { 23337 break 23338 } 23339 t := v_1.Type 23340 c := v_1.AuxInt 23341 if !(x.Op != OpConst8) { 23342 break 23343 } 23344 v.reset(OpAdd8) 23345 v0 := b.NewValue0(v.Pos, OpConst8, t) 23346 v0.AuxInt = int64(int8(-c)) 23347 v.AddArg(v0) 23348 v.AddArg(x) 23349 return true 23350 } 23351 // match: (Sub8 x x) 23352 // cond: 23353 // result: (Const8 [0]) 23354 for { 23355 _ = v.Args[1] 23356 x := v.Args[0] 23357 if x != v.Args[1] { 23358 break 23359 } 23360 v.reset(OpConst8) 23361 v.AuxInt = 0 23362 return true 23363 } 23364 // match: (Sub8 (Add8 x y) x) 23365 // cond: 23366 // result: y 23367 for { 23368 _ = v.Args[1] 23369 v_0 := v.Args[0] 23370 if v_0.Op != OpAdd8 { 23371 break 23372 } 23373 _ = v_0.Args[1] 23374 x := v_0.Args[0] 23375 y := v_0.Args[1] 23376 if x != v.Args[1] { 23377 break 23378 } 23379 v.reset(OpCopy) 23380 v.Type = y.Type 23381 v.AddArg(y) 23382 return true 23383 } 23384 // match: (Sub8 (Add8 y x) x) 23385 // cond: 23386 // result: y 23387 for { 23388 _ = v.Args[1] 23389 v_0 := v.Args[0] 23390 if v_0.Op != OpAdd8 { 23391 break 23392 } 23393 _ = v_0.Args[1] 23394 y := v_0.Args[0] 23395 x := v_0.Args[1] 23396 if x != v.Args[1] { 23397 break 23398 } 23399 v.reset(OpCopy) 23400 v.Type = y.Type 23401 v.AddArg(y) 23402 return true 23403 } 23404 // match: (Sub8 (Add8 x y) y) 23405 // cond: 23406 // result: x 23407 for { 23408 _ = v.Args[1] 23409 v_0 := v.Args[0] 23410 if v_0.Op != OpAdd8 { 23411 break 23412 } 23413 _ = v_0.Args[1] 23414 x := v_0.Args[0] 23415 y := v_0.Args[1] 23416 if y != v.Args[1] { 23417 break 23418 } 23419 v.reset(OpCopy) 23420 v.Type = x.Type 23421 
v.AddArg(x) 23422 return true 23423 } 23424 // match: (Sub8 (Add8 y x) y) 23425 // cond: 23426 // result: x 23427 for { 23428 _ = v.Args[1] 23429 v_0 := v.Args[0] 23430 if v_0.Op != OpAdd8 { 23431 break 23432 } 23433 _ = v_0.Args[1] 23434 y := v_0.Args[0] 23435 x := v_0.Args[1] 23436 if y != v.Args[1] { 23437 break 23438 } 23439 v.reset(OpCopy) 23440 v.Type = x.Type 23441 v.AddArg(x) 23442 return true 23443 } 23444 // match: (Sub8 x (Sub8 i:(Const8 <t>) z)) 23445 // cond: (z.Op != OpConst8 && x.Op != OpConst8) 23446 // result: (Sub8 (Add8 <t> x z) i) 23447 for { 23448 _ = v.Args[1] 23449 x := v.Args[0] 23450 v_1 := v.Args[1] 23451 if v_1.Op != OpSub8 { 23452 break 23453 } 23454 _ = v_1.Args[1] 23455 i := v_1.Args[0] 23456 if i.Op != OpConst8 { 23457 break 23458 } 23459 t := i.Type 23460 z := v_1.Args[1] 23461 if !(z.Op != OpConst8 && x.Op != OpConst8) { 23462 break 23463 } 23464 v.reset(OpSub8) 23465 v0 := b.NewValue0(v.Pos, OpAdd8, t) 23466 v0.AddArg(x) 23467 v0.AddArg(z) 23468 v.AddArg(v0) 23469 v.AddArg(i) 23470 return true 23471 } 23472 // match: (Sub8 x (Sub8 z i:(Const8 <t>))) 23473 // cond: (z.Op != OpConst8 && x.Op != OpConst8) 23474 // result: (Add8 i (Sub8 <t> x z)) 23475 for { 23476 _ = v.Args[1] 23477 x := v.Args[0] 23478 v_1 := v.Args[1] 23479 if v_1.Op != OpSub8 { 23480 break 23481 } 23482 _ = v_1.Args[1] 23483 z := v_1.Args[0] 23484 i := v_1.Args[1] 23485 if i.Op != OpConst8 { 23486 break 23487 } 23488 t := i.Type 23489 if !(z.Op != OpConst8 && x.Op != OpConst8) { 23490 break 23491 } 23492 v.reset(OpAdd8) 23493 v.AddArg(i) 23494 v0 := b.NewValue0(v.Pos, OpSub8, t) 23495 v0.AddArg(x) 23496 v0.AddArg(z) 23497 v.AddArg(v0) 23498 return true 23499 } 23500 // match: (Sub8 (Const8 <t> [c]) (Sub8 x (Const8 <t> [d]))) 23501 // cond: 23502 // result: (Sub8 (Const8 <t> [int64(int8(c+d))]) x) 23503 for { 23504 _ = v.Args[1] 23505 v_0 := v.Args[0] 23506 if v_0.Op != OpConst8 { 23507 break 23508 } 23509 t := v_0.Type 23510 c := v_0.AuxInt 23511 v_1 := v.Args[1] 23512 if v_1.Op != OpSub8 { 23513 break 23514 } 23515 _ = v_1.Args[1] 23516 x := v_1.Args[0] 23517 v_1_1 := v_1.Args[1] 23518 if v_1_1.Op != OpConst8 { 23519 break 23520 } 23521 if v_1_1.Type != t { 23522 break 23523 } 23524 d := v_1_1.AuxInt 23525 v.reset(OpSub8) 23526 v0 := b.NewValue0(v.Pos, OpConst8, t) 23527 v0.AuxInt = int64(int8(c + d)) 23528 v.AddArg(v0) 23529 v.AddArg(x) 23530 return true 23531 } 23532 return false 23533 } 23534 func rewriteValuegeneric_OpSub8_10(v *Value) bool { 23535 b := v.Block 23536 _ = b 23537 // match: (Sub8 (Const8 <t> [c]) (Sub8 (Const8 <t> [d]) x)) 23538 // cond: 23539 // result: (Add8 (Const8 <t> [int64(int8(c-d))]) x) 23540 for { 23541 _ = v.Args[1] 23542 v_0 := v.Args[0] 23543 if v_0.Op != OpConst8 { 23544 break 23545 } 23546 t := v_0.Type 23547 c := v_0.AuxInt 23548 v_1 := v.Args[1] 23549 if v_1.Op != OpSub8 { 23550 break 23551 } 23552 _ = v_1.Args[1] 23553 v_1_0 := v_1.Args[0] 23554 if v_1_0.Op != OpConst8 { 23555 break 23556 } 23557 if v_1_0.Type != t { 23558 break 23559 } 23560 d := v_1_0.AuxInt 23561 x := v_1.Args[1] 23562 v.reset(OpAdd8) 23563 v0 := b.NewValue0(v.Pos, OpConst8, t) 23564 v0.AuxInt = int64(int8(c - d)) 23565 v.AddArg(v0) 23566 v.AddArg(x) 23567 return true 23568 } 23569 return false 23570 } 23571 func rewriteValuegeneric_OpTrunc16to8_0(v *Value) bool { 23572 // match: (Trunc16to8 (Const16 [c])) 23573 // cond: 23574 // result: (Const8 [int64(int8(c))]) 23575 for { 23576 v_0 := v.Args[0] 23577 if v_0.Op != OpConst16 { 23578 break 23579 } 23580 c := v_0.AuxInt 23581 
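		// Constant truncation: the new Const8 keeps only the low 8 bits of c,
		// reinterpreted as a signed value. Illustrative example:
		// (Trunc16to8 (Const16 [0x1234])) becomes (Const8 [0x34]).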
v.reset(OpConst8) 23582 v.AuxInt = int64(int8(c)) 23583 return true 23584 } 23585 // match: (Trunc16to8 (ZeroExt8to16 x)) 23586 // cond: 23587 // result: x 23588 for { 23589 v_0 := v.Args[0] 23590 if v_0.Op != OpZeroExt8to16 { 23591 break 23592 } 23593 x := v_0.Args[0] 23594 v.reset(OpCopy) 23595 v.Type = x.Type 23596 v.AddArg(x) 23597 return true 23598 } 23599 // match: (Trunc16to8 (SignExt8to16 x)) 23600 // cond: 23601 // result: x 23602 for { 23603 v_0 := v.Args[0] 23604 if v_0.Op != OpSignExt8to16 { 23605 break 23606 } 23607 x := v_0.Args[0] 23608 v.reset(OpCopy) 23609 v.Type = x.Type 23610 v.AddArg(x) 23611 return true 23612 } 23613 // match: (Trunc16to8 (And16 (Const16 [y]) x)) 23614 // cond: y&0xFF == 0xFF 23615 // result: (Trunc16to8 x) 23616 for { 23617 v_0 := v.Args[0] 23618 if v_0.Op != OpAnd16 { 23619 break 23620 } 23621 _ = v_0.Args[1] 23622 v_0_0 := v_0.Args[0] 23623 if v_0_0.Op != OpConst16 { 23624 break 23625 } 23626 y := v_0_0.AuxInt 23627 x := v_0.Args[1] 23628 if !(y&0xFF == 0xFF) { 23629 break 23630 } 23631 v.reset(OpTrunc16to8) 23632 v.AddArg(x) 23633 return true 23634 } 23635 // match: (Trunc16to8 (And16 x (Const16 [y]))) 23636 // cond: y&0xFF == 0xFF 23637 // result: (Trunc16to8 x) 23638 for { 23639 v_0 := v.Args[0] 23640 if v_0.Op != OpAnd16 { 23641 break 23642 } 23643 _ = v_0.Args[1] 23644 x := v_0.Args[0] 23645 v_0_1 := v_0.Args[1] 23646 if v_0_1.Op != OpConst16 { 23647 break 23648 } 23649 y := v_0_1.AuxInt 23650 if !(y&0xFF == 0xFF) { 23651 break 23652 } 23653 v.reset(OpTrunc16to8) 23654 v.AddArg(x) 23655 return true 23656 } 23657 return false 23658 } 23659 func rewriteValuegeneric_OpTrunc32to16_0(v *Value) bool { 23660 // match: (Trunc32to16 (Const32 [c])) 23661 // cond: 23662 // result: (Const16 [int64(int16(c))]) 23663 for { 23664 v_0 := v.Args[0] 23665 if v_0.Op != OpConst32 { 23666 break 23667 } 23668 c := v_0.AuxInt 23669 v.reset(OpConst16) 23670 v.AuxInt = int64(int16(c)) 23671 return true 23672 } 23673 // match: (Trunc32to16 (ZeroExt8to32 x)) 23674 // cond: 23675 // result: (ZeroExt8to16 x) 23676 for { 23677 v_0 := v.Args[0] 23678 if v_0.Op != OpZeroExt8to32 { 23679 break 23680 } 23681 x := v_0.Args[0] 23682 v.reset(OpZeroExt8to16) 23683 v.AddArg(x) 23684 return true 23685 } 23686 // match: (Trunc32to16 (ZeroExt16to32 x)) 23687 // cond: 23688 // result: x 23689 for { 23690 v_0 := v.Args[0] 23691 if v_0.Op != OpZeroExt16to32 { 23692 break 23693 } 23694 x := v_0.Args[0] 23695 v.reset(OpCopy) 23696 v.Type = x.Type 23697 v.AddArg(x) 23698 return true 23699 } 23700 // match: (Trunc32to16 (SignExt8to32 x)) 23701 // cond: 23702 // result: (SignExt8to16 x) 23703 for { 23704 v_0 := v.Args[0] 23705 if v_0.Op != OpSignExt8to32 { 23706 break 23707 } 23708 x := v_0.Args[0] 23709 v.reset(OpSignExt8to16) 23710 v.AddArg(x) 23711 return true 23712 } 23713 // match: (Trunc32to16 (SignExt16to32 x)) 23714 // cond: 23715 // result: x 23716 for { 23717 v_0 := v.Args[0] 23718 if v_0.Op != OpSignExt16to32 { 23719 break 23720 } 23721 x := v_0.Args[0] 23722 v.reset(OpCopy) 23723 v.Type = x.Type 23724 v.AddArg(x) 23725 return true 23726 } 23727 // match: (Trunc32to16 (And32 (Const32 [y]) x)) 23728 // cond: y&0xFFFF == 0xFFFF 23729 // result: (Trunc32to16 x) 23730 for { 23731 v_0 := v.Args[0] 23732 if v_0.Op != OpAnd32 { 23733 break 23734 } 23735 _ = v_0.Args[1] 23736 v_0_0 := v_0.Args[0] 23737 if v_0_0.Op != OpConst32 { 23738 break 23739 } 23740 y := v_0_0.AuxInt 23741 x := v_0.Args[1] 23742 if !(y&0xFFFF == 0xFFFF) { 23743 break 23744 } 23745 v.reset(OpTrunc32to16) 23746 
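		// The mask y keeps every bit of the low 16-bit half (y&0xFFFF == 0xFFFF),
		// so the And32 can only change bits that the truncation discards anyway;
		// truncate x directly instead.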
v.AddArg(x) 23747 return true 23748 } 23749 // match: (Trunc32to16 (And32 x (Const32 [y]))) 23750 // cond: y&0xFFFF == 0xFFFF 23751 // result: (Trunc32to16 x) 23752 for { 23753 v_0 := v.Args[0] 23754 if v_0.Op != OpAnd32 { 23755 break 23756 } 23757 _ = v_0.Args[1] 23758 x := v_0.Args[0] 23759 v_0_1 := v_0.Args[1] 23760 if v_0_1.Op != OpConst32 { 23761 break 23762 } 23763 y := v_0_1.AuxInt 23764 if !(y&0xFFFF == 0xFFFF) { 23765 break 23766 } 23767 v.reset(OpTrunc32to16) 23768 v.AddArg(x) 23769 return true 23770 } 23771 return false 23772 } 23773 func rewriteValuegeneric_OpTrunc32to8_0(v *Value) bool { 23774 // match: (Trunc32to8 (Const32 [c])) 23775 // cond: 23776 // result: (Const8 [int64(int8(c))]) 23777 for { 23778 v_0 := v.Args[0] 23779 if v_0.Op != OpConst32 { 23780 break 23781 } 23782 c := v_0.AuxInt 23783 v.reset(OpConst8) 23784 v.AuxInt = int64(int8(c)) 23785 return true 23786 } 23787 // match: (Trunc32to8 (ZeroExt8to32 x)) 23788 // cond: 23789 // result: x 23790 for { 23791 v_0 := v.Args[0] 23792 if v_0.Op != OpZeroExt8to32 { 23793 break 23794 } 23795 x := v_0.Args[0] 23796 v.reset(OpCopy) 23797 v.Type = x.Type 23798 v.AddArg(x) 23799 return true 23800 } 23801 // match: (Trunc32to8 (SignExt8to32 x)) 23802 // cond: 23803 // result: x 23804 for { 23805 v_0 := v.Args[0] 23806 if v_0.Op != OpSignExt8to32 { 23807 break 23808 } 23809 x := v_0.Args[0] 23810 v.reset(OpCopy) 23811 v.Type = x.Type 23812 v.AddArg(x) 23813 return true 23814 } 23815 // match: (Trunc32to8 (And32 (Const32 [y]) x)) 23816 // cond: y&0xFF == 0xFF 23817 // result: (Trunc32to8 x) 23818 for { 23819 v_0 := v.Args[0] 23820 if v_0.Op != OpAnd32 { 23821 break 23822 } 23823 _ = v_0.Args[1] 23824 v_0_0 := v_0.Args[0] 23825 if v_0_0.Op != OpConst32 { 23826 break 23827 } 23828 y := v_0_0.AuxInt 23829 x := v_0.Args[1] 23830 if !(y&0xFF == 0xFF) { 23831 break 23832 } 23833 v.reset(OpTrunc32to8) 23834 v.AddArg(x) 23835 return true 23836 } 23837 // match: (Trunc32to8 (And32 x (Const32 [y]))) 23838 // cond: y&0xFF == 0xFF 23839 // result: (Trunc32to8 x) 23840 for { 23841 v_0 := v.Args[0] 23842 if v_0.Op != OpAnd32 { 23843 break 23844 } 23845 _ = v_0.Args[1] 23846 x := v_0.Args[0] 23847 v_0_1 := v_0.Args[1] 23848 if v_0_1.Op != OpConst32 { 23849 break 23850 } 23851 y := v_0_1.AuxInt 23852 if !(y&0xFF == 0xFF) { 23853 break 23854 } 23855 v.reset(OpTrunc32to8) 23856 v.AddArg(x) 23857 return true 23858 } 23859 return false 23860 } 23861 func rewriteValuegeneric_OpTrunc64to16_0(v *Value) bool { 23862 // match: (Trunc64to16 (Const64 [c])) 23863 // cond: 23864 // result: (Const16 [int64(int16(c))]) 23865 for { 23866 v_0 := v.Args[0] 23867 if v_0.Op != OpConst64 { 23868 break 23869 } 23870 c := v_0.AuxInt 23871 v.reset(OpConst16) 23872 v.AuxInt = int64(int16(c)) 23873 return true 23874 } 23875 // match: (Trunc64to16 (ZeroExt8to64 x)) 23876 // cond: 23877 // result: (ZeroExt8to16 x) 23878 for { 23879 v_0 := v.Args[0] 23880 if v_0.Op != OpZeroExt8to64 { 23881 break 23882 } 23883 x := v_0.Args[0] 23884 v.reset(OpZeroExt8to16) 23885 v.AddArg(x) 23886 return true 23887 } 23888 // match: (Trunc64to16 (ZeroExt16to64 x)) 23889 // cond: 23890 // result: x 23891 for { 23892 v_0 := v.Args[0] 23893 if v_0.Op != OpZeroExt16to64 { 23894 break 23895 } 23896 x := v_0.Args[0] 23897 v.reset(OpCopy) 23898 v.Type = x.Type 23899 v.AddArg(x) 23900 return true 23901 } 23902 // match: (Trunc64to16 (SignExt8to64 x)) 23903 // cond: 23904 // result: (SignExt8to16 x) 23905 for { 23906 v_0 := v.Args[0] 23907 if v_0.Op != OpSignExt8to64 { 23908 break 23909 } 23910 x := 
v_0.Args[0] 23911 v.reset(OpSignExt8to16) 23912 v.AddArg(x) 23913 return true 23914 } 23915 // match: (Trunc64to16 (SignExt16to64 x)) 23916 // cond: 23917 // result: x 23918 for { 23919 v_0 := v.Args[0] 23920 if v_0.Op != OpSignExt16to64 { 23921 break 23922 } 23923 x := v_0.Args[0] 23924 v.reset(OpCopy) 23925 v.Type = x.Type 23926 v.AddArg(x) 23927 return true 23928 } 23929 // match: (Trunc64to16 (And64 (Const64 [y]) x)) 23930 // cond: y&0xFFFF == 0xFFFF 23931 // result: (Trunc64to16 x) 23932 for { 23933 v_0 := v.Args[0] 23934 if v_0.Op != OpAnd64 { 23935 break 23936 } 23937 _ = v_0.Args[1] 23938 v_0_0 := v_0.Args[0] 23939 if v_0_0.Op != OpConst64 { 23940 break 23941 } 23942 y := v_0_0.AuxInt 23943 x := v_0.Args[1] 23944 if !(y&0xFFFF == 0xFFFF) { 23945 break 23946 } 23947 v.reset(OpTrunc64to16) 23948 v.AddArg(x) 23949 return true 23950 } 23951 // match: (Trunc64to16 (And64 x (Const64 [y]))) 23952 // cond: y&0xFFFF == 0xFFFF 23953 // result: (Trunc64to16 x) 23954 for { 23955 v_0 := v.Args[0] 23956 if v_0.Op != OpAnd64 { 23957 break 23958 } 23959 _ = v_0.Args[1] 23960 x := v_0.Args[0] 23961 v_0_1 := v_0.Args[1] 23962 if v_0_1.Op != OpConst64 { 23963 break 23964 } 23965 y := v_0_1.AuxInt 23966 if !(y&0xFFFF == 0xFFFF) { 23967 break 23968 } 23969 v.reset(OpTrunc64to16) 23970 v.AddArg(x) 23971 return true 23972 } 23973 return false 23974 } 23975 func rewriteValuegeneric_OpTrunc64to32_0(v *Value) bool { 23976 // match: (Trunc64to32 (Const64 [c])) 23977 // cond: 23978 // result: (Const32 [int64(int32(c))]) 23979 for { 23980 v_0 := v.Args[0] 23981 if v_0.Op != OpConst64 { 23982 break 23983 } 23984 c := v_0.AuxInt 23985 v.reset(OpConst32) 23986 v.AuxInt = int64(int32(c)) 23987 return true 23988 } 23989 // match: (Trunc64to32 (ZeroExt8to64 x)) 23990 // cond: 23991 // result: (ZeroExt8to32 x) 23992 for { 23993 v_0 := v.Args[0] 23994 if v_0.Op != OpZeroExt8to64 { 23995 break 23996 } 23997 x := v_0.Args[0] 23998 v.reset(OpZeroExt8to32) 23999 v.AddArg(x) 24000 return true 24001 } 24002 // match: (Trunc64to32 (ZeroExt16to64 x)) 24003 // cond: 24004 // result: (ZeroExt16to32 x) 24005 for { 24006 v_0 := v.Args[0] 24007 if v_0.Op != OpZeroExt16to64 { 24008 break 24009 } 24010 x := v_0.Args[0] 24011 v.reset(OpZeroExt16to32) 24012 v.AddArg(x) 24013 return true 24014 } 24015 // match: (Trunc64to32 (ZeroExt32to64 x)) 24016 // cond: 24017 // result: x 24018 for { 24019 v_0 := v.Args[0] 24020 if v_0.Op != OpZeroExt32to64 { 24021 break 24022 } 24023 x := v_0.Args[0] 24024 v.reset(OpCopy) 24025 v.Type = x.Type 24026 v.AddArg(x) 24027 return true 24028 } 24029 // match: (Trunc64to32 (SignExt8to64 x)) 24030 // cond: 24031 // result: (SignExt8to32 x) 24032 for { 24033 v_0 := v.Args[0] 24034 if v_0.Op != OpSignExt8to64 { 24035 break 24036 } 24037 x := v_0.Args[0] 24038 v.reset(OpSignExt8to32) 24039 v.AddArg(x) 24040 return true 24041 } 24042 // match: (Trunc64to32 (SignExt16to64 x)) 24043 // cond: 24044 // result: (SignExt16to32 x) 24045 for { 24046 v_0 := v.Args[0] 24047 if v_0.Op != OpSignExt16to64 { 24048 break 24049 } 24050 x := v_0.Args[0] 24051 v.reset(OpSignExt16to32) 24052 v.AddArg(x) 24053 return true 24054 } 24055 // match: (Trunc64to32 (SignExt32to64 x)) 24056 // cond: 24057 // result: x 24058 for { 24059 v_0 := v.Args[0] 24060 if v_0.Op != OpSignExt32to64 { 24061 break 24062 } 24063 x := v_0.Args[0] 24064 v.reset(OpCopy) 24065 v.Type = x.Type 24066 v.AddArg(x) 24067 return true 24068 } 24069 // match: (Trunc64to32 (And64 (Const64 [y]) x)) 24070 // cond: y&0xFFFFFFFF == 0xFFFFFFFF 24071 // result: 
(Trunc64to32 x) 24072 for { 24073 v_0 := v.Args[0] 24074 if v_0.Op != OpAnd64 { 24075 break 24076 } 24077 _ = v_0.Args[1] 24078 v_0_0 := v_0.Args[0] 24079 if v_0_0.Op != OpConst64 { 24080 break 24081 } 24082 y := v_0_0.AuxInt 24083 x := v_0.Args[1] 24084 if !(y&0xFFFFFFFF == 0xFFFFFFFF) { 24085 break 24086 } 24087 v.reset(OpTrunc64to32) 24088 v.AddArg(x) 24089 return true 24090 } 24091 // match: (Trunc64to32 (And64 x (Const64 [y]))) 24092 // cond: y&0xFFFFFFFF == 0xFFFFFFFF 24093 // result: (Trunc64to32 x) 24094 for { 24095 v_0 := v.Args[0] 24096 if v_0.Op != OpAnd64 { 24097 break 24098 } 24099 _ = v_0.Args[1] 24100 x := v_0.Args[0] 24101 v_0_1 := v_0.Args[1] 24102 if v_0_1.Op != OpConst64 { 24103 break 24104 } 24105 y := v_0_1.AuxInt 24106 if !(y&0xFFFFFFFF == 0xFFFFFFFF) { 24107 break 24108 } 24109 v.reset(OpTrunc64to32) 24110 v.AddArg(x) 24111 return true 24112 } 24113 return false 24114 } 24115 func rewriteValuegeneric_OpTrunc64to8_0(v *Value) bool { 24116 // match: (Trunc64to8 (Const64 [c])) 24117 // cond: 24118 // result: (Const8 [int64(int8(c))]) 24119 for { 24120 v_0 := v.Args[0] 24121 if v_0.Op != OpConst64 { 24122 break 24123 } 24124 c := v_0.AuxInt 24125 v.reset(OpConst8) 24126 v.AuxInt = int64(int8(c)) 24127 return true 24128 } 24129 // match: (Trunc64to8 (ZeroExt8to64 x)) 24130 // cond: 24131 // result: x 24132 for { 24133 v_0 := v.Args[0] 24134 if v_0.Op != OpZeroExt8to64 { 24135 break 24136 } 24137 x := v_0.Args[0] 24138 v.reset(OpCopy) 24139 v.Type = x.Type 24140 v.AddArg(x) 24141 return true 24142 } 24143 // match: (Trunc64to8 (SignExt8to64 x)) 24144 // cond: 24145 // result: x 24146 for { 24147 v_0 := v.Args[0] 24148 if v_0.Op != OpSignExt8to64 { 24149 break 24150 } 24151 x := v_0.Args[0] 24152 v.reset(OpCopy) 24153 v.Type = x.Type 24154 v.AddArg(x) 24155 return true 24156 } 24157 // match: (Trunc64to8 (And64 (Const64 [y]) x)) 24158 // cond: y&0xFF == 0xFF 24159 // result: (Trunc64to8 x) 24160 for { 24161 v_0 := v.Args[0] 24162 if v_0.Op != OpAnd64 { 24163 break 24164 } 24165 _ = v_0.Args[1] 24166 v_0_0 := v_0.Args[0] 24167 if v_0_0.Op != OpConst64 { 24168 break 24169 } 24170 y := v_0_0.AuxInt 24171 x := v_0.Args[1] 24172 if !(y&0xFF == 0xFF) { 24173 break 24174 } 24175 v.reset(OpTrunc64to8) 24176 v.AddArg(x) 24177 return true 24178 } 24179 // match: (Trunc64to8 (And64 x (Const64 [y]))) 24180 // cond: y&0xFF == 0xFF 24181 // result: (Trunc64to8 x) 24182 for { 24183 v_0 := v.Args[0] 24184 if v_0.Op != OpAnd64 { 24185 break 24186 } 24187 _ = v_0.Args[1] 24188 x := v_0.Args[0] 24189 v_0_1 := v_0.Args[1] 24190 if v_0_1.Op != OpConst64 { 24191 break 24192 } 24193 y := v_0_1.AuxInt 24194 if !(y&0xFF == 0xFF) { 24195 break 24196 } 24197 v.reset(OpTrunc64to8) 24198 v.AddArg(x) 24199 return true 24200 } 24201 return false 24202 } 24203 func rewriteValuegeneric_OpXor16_0(v *Value) bool { 24204 b := v.Block 24205 _ = b 24206 // match: (Xor16 (Const16 [c]) (Const16 [d])) 24207 // cond: 24208 // result: (Const16 [int64(int16(c^d))]) 24209 for { 24210 _ = v.Args[1] 24211 v_0 := v.Args[0] 24212 if v_0.Op != OpConst16 { 24213 break 24214 } 24215 c := v_0.AuxInt 24216 v_1 := v.Args[1] 24217 if v_1.Op != OpConst16 { 24218 break 24219 } 24220 d := v_1.AuxInt 24221 v.reset(OpConst16) 24222 v.AuxInt = int64(int16(c ^ d)) 24223 return true 24224 } 24225 // match: (Xor16 (Const16 [d]) (Const16 [c])) 24226 // cond: 24227 // result: (Const16 [int64(int16(c^d))]) 24228 for { 24229 _ = v.Args[1] 24230 v_0 := v.Args[0] 24231 if v_0.Op != OpConst16 { 24232 break 24233 } 24234 d := v_0.AuxInt 24235 
v_1 := v.Args[1] 24236 if v_1.Op != OpConst16 { 24237 break 24238 } 24239 c := v_1.AuxInt 24240 v.reset(OpConst16) 24241 v.AuxInt = int64(int16(c ^ d)) 24242 return true 24243 } 24244 // match: (Xor16 x x) 24245 // cond: 24246 // result: (Const16 [0]) 24247 for { 24248 _ = v.Args[1] 24249 x := v.Args[0] 24250 if x != v.Args[1] { 24251 break 24252 } 24253 v.reset(OpConst16) 24254 v.AuxInt = 0 24255 return true 24256 } 24257 // match: (Xor16 (Const16 [0]) x) 24258 // cond: 24259 // result: x 24260 for { 24261 _ = v.Args[1] 24262 v_0 := v.Args[0] 24263 if v_0.Op != OpConst16 { 24264 break 24265 } 24266 if v_0.AuxInt != 0 { 24267 break 24268 } 24269 x := v.Args[1] 24270 v.reset(OpCopy) 24271 v.Type = x.Type 24272 v.AddArg(x) 24273 return true 24274 } 24275 // match: (Xor16 x (Const16 [0])) 24276 // cond: 24277 // result: x 24278 for { 24279 _ = v.Args[1] 24280 x := v.Args[0] 24281 v_1 := v.Args[1] 24282 if v_1.Op != OpConst16 { 24283 break 24284 } 24285 if v_1.AuxInt != 0 { 24286 break 24287 } 24288 v.reset(OpCopy) 24289 v.Type = x.Type 24290 v.AddArg(x) 24291 return true 24292 } 24293 // match: (Xor16 x (Xor16 x y)) 24294 // cond: 24295 // result: y 24296 for { 24297 _ = v.Args[1] 24298 x := v.Args[0] 24299 v_1 := v.Args[1] 24300 if v_1.Op != OpXor16 { 24301 break 24302 } 24303 _ = v_1.Args[1] 24304 if x != v_1.Args[0] { 24305 break 24306 } 24307 y := v_1.Args[1] 24308 v.reset(OpCopy) 24309 v.Type = y.Type 24310 v.AddArg(y) 24311 return true 24312 } 24313 // match: (Xor16 x (Xor16 y x)) 24314 // cond: 24315 // result: y 24316 for { 24317 _ = v.Args[1] 24318 x := v.Args[0] 24319 v_1 := v.Args[1] 24320 if v_1.Op != OpXor16 { 24321 break 24322 } 24323 _ = v_1.Args[1] 24324 y := v_1.Args[0] 24325 if x != v_1.Args[1] { 24326 break 24327 } 24328 v.reset(OpCopy) 24329 v.Type = y.Type 24330 v.AddArg(y) 24331 return true 24332 } 24333 // match: (Xor16 (Xor16 x y) x) 24334 // cond: 24335 // result: y 24336 for { 24337 _ = v.Args[1] 24338 v_0 := v.Args[0] 24339 if v_0.Op != OpXor16 { 24340 break 24341 } 24342 _ = v_0.Args[1] 24343 x := v_0.Args[0] 24344 y := v_0.Args[1] 24345 if x != v.Args[1] { 24346 break 24347 } 24348 v.reset(OpCopy) 24349 v.Type = y.Type 24350 v.AddArg(y) 24351 return true 24352 } 24353 // match: (Xor16 (Xor16 y x) x) 24354 // cond: 24355 // result: y 24356 for { 24357 _ = v.Args[1] 24358 v_0 := v.Args[0] 24359 if v_0.Op != OpXor16 { 24360 break 24361 } 24362 _ = v_0.Args[1] 24363 y := v_0.Args[0] 24364 x := v_0.Args[1] 24365 if x != v.Args[1] { 24366 break 24367 } 24368 v.reset(OpCopy) 24369 v.Type = y.Type 24370 v.AddArg(y) 24371 return true 24372 } 24373 // match: (Xor16 (Xor16 i:(Const16 <t>) z) x) 24374 // cond: (z.Op != OpConst16 && x.Op != OpConst16) 24375 // result: (Xor16 i (Xor16 <t> z x)) 24376 for { 24377 _ = v.Args[1] 24378 v_0 := v.Args[0] 24379 if v_0.Op != OpXor16 { 24380 break 24381 } 24382 _ = v_0.Args[1] 24383 i := v_0.Args[0] 24384 if i.Op != OpConst16 { 24385 break 24386 } 24387 t := i.Type 24388 z := v_0.Args[1] 24389 x := v.Args[1] 24390 if !(z.Op != OpConst16 && x.Op != OpConst16) { 24391 break 24392 } 24393 v.reset(OpXor16) 24394 v.AddArg(i) 24395 v0 := b.NewValue0(v.Pos, OpXor16, t) 24396 v0.AddArg(z) 24397 v0.AddArg(x) 24398 v.AddArg(v0) 24399 return true 24400 } 24401 return false 24402 } 24403 func rewriteValuegeneric_OpXor16_10(v *Value) bool { 24404 b := v.Block 24405 _ = b 24406 // match: (Xor16 (Xor16 z i:(Const16 <t>)) x) 24407 // cond: (z.Op != OpConst16 && x.Op != OpConst16) 24408 // result: (Xor16 i (Xor16 <t> z x)) 24409 for { 24410 _ = 
v.Args[1] 24411 v_0 := v.Args[0] 24412 if v_0.Op != OpXor16 { 24413 break 24414 } 24415 _ = v_0.Args[1] 24416 z := v_0.Args[0] 24417 i := v_0.Args[1] 24418 if i.Op != OpConst16 { 24419 break 24420 } 24421 t := i.Type 24422 x := v.Args[1] 24423 if !(z.Op != OpConst16 && x.Op != OpConst16) { 24424 break 24425 } 24426 v.reset(OpXor16) 24427 v.AddArg(i) 24428 v0 := b.NewValue0(v.Pos, OpXor16, t) 24429 v0.AddArg(z) 24430 v0.AddArg(x) 24431 v.AddArg(v0) 24432 return true 24433 } 24434 // match: (Xor16 x (Xor16 i:(Const16 <t>) z)) 24435 // cond: (z.Op != OpConst16 && x.Op != OpConst16) 24436 // result: (Xor16 i (Xor16 <t> z x)) 24437 for { 24438 _ = v.Args[1] 24439 x := v.Args[0] 24440 v_1 := v.Args[1] 24441 if v_1.Op != OpXor16 { 24442 break 24443 } 24444 _ = v_1.Args[1] 24445 i := v_1.Args[0] 24446 if i.Op != OpConst16 { 24447 break 24448 } 24449 t := i.Type 24450 z := v_1.Args[1] 24451 if !(z.Op != OpConst16 && x.Op != OpConst16) { 24452 break 24453 } 24454 v.reset(OpXor16) 24455 v.AddArg(i) 24456 v0 := b.NewValue0(v.Pos, OpXor16, t) 24457 v0.AddArg(z) 24458 v0.AddArg(x) 24459 v.AddArg(v0) 24460 return true 24461 } 24462 // match: (Xor16 x (Xor16 z i:(Const16 <t>))) 24463 // cond: (z.Op != OpConst16 && x.Op != OpConst16) 24464 // result: (Xor16 i (Xor16 <t> z x)) 24465 for { 24466 _ = v.Args[1] 24467 x := v.Args[0] 24468 v_1 := v.Args[1] 24469 if v_1.Op != OpXor16 { 24470 break 24471 } 24472 _ = v_1.Args[1] 24473 z := v_1.Args[0] 24474 i := v_1.Args[1] 24475 if i.Op != OpConst16 { 24476 break 24477 } 24478 t := i.Type 24479 if !(z.Op != OpConst16 && x.Op != OpConst16) { 24480 break 24481 } 24482 v.reset(OpXor16) 24483 v.AddArg(i) 24484 v0 := b.NewValue0(v.Pos, OpXor16, t) 24485 v0.AddArg(z) 24486 v0.AddArg(x) 24487 v.AddArg(v0) 24488 return true 24489 } 24490 // match: (Xor16 (Const16 <t> [c]) (Xor16 (Const16 <t> [d]) x)) 24491 // cond: 24492 // result: (Xor16 (Const16 <t> [int64(int16(c^d))]) x) 24493 for { 24494 _ = v.Args[1] 24495 v_0 := v.Args[0] 24496 if v_0.Op != OpConst16 { 24497 break 24498 } 24499 t := v_0.Type 24500 c := v_0.AuxInt 24501 v_1 := v.Args[1] 24502 if v_1.Op != OpXor16 { 24503 break 24504 } 24505 _ = v_1.Args[1] 24506 v_1_0 := v_1.Args[0] 24507 if v_1_0.Op != OpConst16 { 24508 break 24509 } 24510 if v_1_0.Type != t { 24511 break 24512 } 24513 d := v_1_0.AuxInt 24514 x := v_1.Args[1] 24515 v.reset(OpXor16) 24516 v0 := b.NewValue0(v.Pos, OpConst16, t) 24517 v0.AuxInt = int64(int16(c ^ d)) 24518 v.AddArg(v0) 24519 v.AddArg(x) 24520 return true 24521 } 24522 // match: (Xor16 (Const16 <t> [c]) (Xor16 x (Const16 <t> [d]))) 24523 // cond: 24524 // result: (Xor16 (Const16 <t> [int64(int16(c^d))]) x) 24525 for { 24526 _ = v.Args[1] 24527 v_0 := v.Args[0] 24528 if v_0.Op != OpConst16 { 24529 break 24530 } 24531 t := v_0.Type 24532 c := v_0.AuxInt 24533 v_1 := v.Args[1] 24534 if v_1.Op != OpXor16 { 24535 break 24536 } 24537 _ = v_1.Args[1] 24538 x := v_1.Args[0] 24539 v_1_1 := v_1.Args[1] 24540 if v_1_1.Op != OpConst16 { 24541 break 24542 } 24543 if v_1_1.Type != t { 24544 break 24545 } 24546 d := v_1_1.AuxInt 24547 v.reset(OpXor16) 24548 v0 := b.NewValue0(v.Pos, OpConst16, t) 24549 v0.AuxInt = int64(int16(c ^ d)) 24550 v.AddArg(v0) 24551 v.AddArg(x) 24552 return true 24553 } 24554 // match: (Xor16 (Xor16 (Const16 <t> [d]) x) (Const16 <t> [c])) 24555 // cond: 24556 // result: (Xor16 (Const16 <t> [int64(int16(c^d))]) x) 24557 for { 24558 _ = v.Args[1] 24559 v_0 := v.Args[0] 24560 if v_0.Op != OpXor16 { 24561 break 24562 } 24563 _ = v_0.Args[1] 24564 v_0_0 := v_0.Args[0] 24565 if 
v_0_0.Op != OpConst16 { 24566 break 24567 } 24568 t := v_0_0.Type 24569 d := v_0_0.AuxInt 24570 x := v_0.Args[1] 24571 v_1 := v.Args[1] 24572 if v_1.Op != OpConst16 { 24573 break 24574 } 24575 if v_1.Type != t { 24576 break 24577 } 24578 c := v_1.AuxInt 24579 v.reset(OpXor16) 24580 v0 := b.NewValue0(v.Pos, OpConst16, t) 24581 v0.AuxInt = int64(int16(c ^ d)) 24582 v.AddArg(v0) 24583 v.AddArg(x) 24584 return true 24585 } 24586 // match: (Xor16 (Xor16 x (Const16 <t> [d])) (Const16 <t> [c])) 24587 // cond: 24588 // result: (Xor16 (Const16 <t> [int64(int16(c^d))]) x) 24589 for { 24590 _ = v.Args[1] 24591 v_0 := v.Args[0] 24592 if v_0.Op != OpXor16 { 24593 break 24594 } 24595 _ = v_0.Args[1] 24596 x := v_0.Args[0] 24597 v_0_1 := v_0.Args[1] 24598 if v_0_1.Op != OpConst16 { 24599 break 24600 } 24601 t := v_0_1.Type 24602 d := v_0_1.AuxInt 24603 v_1 := v.Args[1] 24604 if v_1.Op != OpConst16 { 24605 break 24606 } 24607 if v_1.Type != t { 24608 break 24609 } 24610 c := v_1.AuxInt 24611 v.reset(OpXor16) 24612 v0 := b.NewValue0(v.Pos, OpConst16, t) 24613 v0.AuxInt = int64(int16(c ^ d)) 24614 v.AddArg(v0) 24615 v.AddArg(x) 24616 return true 24617 } 24618 return false 24619 } 24620 func rewriteValuegeneric_OpXor32_0(v *Value) bool { 24621 b := v.Block 24622 _ = b 24623 // match: (Xor32 (Const32 [c]) (Const32 [d])) 24624 // cond: 24625 // result: (Const32 [int64(int32(c^d))]) 24626 for { 24627 _ = v.Args[1] 24628 v_0 := v.Args[0] 24629 if v_0.Op != OpConst32 { 24630 break 24631 } 24632 c := v_0.AuxInt 24633 v_1 := v.Args[1] 24634 if v_1.Op != OpConst32 { 24635 break 24636 } 24637 d := v_1.AuxInt 24638 v.reset(OpConst32) 24639 v.AuxInt = int64(int32(c ^ d)) 24640 return true 24641 } 24642 // match: (Xor32 (Const32 [d]) (Const32 [c])) 24643 // cond: 24644 // result: (Const32 [int64(int32(c^d))]) 24645 for { 24646 _ = v.Args[1] 24647 v_0 := v.Args[0] 24648 if v_0.Op != OpConst32 { 24649 break 24650 } 24651 d := v_0.AuxInt 24652 v_1 := v.Args[1] 24653 if v_1.Op != OpConst32 { 24654 break 24655 } 24656 c := v_1.AuxInt 24657 v.reset(OpConst32) 24658 v.AuxInt = int64(int32(c ^ d)) 24659 return true 24660 } 24661 // match: (Xor32 x x) 24662 // cond: 24663 // result: (Const32 [0]) 24664 for { 24665 _ = v.Args[1] 24666 x := v.Args[0] 24667 if x != v.Args[1] { 24668 break 24669 } 24670 v.reset(OpConst32) 24671 v.AuxInt = 0 24672 return true 24673 } 24674 // match: (Xor32 (Const32 [0]) x) 24675 // cond: 24676 // result: x 24677 for { 24678 _ = v.Args[1] 24679 v_0 := v.Args[0] 24680 if v_0.Op != OpConst32 { 24681 break 24682 } 24683 if v_0.AuxInt != 0 { 24684 break 24685 } 24686 x := v.Args[1] 24687 v.reset(OpCopy) 24688 v.Type = x.Type 24689 v.AddArg(x) 24690 return true 24691 } 24692 // match: (Xor32 x (Const32 [0])) 24693 // cond: 24694 // result: x 24695 for { 24696 _ = v.Args[1] 24697 x := v.Args[0] 24698 v_1 := v.Args[1] 24699 if v_1.Op != OpConst32 { 24700 break 24701 } 24702 if v_1.AuxInt != 0 { 24703 break 24704 } 24705 v.reset(OpCopy) 24706 v.Type = x.Type 24707 v.AddArg(x) 24708 return true 24709 } 24710 // match: (Xor32 x (Xor32 x y)) 24711 // cond: 24712 // result: y 24713 for { 24714 _ = v.Args[1] 24715 x := v.Args[0] 24716 v_1 := v.Args[1] 24717 if v_1.Op != OpXor32 { 24718 break 24719 } 24720 _ = v_1.Args[1] 24721 if x != v_1.Args[0] { 24722 break 24723 } 24724 y := v_1.Args[1] 24725 v.reset(OpCopy) 24726 v.Type = y.Type 24727 v.AddArg(y) 24728 return true 24729 } 24730 // match: (Xor32 x (Xor32 y x)) 24731 // cond: 24732 // result: y 24733 for { 24734 _ = v.Args[1] 24735 x := v.Args[0] 24736 v_1 := 
v.Args[1] 24737 if v_1.Op != OpXor32 { 24738 break 24739 } 24740 _ = v_1.Args[1] 24741 y := v_1.Args[0] 24742 if x != v_1.Args[1] { 24743 break 24744 } 24745 v.reset(OpCopy) 24746 v.Type = y.Type 24747 v.AddArg(y) 24748 return true 24749 } 24750 // match: (Xor32 (Xor32 x y) x) 24751 // cond: 24752 // result: y 24753 for { 24754 _ = v.Args[1] 24755 v_0 := v.Args[0] 24756 if v_0.Op != OpXor32 { 24757 break 24758 } 24759 _ = v_0.Args[1] 24760 x := v_0.Args[0] 24761 y := v_0.Args[1] 24762 if x != v.Args[1] { 24763 break 24764 } 24765 v.reset(OpCopy) 24766 v.Type = y.Type 24767 v.AddArg(y) 24768 return true 24769 } 24770 // match: (Xor32 (Xor32 y x) x) 24771 // cond: 24772 // result: y 24773 for { 24774 _ = v.Args[1] 24775 v_0 := v.Args[0] 24776 if v_0.Op != OpXor32 { 24777 break 24778 } 24779 _ = v_0.Args[1] 24780 y := v_0.Args[0] 24781 x := v_0.Args[1] 24782 if x != v.Args[1] { 24783 break 24784 } 24785 v.reset(OpCopy) 24786 v.Type = y.Type 24787 v.AddArg(y) 24788 return true 24789 } 24790 // match: (Xor32 (Xor32 i:(Const32 <t>) z) x) 24791 // cond: (z.Op != OpConst32 && x.Op != OpConst32) 24792 // result: (Xor32 i (Xor32 <t> z x)) 24793 for { 24794 _ = v.Args[1] 24795 v_0 := v.Args[0] 24796 if v_0.Op != OpXor32 { 24797 break 24798 } 24799 _ = v_0.Args[1] 24800 i := v_0.Args[0] 24801 if i.Op != OpConst32 { 24802 break 24803 } 24804 t := i.Type 24805 z := v_0.Args[1] 24806 x := v.Args[1] 24807 if !(z.Op != OpConst32 && x.Op != OpConst32) { 24808 break 24809 } 24810 v.reset(OpXor32) 24811 v.AddArg(i) 24812 v0 := b.NewValue0(v.Pos, OpXor32, t) 24813 v0.AddArg(z) 24814 v0.AddArg(x) 24815 v.AddArg(v0) 24816 return true 24817 } 24818 return false 24819 } 24820 func rewriteValuegeneric_OpXor32_10(v *Value) bool { 24821 b := v.Block 24822 _ = b 24823 // match: (Xor32 (Xor32 z i:(Const32 <t>)) x) 24824 // cond: (z.Op != OpConst32 && x.Op != OpConst32) 24825 // result: (Xor32 i (Xor32 <t> z x)) 24826 for { 24827 _ = v.Args[1] 24828 v_0 := v.Args[0] 24829 if v_0.Op != OpXor32 { 24830 break 24831 } 24832 _ = v_0.Args[1] 24833 z := v_0.Args[0] 24834 i := v_0.Args[1] 24835 if i.Op != OpConst32 { 24836 break 24837 } 24838 t := i.Type 24839 x := v.Args[1] 24840 if !(z.Op != OpConst32 && x.Op != OpConst32) { 24841 break 24842 } 24843 v.reset(OpXor32) 24844 v.AddArg(i) 24845 v0 := b.NewValue0(v.Pos, OpXor32, t) 24846 v0.AddArg(z) 24847 v0.AddArg(x) 24848 v.AddArg(v0) 24849 return true 24850 } 24851 // match: (Xor32 x (Xor32 i:(Const32 <t>) z)) 24852 // cond: (z.Op != OpConst32 && x.Op != OpConst32) 24853 // result: (Xor32 i (Xor32 <t> z x)) 24854 for { 24855 _ = v.Args[1] 24856 x := v.Args[0] 24857 v_1 := v.Args[1] 24858 if v_1.Op != OpXor32 { 24859 break 24860 } 24861 _ = v_1.Args[1] 24862 i := v_1.Args[0] 24863 if i.Op != OpConst32 { 24864 break 24865 } 24866 t := i.Type 24867 z := v_1.Args[1] 24868 if !(z.Op != OpConst32 && x.Op != OpConst32) { 24869 break 24870 } 24871 v.reset(OpXor32) 24872 v.AddArg(i) 24873 v0 := b.NewValue0(v.Pos, OpXor32, t) 24874 v0.AddArg(z) 24875 v0.AddArg(x) 24876 v.AddArg(v0) 24877 return true 24878 } 24879 // match: (Xor32 x (Xor32 z i:(Const32 <t>))) 24880 // cond: (z.Op != OpConst32 && x.Op != OpConst32) 24881 // result: (Xor32 i (Xor32 <t> z x)) 24882 for { 24883 _ = v.Args[1] 24884 x := v.Args[0] 24885 v_1 := v.Args[1] 24886 if v_1.Op != OpXor32 { 24887 break 24888 } 24889 _ = v_1.Args[1] 24890 z := v_1.Args[0] 24891 i := v_1.Args[1] 24892 if i.Op != OpConst32 { 24893 break 24894 } 24895 t := i.Type 24896 if !(z.Op != OpConst32 && x.Op != OpConst32) { 24897 break 24898 } 
24899 v.reset(OpXor32) 24900 v.AddArg(i) 24901 v0 := b.NewValue0(v.Pos, OpXor32, t) 24902 v0.AddArg(z) 24903 v0.AddArg(x) 24904 v.AddArg(v0) 24905 return true 24906 } 24907 // match: (Xor32 (Const32 <t> [c]) (Xor32 (Const32 <t> [d]) x)) 24908 // cond: 24909 // result: (Xor32 (Const32 <t> [int64(int32(c^d))]) x) 24910 for { 24911 _ = v.Args[1] 24912 v_0 := v.Args[0] 24913 if v_0.Op != OpConst32 { 24914 break 24915 } 24916 t := v_0.Type 24917 c := v_0.AuxInt 24918 v_1 := v.Args[1] 24919 if v_1.Op != OpXor32 { 24920 break 24921 } 24922 _ = v_1.Args[1] 24923 v_1_0 := v_1.Args[0] 24924 if v_1_0.Op != OpConst32 { 24925 break 24926 } 24927 if v_1_0.Type != t { 24928 break 24929 } 24930 d := v_1_0.AuxInt 24931 x := v_1.Args[1] 24932 v.reset(OpXor32) 24933 v0 := b.NewValue0(v.Pos, OpConst32, t) 24934 v0.AuxInt = int64(int32(c ^ d)) 24935 v.AddArg(v0) 24936 v.AddArg(x) 24937 return true 24938 } 24939 // match: (Xor32 (Const32 <t> [c]) (Xor32 x (Const32 <t> [d]))) 24940 // cond: 24941 // result: (Xor32 (Const32 <t> [int64(int32(c^d))]) x) 24942 for { 24943 _ = v.Args[1] 24944 v_0 := v.Args[0] 24945 if v_0.Op != OpConst32 { 24946 break 24947 } 24948 t := v_0.Type 24949 c := v_0.AuxInt 24950 v_1 := v.Args[1] 24951 if v_1.Op != OpXor32 { 24952 break 24953 } 24954 _ = v_1.Args[1] 24955 x := v_1.Args[0] 24956 v_1_1 := v_1.Args[1] 24957 if v_1_1.Op != OpConst32 { 24958 break 24959 } 24960 if v_1_1.Type != t { 24961 break 24962 } 24963 d := v_1_1.AuxInt 24964 v.reset(OpXor32) 24965 v0 := b.NewValue0(v.Pos, OpConst32, t) 24966 v0.AuxInt = int64(int32(c ^ d)) 24967 v.AddArg(v0) 24968 v.AddArg(x) 24969 return true 24970 } 24971 // match: (Xor32 (Xor32 (Const32 <t> [d]) x) (Const32 <t> [c])) 24972 // cond: 24973 // result: (Xor32 (Const32 <t> [int64(int32(c^d))]) x) 24974 for { 24975 _ = v.Args[1] 24976 v_0 := v.Args[0] 24977 if v_0.Op != OpXor32 { 24978 break 24979 } 24980 _ = v_0.Args[1] 24981 v_0_0 := v_0.Args[0] 24982 if v_0_0.Op != OpConst32 { 24983 break 24984 } 24985 t := v_0_0.Type 24986 d := v_0_0.AuxInt 24987 x := v_0.Args[1] 24988 v_1 := v.Args[1] 24989 if v_1.Op != OpConst32 { 24990 break 24991 } 24992 if v_1.Type != t { 24993 break 24994 } 24995 c := v_1.AuxInt 24996 v.reset(OpXor32) 24997 v0 := b.NewValue0(v.Pos, OpConst32, t) 24998 v0.AuxInt = int64(int32(c ^ d)) 24999 v.AddArg(v0) 25000 v.AddArg(x) 25001 return true 25002 } 25003 // match: (Xor32 (Xor32 x (Const32 <t> [d])) (Const32 <t> [c])) 25004 // cond: 25005 // result: (Xor32 (Const32 <t> [int64(int32(c^d))]) x) 25006 for { 25007 _ = v.Args[1] 25008 v_0 := v.Args[0] 25009 if v_0.Op != OpXor32 { 25010 break 25011 } 25012 _ = v_0.Args[1] 25013 x := v_0.Args[0] 25014 v_0_1 := v_0.Args[1] 25015 if v_0_1.Op != OpConst32 { 25016 break 25017 } 25018 t := v_0_1.Type 25019 d := v_0_1.AuxInt 25020 v_1 := v.Args[1] 25021 if v_1.Op != OpConst32 { 25022 break 25023 } 25024 if v_1.Type != t { 25025 break 25026 } 25027 c := v_1.AuxInt 25028 v.reset(OpXor32) 25029 v0 := b.NewValue0(v.Pos, OpConst32, t) 25030 v0.AuxInt = int64(int32(c ^ d)) 25031 v.AddArg(v0) 25032 v.AddArg(x) 25033 return true 25034 } 25035 return false 25036 } 25037 func rewriteValuegeneric_OpXor64_0(v *Value) bool { 25038 b := v.Block 25039 _ = b 25040 // match: (Xor64 (Const64 [c]) (Const64 [d])) 25041 // cond: 25042 // result: (Const64 [c^d]) 25043 for { 25044 _ = v.Args[1] 25045 v_0 := v.Args[0] 25046 if v_0.Op != OpConst64 { 25047 break 25048 } 25049 c := v_0.AuxInt 25050 v_1 := v.Args[1] 25051 if v_1.Op != OpConst64 { 25052 break 25053 } 25054 d := v_1.AuxInt 25055 
v.reset(OpConst64) 25056 v.AuxInt = c ^ d 25057 return true 25058 } 25059 // match: (Xor64 (Const64 [d]) (Const64 [c])) 25060 // cond: 25061 // result: (Const64 [c^d]) 25062 for { 25063 _ = v.Args[1] 25064 v_0 := v.Args[0] 25065 if v_0.Op != OpConst64 { 25066 break 25067 } 25068 d := v_0.AuxInt 25069 v_1 := v.Args[1] 25070 if v_1.Op != OpConst64 { 25071 break 25072 } 25073 c := v_1.AuxInt 25074 v.reset(OpConst64) 25075 v.AuxInt = c ^ d 25076 return true 25077 } 25078 // match: (Xor64 x x) 25079 // cond: 25080 // result: (Const64 [0]) 25081 for { 25082 _ = v.Args[1] 25083 x := v.Args[0] 25084 if x != v.Args[1] { 25085 break 25086 } 25087 v.reset(OpConst64) 25088 v.AuxInt = 0 25089 return true 25090 } 25091 // match: (Xor64 (Const64 [0]) x) 25092 // cond: 25093 // result: x 25094 for { 25095 _ = v.Args[1] 25096 v_0 := v.Args[0] 25097 if v_0.Op != OpConst64 { 25098 break 25099 } 25100 if v_0.AuxInt != 0 { 25101 break 25102 } 25103 x := v.Args[1] 25104 v.reset(OpCopy) 25105 v.Type = x.Type 25106 v.AddArg(x) 25107 return true 25108 } 25109 // match: (Xor64 x (Const64 [0])) 25110 // cond: 25111 // result: x 25112 for { 25113 _ = v.Args[1] 25114 x := v.Args[0] 25115 v_1 := v.Args[1] 25116 if v_1.Op != OpConst64 { 25117 break 25118 } 25119 if v_1.AuxInt != 0 { 25120 break 25121 } 25122 v.reset(OpCopy) 25123 v.Type = x.Type 25124 v.AddArg(x) 25125 return true 25126 } 25127 // match: (Xor64 x (Xor64 x y)) 25128 // cond: 25129 // result: y 25130 for { 25131 _ = v.Args[1] 25132 x := v.Args[0] 25133 v_1 := v.Args[1] 25134 if v_1.Op != OpXor64 { 25135 break 25136 } 25137 _ = v_1.Args[1] 25138 if x != v_1.Args[0] { 25139 break 25140 } 25141 y := v_1.Args[1] 25142 v.reset(OpCopy) 25143 v.Type = y.Type 25144 v.AddArg(y) 25145 return true 25146 } 25147 // match: (Xor64 x (Xor64 y x)) 25148 // cond: 25149 // result: y 25150 for { 25151 _ = v.Args[1] 25152 x := v.Args[0] 25153 v_1 := v.Args[1] 25154 if v_1.Op != OpXor64 { 25155 break 25156 } 25157 _ = v_1.Args[1] 25158 y := v_1.Args[0] 25159 if x != v_1.Args[1] { 25160 break 25161 } 25162 v.reset(OpCopy) 25163 v.Type = y.Type 25164 v.AddArg(y) 25165 return true 25166 } 25167 // match: (Xor64 (Xor64 x y) x) 25168 // cond: 25169 // result: y 25170 for { 25171 _ = v.Args[1] 25172 v_0 := v.Args[0] 25173 if v_0.Op != OpXor64 { 25174 break 25175 } 25176 _ = v_0.Args[1] 25177 x := v_0.Args[0] 25178 y := v_0.Args[1] 25179 if x != v.Args[1] { 25180 break 25181 } 25182 v.reset(OpCopy) 25183 v.Type = y.Type 25184 v.AddArg(y) 25185 return true 25186 } 25187 // match: (Xor64 (Xor64 y x) x) 25188 // cond: 25189 // result: y 25190 for { 25191 _ = v.Args[1] 25192 v_0 := v.Args[0] 25193 if v_0.Op != OpXor64 { 25194 break 25195 } 25196 _ = v_0.Args[1] 25197 y := v_0.Args[0] 25198 x := v_0.Args[1] 25199 if x != v.Args[1] { 25200 break 25201 } 25202 v.reset(OpCopy) 25203 v.Type = y.Type 25204 v.AddArg(y) 25205 return true 25206 } 25207 // match: (Xor64 (Xor64 i:(Const64 <t>) z) x) 25208 // cond: (z.Op != OpConst64 && x.Op != OpConst64) 25209 // result: (Xor64 i (Xor64 <t> z x)) 25210 for { 25211 _ = v.Args[1] 25212 v_0 := v.Args[0] 25213 if v_0.Op != OpXor64 { 25214 break 25215 } 25216 _ = v_0.Args[1] 25217 i := v_0.Args[0] 25218 if i.Op != OpConst64 { 25219 break 25220 } 25221 t := i.Type 25222 z := v_0.Args[1] 25223 x := v.Args[1] 25224 if !(z.Op != OpConst64 && x.Op != OpConst64) { 25225 break 25226 } 25227 v.reset(OpXor64) 25228 v.AddArg(i) 25229 v0 := b.NewValue0(v.Pos, OpXor64, t) 25230 v0.AddArg(z) 25231 v0.AddArg(x) 25232 v.AddArg(v0) 25233 return true 25234 } 25235 
return false 25236 } 25237 func rewriteValuegeneric_OpXor64_10(v *Value) bool { 25238 b := v.Block 25239 _ = b 25240 // match: (Xor64 (Xor64 z i:(Const64 <t>)) x) 25241 // cond: (z.Op != OpConst64 && x.Op != OpConst64) 25242 // result: (Xor64 i (Xor64 <t> z x)) 25243 for { 25244 _ = v.Args[1] 25245 v_0 := v.Args[0] 25246 if v_0.Op != OpXor64 { 25247 break 25248 } 25249 _ = v_0.Args[1] 25250 z := v_0.Args[0] 25251 i := v_0.Args[1] 25252 if i.Op != OpConst64 { 25253 break 25254 } 25255 t := i.Type 25256 x := v.Args[1] 25257 if !(z.Op != OpConst64 && x.Op != OpConst64) { 25258 break 25259 } 25260 v.reset(OpXor64) 25261 v.AddArg(i) 25262 v0 := b.NewValue0(v.Pos, OpXor64, t) 25263 v0.AddArg(z) 25264 v0.AddArg(x) 25265 v.AddArg(v0) 25266 return true 25267 } 25268 // match: (Xor64 x (Xor64 i:(Const64 <t>) z)) 25269 // cond: (z.Op != OpConst64 && x.Op != OpConst64) 25270 // result: (Xor64 i (Xor64 <t> z x)) 25271 for { 25272 _ = v.Args[1] 25273 x := v.Args[0] 25274 v_1 := v.Args[1] 25275 if v_1.Op != OpXor64 { 25276 break 25277 } 25278 _ = v_1.Args[1] 25279 i := v_1.Args[0] 25280 if i.Op != OpConst64 { 25281 break 25282 } 25283 t := i.Type 25284 z := v_1.Args[1] 25285 if !(z.Op != OpConst64 && x.Op != OpConst64) { 25286 break 25287 } 25288 v.reset(OpXor64) 25289 v.AddArg(i) 25290 v0 := b.NewValue0(v.Pos, OpXor64, t) 25291 v0.AddArg(z) 25292 v0.AddArg(x) 25293 v.AddArg(v0) 25294 return true 25295 } 25296 // match: (Xor64 x (Xor64 z i:(Const64 <t>))) 25297 // cond: (z.Op != OpConst64 && x.Op != OpConst64) 25298 // result: (Xor64 i (Xor64 <t> z x)) 25299 for { 25300 _ = v.Args[1] 25301 x := v.Args[0] 25302 v_1 := v.Args[1] 25303 if v_1.Op != OpXor64 { 25304 break 25305 } 25306 _ = v_1.Args[1] 25307 z := v_1.Args[0] 25308 i := v_1.Args[1] 25309 if i.Op != OpConst64 { 25310 break 25311 } 25312 t := i.Type 25313 if !(z.Op != OpConst64 && x.Op != OpConst64) { 25314 break 25315 } 25316 v.reset(OpXor64) 25317 v.AddArg(i) 25318 v0 := b.NewValue0(v.Pos, OpXor64, t) 25319 v0.AddArg(z) 25320 v0.AddArg(x) 25321 v.AddArg(v0) 25322 return true 25323 } 25324 // match: (Xor64 (Const64 <t> [c]) (Xor64 (Const64 <t> [d]) x)) 25325 // cond: 25326 // result: (Xor64 (Const64 <t> [c^d]) x) 25327 for { 25328 _ = v.Args[1] 25329 v_0 := v.Args[0] 25330 if v_0.Op != OpConst64 { 25331 break 25332 } 25333 t := v_0.Type 25334 c := v_0.AuxInt 25335 v_1 := v.Args[1] 25336 if v_1.Op != OpXor64 { 25337 break 25338 } 25339 _ = v_1.Args[1] 25340 v_1_0 := v_1.Args[0] 25341 if v_1_0.Op != OpConst64 { 25342 break 25343 } 25344 if v_1_0.Type != t { 25345 break 25346 } 25347 d := v_1_0.AuxInt 25348 x := v_1.Args[1] 25349 v.reset(OpXor64) 25350 v0 := b.NewValue0(v.Pos, OpConst64, t) 25351 v0.AuxInt = c ^ d 25352 v.AddArg(v0) 25353 v.AddArg(x) 25354 return true 25355 } 25356 // match: (Xor64 (Const64 <t> [c]) (Xor64 x (Const64 <t> [d]))) 25357 // cond: 25358 // result: (Xor64 (Const64 <t> [c^d]) x) 25359 for { 25360 _ = v.Args[1] 25361 v_0 := v.Args[0] 25362 if v_0.Op != OpConst64 { 25363 break 25364 } 25365 t := v_0.Type 25366 c := v_0.AuxInt 25367 v_1 := v.Args[1] 25368 if v_1.Op != OpXor64 { 25369 break 25370 } 25371 _ = v_1.Args[1] 25372 x := v_1.Args[0] 25373 v_1_1 := v_1.Args[1] 25374 if v_1_1.Op != OpConst64 { 25375 break 25376 } 25377 if v_1_1.Type != t { 25378 break 25379 } 25380 d := v_1_1.AuxInt 25381 v.reset(OpXor64) 25382 v0 := b.NewValue0(v.Pos, OpConst64, t) 25383 v0.AuxInt = c ^ d 25384 v.AddArg(v0) 25385 v.AddArg(x) 25386 return true 25387 } 25388 // match: (Xor64 (Xor64 (Const64 <t> [d]) x) (Const64 <t> [c])) 25389 // cond: 
25390 // result: (Xor64 (Const64 <t> [c^d]) x) 25391 for { 25392 _ = v.Args[1] 25393 v_0 := v.Args[0] 25394 if v_0.Op != OpXor64 { 25395 break 25396 } 25397 _ = v_0.Args[1] 25398 v_0_0 := v_0.Args[0] 25399 if v_0_0.Op != OpConst64 { 25400 break 25401 } 25402 t := v_0_0.Type 25403 d := v_0_0.AuxInt 25404 x := v_0.Args[1] 25405 v_1 := v.Args[1] 25406 if v_1.Op != OpConst64 { 25407 break 25408 } 25409 if v_1.Type != t { 25410 break 25411 } 25412 c := v_1.AuxInt 25413 v.reset(OpXor64) 25414 v0 := b.NewValue0(v.Pos, OpConst64, t) 25415 v0.AuxInt = c ^ d 25416 v.AddArg(v0) 25417 v.AddArg(x) 25418 return true 25419 } 25420 // match: (Xor64 (Xor64 x (Const64 <t> [d])) (Const64 <t> [c])) 25421 // cond: 25422 // result: (Xor64 (Const64 <t> [c^d]) x) 25423 for { 25424 _ = v.Args[1] 25425 v_0 := v.Args[0] 25426 if v_0.Op != OpXor64 { 25427 break 25428 } 25429 _ = v_0.Args[1] 25430 x := v_0.Args[0] 25431 v_0_1 := v_0.Args[1] 25432 if v_0_1.Op != OpConst64 { 25433 break 25434 } 25435 t := v_0_1.Type 25436 d := v_0_1.AuxInt 25437 v_1 := v.Args[1] 25438 if v_1.Op != OpConst64 { 25439 break 25440 } 25441 if v_1.Type != t { 25442 break 25443 } 25444 c := v_1.AuxInt 25445 v.reset(OpXor64) 25446 v0 := b.NewValue0(v.Pos, OpConst64, t) 25447 v0.AuxInt = c ^ d 25448 v.AddArg(v0) 25449 v.AddArg(x) 25450 return true 25451 } 25452 return false 25453 } 25454 func rewriteValuegeneric_OpXor8_0(v *Value) bool { 25455 b := v.Block 25456 _ = b 25457 // match: (Xor8 (Const8 [c]) (Const8 [d])) 25458 // cond: 25459 // result: (Const8 [int64(int8(c^d))]) 25460 for { 25461 _ = v.Args[1] 25462 v_0 := v.Args[0] 25463 if v_0.Op != OpConst8 { 25464 break 25465 } 25466 c := v_0.AuxInt 25467 v_1 := v.Args[1] 25468 if v_1.Op != OpConst8 { 25469 break 25470 } 25471 d := v_1.AuxInt 25472 v.reset(OpConst8) 25473 v.AuxInt = int64(int8(c ^ d)) 25474 return true 25475 } 25476 // match: (Xor8 (Const8 [d]) (Const8 [c])) 25477 // cond: 25478 // result: (Const8 [int64(int8(c^d))]) 25479 for { 25480 _ = v.Args[1] 25481 v_0 := v.Args[0] 25482 if v_0.Op != OpConst8 { 25483 break 25484 } 25485 d := v_0.AuxInt 25486 v_1 := v.Args[1] 25487 if v_1.Op != OpConst8 { 25488 break 25489 } 25490 c := v_1.AuxInt 25491 v.reset(OpConst8) 25492 v.AuxInt = int64(int8(c ^ d)) 25493 return true 25494 } 25495 // match: (Xor8 x x) 25496 // cond: 25497 // result: (Const8 [0]) 25498 for { 25499 _ = v.Args[1] 25500 x := v.Args[0] 25501 if x != v.Args[1] { 25502 break 25503 } 25504 v.reset(OpConst8) 25505 v.AuxInt = 0 25506 return true 25507 } 25508 // match: (Xor8 (Const8 [0]) x) 25509 // cond: 25510 // result: x 25511 for { 25512 _ = v.Args[1] 25513 v_0 := v.Args[0] 25514 if v_0.Op != OpConst8 { 25515 break 25516 } 25517 if v_0.AuxInt != 0 { 25518 break 25519 } 25520 x := v.Args[1] 25521 v.reset(OpCopy) 25522 v.Type = x.Type 25523 v.AddArg(x) 25524 return true 25525 } 25526 // match: (Xor8 x (Const8 [0])) 25527 // cond: 25528 // result: x 25529 for { 25530 _ = v.Args[1] 25531 x := v.Args[0] 25532 v_1 := v.Args[1] 25533 if v_1.Op != OpConst8 { 25534 break 25535 } 25536 if v_1.AuxInt != 0 { 25537 break 25538 } 25539 v.reset(OpCopy) 25540 v.Type = x.Type 25541 v.AddArg(x) 25542 return true 25543 } 25544 // match: (Xor8 x (Xor8 x y)) 25545 // cond: 25546 // result: y 25547 for { 25548 _ = v.Args[1] 25549 x := v.Args[0] 25550 v_1 := v.Args[1] 25551 if v_1.Op != OpXor8 { 25552 break 25553 } 25554 _ = v_1.Args[1] 25555 if x != v_1.Args[0] { 25556 break 25557 } 25558 y := v_1.Args[1] 25559 v.reset(OpCopy) 25560 v.Type = y.Type 25561 v.AddArg(y) 25562 return true 25563 } 
25564 // match: (Xor8 x (Xor8 y x)) 25565 // cond: 25566 // result: y 25567 for { 25568 _ = v.Args[1] 25569 x := v.Args[0] 25570 v_1 := v.Args[1] 25571 if v_1.Op != OpXor8 { 25572 break 25573 } 25574 _ = v_1.Args[1] 25575 y := v_1.Args[0] 25576 if x != v_1.Args[1] { 25577 break 25578 } 25579 v.reset(OpCopy) 25580 v.Type = y.Type 25581 v.AddArg(y) 25582 return true 25583 } 25584 // match: (Xor8 (Xor8 x y) x) 25585 // cond: 25586 // result: y 25587 for { 25588 _ = v.Args[1] 25589 v_0 := v.Args[0] 25590 if v_0.Op != OpXor8 { 25591 break 25592 } 25593 _ = v_0.Args[1] 25594 x := v_0.Args[0] 25595 y := v_0.Args[1] 25596 if x != v.Args[1] { 25597 break 25598 } 25599 v.reset(OpCopy) 25600 v.Type = y.Type 25601 v.AddArg(y) 25602 return true 25603 } 25604 // match: (Xor8 (Xor8 y x) x) 25605 // cond: 25606 // result: y 25607 for { 25608 _ = v.Args[1] 25609 v_0 := v.Args[0] 25610 if v_0.Op != OpXor8 { 25611 break 25612 } 25613 _ = v_0.Args[1] 25614 y := v_0.Args[0] 25615 x := v_0.Args[1] 25616 if x != v.Args[1] { 25617 break 25618 } 25619 v.reset(OpCopy) 25620 v.Type = y.Type 25621 v.AddArg(y) 25622 return true 25623 } 25624 // match: (Xor8 (Xor8 i:(Const8 <t>) z) x) 25625 // cond: (z.Op != OpConst8 && x.Op != OpConst8) 25626 // result: (Xor8 i (Xor8 <t> z x)) 25627 for { 25628 _ = v.Args[1] 25629 v_0 := v.Args[0] 25630 if v_0.Op != OpXor8 { 25631 break 25632 } 25633 _ = v_0.Args[1] 25634 i := v_0.Args[0] 25635 if i.Op != OpConst8 { 25636 break 25637 } 25638 t := i.Type 25639 z := v_0.Args[1] 25640 x := v.Args[1] 25641 if !(z.Op != OpConst8 && x.Op != OpConst8) { 25642 break 25643 } 25644 v.reset(OpXor8) 25645 v.AddArg(i) 25646 v0 := b.NewValue0(v.Pos, OpXor8, t) 25647 v0.AddArg(z) 25648 v0.AddArg(x) 25649 v.AddArg(v0) 25650 return true 25651 } 25652 return false 25653 } 25654 func rewriteValuegeneric_OpXor8_10(v *Value) bool { 25655 b := v.Block 25656 _ = b 25657 // match: (Xor8 (Xor8 z i:(Const8 <t>)) x) 25658 // cond: (z.Op != OpConst8 && x.Op != OpConst8) 25659 // result: (Xor8 i (Xor8 <t> z x)) 25660 for { 25661 _ = v.Args[1] 25662 v_0 := v.Args[0] 25663 if v_0.Op != OpXor8 { 25664 break 25665 } 25666 _ = v_0.Args[1] 25667 z := v_0.Args[0] 25668 i := v_0.Args[1] 25669 if i.Op != OpConst8 { 25670 break 25671 } 25672 t := i.Type 25673 x := v.Args[1] 25674 if !(z.Op != OpConst8 && x.Op != OpConst8) { 25675 break 25676 } 25677 v.reset(OpXor8) 25678 v.AddArg(i) 25679 v0 := b.NewValue0(v.Pos, OpXor8, t) 25680 v0.AddArg(z) 25681 v0.AddArg(x) 25682 v.AddArg(v0) 25683 return true 25684 } 25685 // match: (Xor8 x (Xor8 i:(Const8 <t>) z)) 25686 // cond: (z.Op != OpConst8 && x.Op != OpConst8) 25687 // result: (Xor8 i (Xor8 <t> z x)) 25688 for { 25689 _ = v.Args[1] 25690 x := v.Args[0] 25691 v_1 := v.Args[1] 25692 if v_1.Op != OpXor8 { 25693 break 25694 } 25695 _ = v_1.Args[1] 25696 i := v_1.Args[0] 25697 if i.Op != OpConst8 { 25698 break 25699 } 25700 t := i.Type 25701 z := v_1.Args[1] 25702 if !(z.Op != OpConst8 && x.Op != OpConst8) { 25703 break 25704 } 25705 v.reset(OpXor8) 25706 v.AddArg(i) 25707 v0 := b.NewValue0(v.Pos, OpXor8, t) 25708 v0.AddArg(z) 25709 v0.AddArg(x) 25710 v.AddArg(v0) 25711 return true 25712 } 25713 // match: (Xor8 x (Xor8 z i:(Const8 <t>))) 25714 // cond: (z.Op != OpConst8 && x.Op != OpConst8) 25715 // result: (Xor8 i (Xor8 <t> z x)) 25716 for { 25717 _ = v.Args[1] 25718 x := v.Args[0] 25719 v_1 := v.Args[1] 25720 if v_1.Op != OpXor8 { 25721 break 25722 } 25723 _ = v_1.Args[1] 25724 z := v_1.Args[0] 25725 i := v_1.Args[1] 25726 if i.Op != OpConst8 { 25727 break 25728 } 25729 t := 
i.Type 25730 if !(z.Op != OpConst8 && x.Op != OpConst8) { 25731 break 25732 } 25733 v.reset(OpXor8) 25734 v.AddArg(i) 25735 v0 := b.NewValue0(v.Pos, OpXor8, t) 25736 v0.AddArg(z) 25737 v0.AddArg(x) 25738 v.AddArg(v0) 25739 return true 25740 } 25741 // match: (Xor8 (Const8 <t> [c]) (Xor8 (Const8 <t> [d]) x)) 25742 // cond: 25743 // result: (Xor8 (Const8 <t> [int64(int8(c^d))]) x) 25744 for { 25745 _ = v.Args[1] 25746 v_0 := v.Args[0] 25747 if v_0.Op != OpConst8 { 25748 break 25749 } 25750 t := v_0.Type 25751 c := v_0.AuxInt 25752 v_1 := v.Args[1] 25753 if v_1.Op != OpXor8 { 25754 break 25755 } 25756 _ = v_1.Args[1] 25757 v_1_0 := v_1.Args[0] 25758 if v_1_0.Op != OpConst8 { 25759 break 25760 } 25761 if v_1_0.Type != t { 25762 break 25763 } 25764 d := v_1_0.AuxInt 25765 x := v_1.Args[1] 25766 v.reset(OpXor8) 25767 v0 := b.NewValue0(v.Pos, OpConst8, t) 25768 v0.AuxInt = int64(int8(c ^ d)) 25769 v.AddArg(v0) 25770 v.AddArg(x) 25771 return true 25772 } 25773 // match: (Xor8 (Const8 <t> [c]) (Xor8 x (Const8 <t> [d]))) 25774 // cond: 25775 // result: (Xor8 (Const8 <t> [int64(int8(c^d))]) x) 25776 for { 25777 _ = v.Args[1] 25778 v_0 := v.Args[0] 25779 if v_0.Op != OpConst8 { 25780 break 25781 } 25782 t := v_0.Type 25783 c := v_0.AuxInt 25784 v_1 := v.Args[1] 25785 if v_1.Op != OpXor8 { 25786 break 25787 } 25788 _ = v_1.Args[1] 25789 x := v_1.Args[0] 25790 v_1_1 := v_1.Args[1] 25791 if v_1_1.Op != OpConst8 { 25792 break 25793 } 25794 if v_1_1.Type != t { 25795 break 25796 } 25797 d := v_1_1.AuxInt 25798 v.reset(OpXor8) 25799 v0 := b.NewValue0(v.Pos, OpConst8, t) 25800 v0.AuxInt = int64(int8(c ^ d)) 25801 v.AddArg(v0) 25802 v.AddArg(x) 25803 return true 25804 } 25805 // match: (Xor8 (Xor8 (Const8 <t> [d]) x) (Const8 <t> [c])) 25806 // cond: 25807 // result: (Xor8 (Const8 <t> [int64(int8(c^d))]) x) 25808 for { 25809 _ = v.Args[1] 25810 v_0 := v.Args[0] 25811 if v_0.Op != OpXor8 { 25812 break 25813 } 25814 _ = v_0.Args[1] 25815 v_0_0 := v_0.Args[0] 25816 if v_0_0.Op != OpConst8 { 25817 break 25818 } 25819 t := v_0_0.Type 25820 d := v_0_0.AuxInt 25821 x := v_0.Args[1] 25822 v_1 := v.Args[1] 25823 if v_1.Op != OpConst8 { 25824 break 25825 } 25826 if v_1.Type != t { 25827 break 25828 } 25829 c := v_1.AuxInt 25830 v.reset(OpXor8) 25831 v0 := b.NewValue0(v.Pos, OpConst8, t) 25832 v0.AuxInt = int64(int8(c ^ d)) 25833 v.AddArg(v0) 25834 v.AddArg(x) 25835 return true 25836 } 25837 // match: (Xor8 (Xor8 x (Const8 <t> [d])) (Const8 <t> [c])) 25838 // cond: 25839 // result: (Xor8 (Const8 <t> [int64(int8(c^d))]) x) 25840 for { 25841 _ = v.Args[1] 25842 v_0 := v.Args[0] 25843 if v_0.Op != OpXor8 { 25844 break 25845 } 25846 _ = v_0.Args[1] 25847 x := v_0.Args[0] 25848 v_0_1 := v_0.Args[1] 25849 if v_0_1.Op != OpConst8 { 25850 break 25851 } 25852 t := v_0_1.Type 25853 d := v_0_1.AuxInt 25854 v_1 := v.Args[1] 25855 if v_1.Op != OpConst8 { 25856 break 25857 } 25858 if v_1.Type != t { 25859 break 25860 } 25861 c := v_1.AuxInt 25862 v.reset(OpXor8) 25863 v0 := b.NewValue0(v.Pos, OpConst8, t) 25864 v0.AuxInt = int64(int8(c ^ d)) 25865 v.AddArg(v0) 25866 v.AddArg(x) 25867 return true 25868 } 25869 return false 25870 } 25871 func rewriteValuegeneric_OpZero_0(v *Value) bool { 25872 b := v.Block 25873 _ = b 25874 config := b.Func.Config 25875 _ = config 25876 // match: (Zero (Load (OffPtr [c] (SP)) mem) mem) 25877 // cond: mem.Op == OpStaticCall && isSameSym(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize() + config.RegSize 25878 // result: mem 25879 for { 25880 _ = v.Args[1] 25881 v_0 := v.Args[0] 25882 if 
v_0.Op != OpLoad { 25883 break 25884 } 25885 _ = v_0.Args[1] 25886 v_0_0 := v_0.Args[0] 25887 if v_0_0.Op != OpOffPtr { 25888 break 25889 } 25890 c := v_0_0.AuxInt 25891 v_0_0_0 := v_0_0.Args[0] 25892 if v_0_0_0.Op != OpSP { 25893 break 25894 } 25895 mem := v_0.Args[1] 25896 if mem != v.Args[1] { 25897 break 25898 } 25899 if !(mem.Op == OpStaticCall && isSameSym(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize()+config.RegSize) { 25900 break 25901 } 25902 v.reset(OpCopy) 25903 v.Type = mem.Type 25904 v.AddArg(mem) 25905 return true 25906 } 25907 return false 25908 } 25909 func rewriteValuegeneric_OpZeroExt16to32_0(v *Value) bool { 25910 // match: (ZeroExt16to32 (Const16 [c])) 25911 // cond: 25912 // result: (Const32 [int64(uint16(c))]) 25913 for { 25914 v_0 := v.Args[0] 25915 if v_0.Op != OpConst16 { 25916 break 25917 } 25918 c := v_0.AuxInt 25919 v.reset(OpConst32) 25920 v.AuxInt = int64(uint16(c)) 25921 return true 25922 } 25923 // match: (ZeroExt16to32 (Trunc32to16 x:(Rsh32Ux64 _ (Const64 [s])))) 25924 // cond: s >= 16 25925 // result: x 25926 for { 25927 v_0 := v.Args[0] 25928 if v_0.Op != OpTrunc32to16 { 25929 break 25930 } 25931 x := v_0.Args[0] 25932 if x.Op != OpRsh32Ux64 { 25933 break 25934 } 25935 _ = x.Args[1] 25936 x_1 := x.Args[1] 25937 if x_1.Op != OpConst64 { 25938 break 25939 } 25940 s := x_1.AuxInt 25941 if !(s >= 16) { 25942 break 25943 } 25944 v.reset(OpCopy) 25945 v.Type = x.Type 25946 v.AddArg(x) 25947 return true 25948 } 25949 return false 25950 } 25951 func rewriteValuegeneric_OpZeroExt16to64_0(v *Value) bool { 25952 // match: (ZeroExt16to64 (Const16 [c])) 25953 // cond: 25954 // result: (Const64 [int64(uint16(c))]) 25955 for { 25956 v_0 := v.Args[0] 25957 if v_0.Op != OpConst16 { 25958 break 25959 } 25960 c := v_0.AuxInt 25961 v.reset(OpConst64) 25962 v.AuxInt = int64(uint16(c)) 25963 return true 25964 } 25965 // match: (ZeroExt16to64 (Trunc64to16 x:(Rsh64Ux64 _ (Const64 [s])))) 25966 // cond: s >= 48 25967 // result: x 25968 for { 25969 v_0 := v.Args[0] 25970 if v_0.Op != OpTrunc64to16 { 25971 break 25972 } 25973 x := v_0.Args[0] 25974 if x.Op != OpRsh64Ux64 { 25975 break 25976 } 25977 _ = x.Args[1] 25978 x_1 := x.Args[1] 25979 if x_1.Op != OpConst64 { 25980 break 25981 } 25982 s := x_1.AuxInt 25983 if !(s >= 48) { 25984 break 25985 } 25986 v.reset(OpCopy) 25987 v.Type = x.Type 25988 v.AddArg(x) 25989 return true 25990 } 25991 return false 25992 } 25993 func rewriteValuegeneric_OpZeroExt32to64_0(v *Value) bool { 25994 // match: (ZeroExt32to64 (Const32 [c])) 25995 // cond: 25996 // result: (Const64 [int64(uint32(c))]) 25997 for { 25998 v_0 := v.Args[0] 25999 if v_0.Op != OpConst32 { 26000 break 26001 } 26002 c := v_0.AuxInt 26003 v.reset(OpConst64) 26004 v.AuxInt = int64(uint32(c)) 26005 return true 26006 } 26007 // match: (ZeroExt32to64 (Trunc64to32 x:(Rsh64Ux64 _ (Const64 [s])))) 26008 // cond: s >= 32 26009 // result: x 26010 for { 26011 v_0 := v.Args[0] 26012 if v_0.Op != OpTrunc64to32 { 26013 break 26014 } 26015 x := v_0.Args[0] 26016 if x.Op != OpRsh64Ux64 { 26017 break 26018 } 26019 _ = x.Args[1] 26020 x_1 := x.Args[1] 26021 if x_1.Op != OpConst64 { 26022 break 26023 } 26024 s := x_1.AuxInt 26025 if !(s >= 32) { 26026 break 26027 } 26028 v.reset(OpCopy) 26029 v.Type = x.Type 26030 v.AddArg(x) 26031 return true 26032 } 26033 return false 26034 } 26035 func rewriteValuegeneric_OpZeroExt8to16_0(v *Value) bool { 26036 // match: (ZeroExt8to16 (Const8 [c])) 26037 // cond: 26038 // result: (Const16 [int64( uint8(c))]) 26039 for { 26040 v_0 := 
v.Args[0] 26041 if v_0.Op != OpConst8 { 26042 break 26043 } 26044 c := v_0.AuxInt 26045 v.reset(OpConst16) 26046 v.AuxInt = int64(uint8(c)) 26047 return true 26048 } 26049 // match: (ZeroExt8to16 (Trunc16to8 x:(Rsh16Ux64 _ (Const64 [s])))) 26050 // cond: s >= 8 26051 // result: x 26052 for { 26053 v_0 := v.Args[0] 26054 if v_0.Op != OpTrunc16to8 { 26055 break 26056 } 26057 x := v_0.Args[0] 26058 if x.Op != OpRsh16Ux64 { 26059 break 26060 } 26061 _ = x.Args[1] 26062 x_1 := x.Args[1] 26063 if x_1.Op != OpConst64 { 26064 break 26065 } 26066 s := x_1.AuxInt 26067 if !(s >= 8) { 26068 break 26069 } 26070 v.reset(OpCopy) 26071 v.Type = x.Type 26072 v.AddArg(x) 26073 return true 26074 } 26075 return false 26076 } 26077 func rewriteValuegeneric_OpZeroExt8to32_0(v *Value) bool { 26078 // match: (ZeroExt8to32 (Const8 [c])) 26079 // cond: 26080 // result: (Const32 [int64( uint8(c))]) 26081 for { 26082 v_0 := v.Args[0] 26083 if v_0.Op != OpConst8 { 26084 break 26085 } 26086 c := v_0.AuxInt 26087 v.reset(OpConst32) 26088 v.AuxInt = int64(uint8(c)) 26089 return true 26090 } 26091 // match: (ZeroExt8to32 (Trunc32to8 x:(Rsh32Ux64 _ (Const64 [s])))) 26092 // cond: s >= 24 26093 // result: x 26094 for { 26095 v_0 := v.Args[0] 26096 if v_0.Op != OpTrunc32to8 { 26097 break 26098 } 26099 x := v_0.Args[0] 26100 if x.Op != OpRsh32Ux64 { 26101 break 26102 } 26103 _ = x.Args[1] 26104 x_1 := x.Args[1] 26105 if x_1.Op != OpConst64 { 26106 break 26107 } 26108 s := x_1.AuxInt 26109 if !(s >= 24) { 26110 break 26111 } 26112 v.reset(OpCopy) 26113 v.Type = x.Type 26114 v.AddArg(x) 26115 return true 26116 } 26117 return false 26118 } 26119 func rewriteValuegeneric_OpZeroExt8to64_0(v *Value) bool { 26120 // match: (ZeroExt8to64 (Const8 [c])) 26121 // cond: 26122 // result: (Const64 [int64( uint8(c))]) 26123 for { 26124 v_0 := v.Args[0] 26125 if v_0.Op != OpConst8 { 26126 break 26127 } 26128 c := v_0.AuxInt 26129 v.reset(OpConst64) 26130 v.AuxInt = int64(uint8(c)) 26131 return true 26132 } 26133 // match: (ZeroExt8to64 (Trunc64to8 x:(Rsh64Ux64 _ (Const64 [s])))) 26134 // cond: s >= 56 26135 // result: x 26136 for { 26137 v_0 := v.Args[0] 26138 if v_0.Op != OpTrunc64to8 { 26139 break 26140 } 26141 x := v_0.Args[0] 26142 if x.Op != OpRsh64Ux64 { 26143 break 26144 } 26145 _ = x.Args[1] 26146 x_1 := x.Args[1] 26147 if x_1.Op != OpConst64 { 26148 break 26149 } 26150 s := x_1.AuxInt 26151 if !(s >= 56) { 26152 break 26153 } 26154 v.reset(OpCopy) 26155 v.Type = x.Type 26156 v.AddArg(x) 26157 return true 26158 } 26159 return false 26160 } 26161 func rewriteBlockgeneric(b *Block) bool { 26162 config := b.Func.Config 26163 _ = config 26164 fe := b.Func.fe 26165 _ = fe 26166 typ := &config.Types 26167 _ = typ 26168 switch b.Kind { 26169 case BlockIf: 26170 // match: (If (Not cond) yes no) 26171 // cond: 26172 // result: (If cond no yes) 26173 for { 26174 v := b.Control 26175 if v.Op != OpNot { 26176 break 26177 } 26178 cond := v.Args[0] 26179 b.Kind = BlockIf 26180 b.SetControl(cond) 26181 b.swapSuccessors() 26182 return true 26183 } 26184 // match: (If (ConstBool [c]) yes no) 26185 // cond: c == 1 26186 // result: (First nil yes no) 26187 for { 26188 v := b.Control 26189 if v.Op != OpConstBool { 26190 break 26191 } 26192 c := v.AuxInt 26193 if !(c == 1) { 26194 break 26195 } 26196 b.Kind = BlockFirst 26197 b.SetControl(nil) 26198 return true 26199 } 26200 // match: (If (ConstBool [c]) yes no) 26201 // cond: c == 0 26202 // result: (First nil no yes) 26203 for { 26204 v := b.Control 26205 if v.Op != OpConstBool { 26206 break 26207 } 
26208 c := v.AuxInt 26209 if !(c == 0) { 26210 break 26211 } 26212 b.Kind = BlockFirst 26213 b.SetControl(nil) 26214 b.swapSuccessors() 26215 return true 26216 } 26217 } 26218 return false 26219 }