github.com/bir3/gocompiler@v0.3.205/src/cmd/compile/internal/ssa/rewritegeneric.go

     1  // Code generated from _gen/generic.rules; DO NOT EDIT.
     2  // generated with: cd _gen; go run .
     3  
     4  package ssa
     5  
     6  import "math"
     7  import "github.com/bir3/gocompiler/src/cmd/compile/internal/types"
     8  
     9  func rewriteValuegeneric(v *Value) bool {
    10  	switch v.Op {
    11  	case OpAdd16:
    12  		return rewriteValuegeneric_OpAdd16(v)
    13  	case OpAdd32:
    14  		return rewriteValuegeneric_OpAdd32(v)
    15  	case OpAdd32F:
    16  		return rewriteValuegeneric_OpAdd32F(v)
    17  	case OpAdd64:
    18  		return rewriteValuegeneric_OpAdd64(v)
    19  	case OpAdd64F:
    20  		return rewriteValuegeneric_OpAdd64F(v)
    21  	case OpAdd8:
    22  		return rewriteValuegeneric_OpAdd8(v)
    23  	case OpAddPtr:
    24  		return rewriteValuegeneric_OpAddPtr(v)
    25  	case OpAnd16:
    26  		return rewriteValuegeneric_OpAnd16(v)
    27  	case OpAnd32:
    28  		return rewriteValuegeneric_OpAnd32(v)
    29  	case OpAnd64:
    30  		return rewriteValuegeneric_OpAnd64(v)
    31  	case OpAnd8:
    32  		return rewriteValuegeneric_OpAnd8(v)
    33  	case OpAndB:
    34  		return rewriteValuegeneric_OpAndB(v)
    35  	case OpArraySelect:
    36  		return rewriteValuegeneric_OpArraySelect(v)
    37  	case OpCeil:
    38  		return rewriteValuegeneric_OpCeil(v)
    39  	case OpCom16:
    40  		return rewriteValuegeneric_OpCom16(v)
    41  	case OpCom32:
    42  		return rewriteValuegeneric_OpCom32(v)
    43  	case OpCom64:
    44  		return rewriteValuegeneric_OpCom64(v)
    45  	case OpCom8:
    46  		return rewriteValuegeneric_OpCom8(v)
    47  	case OpConstInterface:
    48  		return rewriteValuegeneric_OpConstInterface(v)
    49  	case OpConstSlice:
    50  		return rewriteValuegeneric_OpConstSlice(v)
    51  	case OpConstString:
    52  		return rewriteValuegeneric_OpConstString(v)
    53  	case OpConvert:
    54  		return rewriteValuegeneric_OpConvert(v)
    55  	case OpCtz16:
    56  		return rewriteValuegeneric_OpCtz16(v)
    57  	case OpCtz32:
    58  		return rewriteValuegeneric_OpCtz32(v)
    59  	case OpCtz64:
    60  		return rewriteValuegeneric_OpCtz64(v)
    61  	case OpCtz8:
    62  		return rewriteValuegeneric_OpCtz8(v)
    63  	case OpCvt32Fto32:
    64  		return rewriteValuegeneric_OpCvt32Fto32(v)
    65  	case OpCvt32Fto64:
    66  		return rewriteValuegeneric_OpCvt32Fto64(v)
    67  	case OpCvt32Fto64F:
    68  		return rewriteValuegeneric_OpCvt32Fto64F(v)
    69  	case OpCvt32to32F:
    70  		return rewriteValuegeneric_OpCvt32to32F(v)
    71  	case OpCvt32to64F:
    72  		return rewriteValuegeneric_OpCvt32to64F(v)
    73  	case OpCvt64Fto32:
    74  		return rewriteValuegeneric_OpCvt64Fto32(v)
    75  	case OpCvt64Fto32F:
    76  		return rewriteValuegeneric_OpCvt64Fto32F(v)
    77  	case OpCvt64Fto64:
    78  		return rewriteValuegeneric_OpCvt64Fto64(v)
    79  	case OpCvt64to32F:
    80  		return rewriteValuegeneric_OpCvt64to32F(v)
    81  	case OpCvt64to64F:
    82  		return rewriteValuegeneric_OpCvt64to64F(v)
    83  	case OpCvtBoolToUint8:
    84  		return rewriteValuegeneric_OpCvtBoolToUint8(v)
    85  	case OpDiv16:
    86  		return rewriteValuegeneric_OpDiv16(v)
    87  	case OpDiv16u:
    88  		return rewriteValuegeneric_OpDiv16u(v)
    89  	case OpDiv32:
    90  		return rewriteValuegeneric_OpDiv32(v)
    91  	case OpDiv32F:
    92  		return rewriteValuegeneric_OpDiv32F(v)
    93  	case OpDiv32u:
    94  		return rewriteValuegeneric_OpDiv32u(v)
    95  	case OpDiv64:
    96  		return rewriteValuegeneric_OpDiv64(v)
    97  	case OpDiv64F:
    98  		return rewriteValuegeneric_OpDiv64F(v)
    99  	case OpDiv64u:
   100  		return rewriteValuegeneric_OpDiv64u(v)
   101  	case OpDiv8:
   102  		return rewriteValuegeneric_OpDiv8(v)
   103  	case OpDiv8u:
   104  		return rewriteValuegeneric_OpDiv8u(v)
   105  	case OpEq16:
   106  		return rewriteValuegeneric_OpEq16(v)
   107  	case OpEq32:
   108  		return rewriteValuegeneric_OpEq32(v)
   109  	case OpEq32F:
   110  		return rewriteValuegeneric_OpEq32F(v)
   111  	case OpEq64:
   112  		return rewriteValuegeneric_OpEq64(v)
   113  	case OpEq64F:
   114  		return rewriteValuegeneric_OpEq64F(v)
   115  	case OpEq8:
   116  		return rewriteValuegeneric_OpEq8(v)
   117  	case OpEqB:
   118  		return rewriteValuegeneric_OpEqB(v)
   119  	case OpEqInter:
   120  		return rewriteValuegeneric_OpEqInter(v)
   121  	case OpEqPtr:
   122  		return rewriteValuegeneric_OpEqPtr(v)
   123  	case OpEqSlice:
   124  		return rewriteValuegeneric_OpEqSlice(v)
   125  	case OpFloor:
   126  		return rewriteValuegeneric_OpFloor(v)
   127  	case OpIMake:
   128  		return rewriteValuegeneric_OpIMake(v)
   129  	case OpInterLECall:
   130  		return rewriteValuegeneric_OpInterLECall(v)
   131  	case OpIsInBounds:
   132  		return rewriteValuegeneric_OpIsInBounds(v)
   133  	case OpIsNonNil:
   134  		return rewriteValuegeneric_OpIsNonNil(v)
   135  	case OpIsSliceInBounds:
   136  		return rewriteValuegeneric_OpIsSliceInBounds(v)
   137  	case OpLeq16:
   138  		return rewriteValuegeneric_OpLeq16(v)
   139  	case OpLeq16U:
   140  		return rewriteValuegeneric_OpLeq16U(v)
   141  	case OpLeq32:
   142  		return rewriteValuegeneric_OpLeq32(v)
   143  	case OpLeq32F:
   144  		return rewriteValuegeneric_OpLeq32F(v)
   145  	case OpLeq32U:
   146  		return rewriteValuegeneric_OpLeq32U(v)
   147  	case OpLeq64:
   148  		return rewriteValuegeneric_OpLeq64(v)
   149  	case OpLeq64F:
   150  		return rewriteValuegeneric_OpLeq64F(v)
   151  	case OpLeq64U:
   152  		return rewriteValuegeneric_OpLeq64U(v)
   153  	case OpLeq8:
   154  		return rewriteValuegeneric_OpLeq8(v)
   155  	case OpLeq8U:
   156  		return rewriteValuegeneric_OpLeq8U(v)
   157  	case OpLess16:
   158  		return rewriteValuegeneric_OpLess16(v)
   159  	case OpLess16U:
   160  		return rewriteValuegeneric_OpLess16U(v)
   161  	case OpLess32:
   162  		return rewriteValuegeneric_OpLess32(v)
   163  	case OpLess32F:
   164  		return rewriteValuegeneric_OpLess32F(v)
   165  	case OpLess32U:
   166  		return rewriteValuegeneric_OpLess32U(v)
   167  	case OpLess64:
   168  		return rewriteValuegeneric_OpLess64(v)
   169  	case OpLess64F:
   170  		return rewriteValuegeneric_OpLess64F(v)
   171  	case OpLess64U:
   172  		return rewriteValuegeneric_OpLess64U(v)
   173  	case OpLess8:
   174  		return rewriteValuegeneric_OpLess8(v)
   175  	case OpLess8U:
   176  		return rewriteValuegeneric_OpLess8U(v)
   177  	case OpLoad:
   178  		return rewriteValuegeneric_OpLoad(v)
   179  	case OpLsh16x16:
   180  		return rewriteValuegeneric_OpLsh16x16(v)
   181  	case OpLsh16x32:
   182  		return rewriteValuegeneric_OpLsh16x32(v)
   183  	case OpLsh16x64:
   184  		return rewriteValuegeneric_OpLsh16x64(v)
   185  	case OpLsh16x8:
   186  		return rewriteValuegeneric_OpLsh16x8(v)
   187  	case OpLsh32x16:
   188  		return rewriteValuegeneric_OpLsh32x16(v)
   189  	case OpLsh32x32:
   190  		return rewriteValuegeneric_OpLsh32x32(v)
   191  	case OpLsh32x64:
   192  		return rewriteValuegeneric_OpLsh32x64(v)
   193  	case OpLsh32x8:
   194  		return rewriteValuegeneric_OpLsh32x8(v)
   195  	case OpLsh64x16:
   196  		return rewriteValuegeneric_OpLsh64x16(v)
   197  	case OpLsh64x32:
   198  		return rewriteValuegeneric_OpLsh64x32(v)
   199  	case OpLsh64x64:
   200  		return rewriteValuegeneric_OpLsh64x64(v)
   201  	case OpLsh64x8:
   202  		return rewriteValuegeneric_OpLsh64x8(v)
   203  	case OpLsh8x16:
   204  		return rewriteValuegeneric_OpLsh8x16(v)
   205  	case OpLsh8x32:
   206  		return rewriteValuegeneric_OpLsh8x32(v)
   207  	case OpLsh8x64:
   208  		return rewriteValuegeneric_OpLsh8x64(v)
   209  	case OpLsh8x8:
   210  		return rewriteValuegeneric_OpLsh8x8(v)
   211  	case OpMod16:
   212  		return rewriteValuegeneric_OpMod16(v)
   213  	case OpMod16u:
   214  		return rewriteValuegeneric_OpMod16u(v)
   215  	case OpMod32:
   216  		return rewriteValuegeneric_OpMod32(v)
   217  	case OpMod32u:
   218  		return rewriteValuegeneric_OpMod32u(v)
   219  	case OpMod64:
   220  		return rewriteValuegeneric_OpMod64(v)
   221  	case OpMod64u:
   222  		return rewriteValuegeneric_OpMod64u(v)
   223  	case OpMod8:
   224  		return rewriteValuegeneric_OpMod8(v)
   225  	case OpMod8u:
   226  		return rewriteValuegeneric_OpMod8u(v)
   227  	case OpMove:
   228  		return rewriteValuegeneric_OpMove(v)
   229  	case OpMul16:
   230  		return rewriteValuegeneric_OpMul16(v)
   231  	case OpMul32:
   232  		return rewriteValuegeneric_OpMul32(v)
   233  	case OpMul32F:
   234  		return rewriteValuegeneric_OpMul32F(v)
   235  	case OpMul64:
   236  		return rewriteValuegeneric_OpMul64(v)
   237  	case OpMul64F:
   238  		return rewriteValuegeneric_OpMul64F(v)
   239  	case OpMul8:
   240  		return rewriteValuegeneric_OpMul8(v)
   241  	case OpNeg16:
   242  		return rewriteValuegeneric_OpNeg16(v)
   243  	case OpNeg32:
   244  		return rewriteValuegeneric_OpNeg32(v)
   245  	case OpNeg32F:
   246  		return rewriteValuegeneric_OpNeg32F(v)
   247  	case OpNeg64:
   248  		return rewriteValuegeneric_OpNeg64(v)
   249  	case OpNeg64F:
   250  		return rewriteValuegeneric_OpNeg64F(v)
   251  	case OpNeg8:
   252  		return rewriteValuegeneric_OpNeg8(v)
   253  	case OpNeq16:
   254  		return rewriteValuegeneric_OpNeq16(v)
   255  	case OpNeq32:
   256  		return rewriteValuegeneric_OpNeq32(v)
   257  	case OpNeq32F:
   258  		return rewriteValuegeneric_OpNeq32F(v)
   259  	case OpNeq64:
   260  		return rewriteValuegeneric_OpNeq64(v)
   261  	case OpNeq64F:
   262  		return rewriteValuegeneric_OpNeq64F(v)
   263  	case OpNeq8:
   264  		return rewriteValuegeneric_OpNeq8(v)
   265  	case OpNeqB:
   266  		return rewriteValuegeneric_OpNeqB(v)
   267  	case OpNeqInter:
   268  		return rewriteValuegeneric_OpNeqInter(v)
   269  	case OpNeqPtr:
   270  		return rewriteValuegeneric_OpNeqPtr(v)
   271  	case OpNeqSlice:
   272  		return rewriteValuegeneric_OpNeqSlice(v)
   273  	case OpNilCheck:
   274  		return rewriteValuegeneric_OpNilCheck(v)
   275  	case OpNot:
   276  		return rewriteValuegeneric_OpNot(v)
   277  	case OpOffPtr:
   278  		return rewriteValuegeneric_OpOffPtr(v)
   279  	case OpOr16:
   280  		return rewriteValuegeneric_OpOr16(v)
   281  	case OpOr32:
   282  		return rewriteValuegeneric_OpOr32(v)
   283  	case OpOr64:
   284  		return rewriteValuegeneric_OpOr64(v)
   285  	case OpOr8:
   286  		return rewriteValuegeneric_OpOr8(v)
   287  	case OpOrB:
   288  		return rewriteValuegeneric_OpOrB(v)
   289  	case OpPhi:
   290  		return rewriteValuegeneric_OpPhi(v)
   291  	case OpPtrIndex:
   292  		return rewriteValuegeneric_OpPtrIndex(v)
   293  	case OpRotateLeft16:
   294  		return rewriteValuegeneric_OpRotateLeft16(v)
   295  	case OpRotateLeft32:
   296  		return rewriteValuegeneric_OpRotateLeft32(v)
   297  	case OpRotateLeft64:
   298  		return rewriteValuegeneric_OpRotateLeft64(v)
   299  	case OpRotateLeft8:
   300  		return rewriteValuegeneric_OpRotateLeft8(v)
   301  	case OpRound32F:
   302  		return rewriteValuegeneric_OpRound32F(v)
   303  	case OpRound64F:
   304  		return rewriteValuegeneric_OpRound64F(v)
   305  	case OpRoundToEven:
   306  		return rewriteValuegeneric_OpRoundToEven(v)
   307  	case OpRsh16Ux16:
   308  		return rewriteValuegeneric_OpRsh16Ux16(v)
   309  	case OpRsh16Ux32:
   310  		return rewriteValuegeneric_OpRsh16Ux32(v)
   311  	case OpRsh16Ux64:
   312  		return rewriteValuegeneric_OpRsh16Ux64(v)
   313  	case OpRsh16Ux8:
   314  		return rewriteValuegeneric_OpRsh16Ux8(v)
   315  	case OpRsh16x16:
   316  		return rewriteValuegeneric_OpRsh16x16(v)
   317  	case OpRsh16x32:
   318  		return rewriteValuegeneric_OpRsh16x32(v)
   319  	case OpRsh16x64:
   320  		return rewriteValuegeneric_OpRsh16x64(v)
   321  	case OpRsh16x8:
   322  		return rewriteValuegeneric_OpRsh16x8(v)
   323  	case OpRsh32Ux16:
   324  		return rewriteValuegeneric_OpRsh32Ux16(v)
   325  	case OpRsh32Ux32:
   326  		return rewriteValuegeneric_OpRsh32Ux32(v)
   327  	case OpRsh32Ux64:
   328  		return rewriteValuegeneric_OpRsh32Ux64(v)
   329  	case OpRsh32Ux8:
   330  		return rewriteValuegeneric_OpRsh32Ux8(v)
   331  	case OpRsh32x16:
   332  		return rewriteValuegeneric_OpRsh32x16(v)
   333  	case OpRsh32x32:
   334  		return rewriteValuegeneric_OpRsh32x32(v)
   335  	case OpRsh32x64:
   336  		return rewriteValuegeneric_OpRsh32x64(v)
   337  	case OpRsh32x8:
   338  		return rewriteValuegeneric_OpRsh32x8(v)
   339  	case OpRsh64Ux16:
   340  		return rewriteValuegeneric_OpRsh64Ux16(v)
   341  	case OpRsh64Ux32:
   342  		return rewriteValuegeneric_OpRsh64Ux32(v)
   343  	case OpRsh64Ux64:
   344  		return rewriteValuegeneric_OpRsh64Ux64(v)
   345  	case OpRsh64Ux8:
   346  		return rewriteValuegeneric_OpRsh64Ux8(v)
   347  	case OpRsh64x16:
   348  		return rewriteValuegeneric_OpRsh64x16(v)
   349  	case OpRsh64x32:
   350  		return rewriteValuegeneric_OpRsh64x32(v)
   351  	case OpRsh64x64:
   352  		return rewriteValuegeneric_OpRsh64x64(v)
   353  	case OpRsh64x8:
   354  		return rewriteValuegeneric_OpRsh64x8(v)
   355  	case OpRsh8Ux16:
   356  		return rewriteValuegeneric_OpRsh8Ux16(v)
   357  	case OpRsh8Ux32:
   358  		return rewriteValuegeneric_OpRsh8Ux32(v)
   359  	case OpRsh8Ux64:
   360  		return rewriteValuegeneric_OpRsh8Ux64(v)
   361  	case OpRsh8Ux8:
   362  		return rewriteValuegeneric_OpRsh8Ux8(v)
   363  	case OpRsh8x16:
   364  		return rewriteValuegeneric_OpRsh8x16(v)
   365  	case OpRsh8x32:
   366  		return rewriteValuegeneric_OpRsh8x32(v)
   367  	case OpRsh8x64:
   368  		return rewriteValuegeneric_OpRsh8x64(v)
   369  	case OpRsh8x8:
   370  		return rewriteValuegeneric_OpRsh8x8(v)
   371  	case OpSelect0:
   372  		return rewriteValuegeneric_OpSelect0(v)
   373  	case OpSelect1:
   374  		return rewriteValuegeneric_OpSelect1(v)
   375  	case OpSelectN:
   376  		return rewriteValuegeneric_OpSelectN(v)
   377  	case OpSignExt16to32:
   378  		return rewriteValuegeneric_OpSignExt16to32(v)
   379  	case OpSignExt16to64:
   380  		return rewriteValuegeneric_OpSignExt16to64(v)
   381  	case OpSignExt32to64:
   382  		return rewriteValuegeneric_OpSignExt32to64(v)
   383  	case OpSignExt8to16:
   384  		return rewriteValuegeneric_OpSignExt8to16(v)
   385  	case OpSignExt8to32:
   386  		return rewriteValuegeneric_OpSignExt8to32(v)
   387  	case OpSignExt8to64:
   388  		return rewriteValuegeneric_OpSignExt8to64(v)
   389  	case OpSliceCap:
   390  		return rewriteValuegeneric_OpSliceCap(v)
   391  	case OpSliceLen:
   392  		return rewriteValuegeneric_OpSliceLen(v)
   393  	case OpSlicePtr:
   394  		return rewriteValuegeneric_OpSlicePtr(v)
   395  	case OpSlicemask:
   396  		return rewriteValuegeneric_OpSlicemask(v)
   397  	case OpSqrt:
   398  		return rewriteValuegeneric_OpSqrt(v)
   399  	case OpStaticLECall:
   400  		return rewriteValuegeneric_OpStaticLECall(v)
   401  	case OpStore:
   402  		return rewriteValuegeneric_OpStore(v)
   403  	case OpStringLen:
   404  		return rewriteValuegeneric_OpStringLen(v)
   405  	case OpStringPtr:
   406  		return rewriteValuegeneric_OpStringPtr(v)
   407  	case OpStructSelect:
   408  		return rewriteValuegeneric_OpStructSelect(v)
   409  	case OpSub16:
   410  		return rewriteValuegeneric_OpSub16(v)
   411  	case OpSub32:
   412  		return rewriteValuegeneric_OpSub32(v)
   413  	case OpSub32F:
   414  		return rewriteValuegeneric_OpSub32F(v)
   415  	case OpSub64:
   416  		return rewriteValuegeneric_OpSub64(v)
   417  	case OpSub64F:
   418  		return rewriteValuegeneric_OpSub64F(v)
   419  	case OpSub8:
   420  		return rewriteValuegeneric_OpSub8(v)
   421  	case OpTrunc:
   422  		return rewriteValuegeneric_OpTrunc(v)
   423  	case OpTrunc16to8:
   424  		return rewriteValuegeneric_OpTrunc16to8(v)
   425  	case OpTrunc32to16:
   426  		return rewriteValuegeneric_OpTrunc32to16(v)
   427  	case OpTrunc32to8:
   428  		return rewriteValuegeneric_OpTrunc32to8(v)
   429  	case OpTrunc64to16:
   430  		return rewriteValuegeneric_OpTrunc64to16(v)
   431  	case OpTrunc64to32:
   432  		return rewriteValuegeneric_OpTrunc64to32(v)
   433  	case OpTrunc64to8:
   434  		return rewriteValuegeneric_OpTrunc64to8(v)
   435  	case OpXor16:
   436  		return rewriteValuegeneric_OpXor16(v)
   437  	case OpXor32:
   438  		return rewriteValuegeneric_OpXor32(v)
   439  	case OpXor64:
   440  		return rewriteValuegeneric_OpXor64(v)
   441  	case OpXor8:
   442  		return rewriteValuegeneric_OpXor8(v)
   443  	case OpZero:
   444  		return rewriteValuegeneric_OpZero(v)
   445  	case OpZeroExt16to32:
   446  		return rewriteValuegeneric_OpZeroExt16to32(v)
   447  	case OpZeroExt16to64:
   448  		return rewriteValuegeneric_OpZeroExt16to64(v)
   449  	case OpZeroExt32to64:
   450  		return rewriteValuegeneric_OpZeroExt32to64(v)
   451  	case OpZeroExt8to16:
   452  		return rewriteValuegeneric_OpZeroExt8to16(v)
   453  	case OpZeroExt8to32:
   454  		return rewriteValuegeneric_OpZeroExt8to32(v)
   455  	case OpZeroExt8to64:
   456  		return rewriteValuegeneric_OpZeroExt8to64(v)
   457  	}
   458  	return false
   459  }
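// Each rewriteValuegeneric_OpXxx function below applies the machine-independent
// rewrite rules for a single op. It mutates v in place and reports whether any
// rule fired; the dispatcher above only routes on v.Op.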
   460  func rewriteValuegeneric_OpAdd16(v *Value) bool {
   461  	v_1 := v.Args[1]
   462  	v_0 := v.Args[0]
   463  	b := v.Block
   464  	config := b.Func.Config
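	// Each rule below is a match/result comment followed by a loop that attempts the
	// rewrite. The `for _i0 := 0; _i0 <= 1; ...` header swaps v_0 and v_1 on the second
	// pass, so a commutative pattern is tried with both operand orders; the trailing
	// `break` falls through to the next rule when neither order matched.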
   465  	// match: (Add16 (Const16 [c]) (Const16 [d]))
   466  	// result: (Const16 [c+d])
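	// The fold is computed in int16, so c+d wraps exactly as Add16 itself would.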
   467  	for {
   468  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
   469  			if v_0.Op != OpConst16 {
   470  				continue
   471  			}
   472  			c := auxIntToInt16(v_0.AuxInt)
   473  			if v_1.Op != OpConst16 {
   474  				continue
   475  			}
   476  			d := auxIntToInt16(v_1.AuxInt)
   477  			v.reset(OpConst16)
   478  			v.AuxInt = int16ToAuxInt(c + d)
   479  			return true
   480  		}
   481  		break
   482  	}
   483  	// match: (Add16 <t> (Mul16 x y) (Mul16 x z))
   484  	// result: (Mul16 x (Add16 <t> y z))
   485  	for {
   486  		t := v.Type
   487  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
   488  			if v_0.Op != OpMul16 {
   489  				continue
   490  			}
   491  			_ = v_0.Args[1]
   492  			v_0_0 := v_0.Args[0]
   493  			v_0_1 := v_0.Args[1]
   494  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
   495  				x := v_0_0
   496  				y := v_0_1
   497  				if v_1.Op != OpMul16 {
   498  					continue
   499  				}
   500  				_ = v_1.Args[1]
   501  				v_1_0 := v_1.Args[0]
   502  				v_1_1 := v_1.Args[1]
   503  				for _i2 := 0; _i2 <= 1; _i2, v_1_0, v_1_1 = _i2+1, v_1_1, v_1_0 {
   504  					if x != v_1_0 {
   505  						continue
   506  					}
   507  					z := v_1_1
   508  					v.reset(OpMul16)
   509  					v0 := b.NewValue0(v.Pos, OpAdd16, t)
   510  					v0.AddArg2(y, z)
   511  					v.AddArg2(x, v0)
   512  					return true
   513  				}
   514  			}
   515  		}
   516  		break
   517  	}
   518  	// match: (Add16 (Const16 [0]) x)
   519  	// result: x
   520  	for {
   521  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
   522  			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
   523  				continue
   524  			}
   525  			x := v_1
   526  			v.copyOf(x)
   527  			return true
   528  		}
   529  		break
   530  	}
   531  	// match: (Add16 x (Neg16 y))
   532  	// result: (Sub16 x y)
   533  	for {
   534  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
   535  			x := v_0
   536  			if v_1.Op != OpNeg16 {
   537  				continue
   538  			}
   539  			y := v_1.Args[0]
   540  			v.reset(OpSub16)
   541  			v.AddArg2(x, y)
   542  			return true
   543  		}
   544  		break
   545  	}
   546  	// match: (Add16 (Com16 x) x)
   547  	// result: (Const16 [-1])
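	// Two's-complement identity: ^x == -x-1, so ^x + x is always -1.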
   548  	for {
   549  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
   550  			if v_0.Op != OpCom16 {
   551  				continue
   552  			}
   553  			x := v_0.Args[0]
   554  			if x != v_1 {
   555  				continue
   556  			}
   557  			v.reset(OpConst16)
   558  			v.AuxInt = int16ToAuxInt(-1)
   559  			return true
   560  		}
   561  		break
   562  	}
   563  	// match: (Add16 (Const16 [1]) (Com16 x))
   564  	// result: (Neg16 x)
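	// Likewise ^x + 1 == -x, the two's-complement negation.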
   565  	for {
   566  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
   567  			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 1 || v_1.Op != OpCom16 {
   568  				continue
   569  			}
   570  			x := v_1.Args[0]
   571  			v.reset(OpNeg16)
   572  			v.AddArg(x)
   573  			return true
   574  		}
   575  		break
   576  	}
   577  	// match: (Add16 x (Sub16 y x))
   578  	// result: y
   579  	for {
   580  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
   581  			x := v_0
   582  			if v_1.Op != OpSub16 {
   583  				continue
   584  			}
   585  			_ = v_1.Args[1]
   586  			y := v_1.Args[0]
   587  			if x != v_1.Args[1] {
   588  				continue
   589  			}
   590  			v.copyOf(y)
   591  			return true
   592  		}
   593  		break
   594  	}
   595  	// match: (Add16 x (Add16 y (Sub16 z x)))
   596  	// result: (Add16 y z)
   597  	for {
   598  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
   599  			x := v_0
   600  			if v_1.Op != OpAdd16 {
   601  				continue
   602  			}
   603  			_ = v_1.Args[1]
   604  			v_1_0 := v_1.Args[0]
   605  			v_1_1 := v_1.Args[1]
   606  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
   607  				y := v_1_0
   608  				if v_1_1.Op != OpSub16 {
   609  					continue
   610  				}
   611  				_ = v_1_1.Args[1]
   612  				z := v_1_1.Args[0]
   613  				if x != v_1_1.Args[1] {
   614  					continue
   615  				}
   616  				v.reset(OpAdd16)
   617  				v.AddArg2(y, z)
   618  				return true
   619  			}
   620  		}
   621  		break
   622  	}
   623  	// match: (Add16 (Add16 i:(Const16 <t>) z) x)
   624  	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
   625  	// result: (Add16 i (Add16 <t> z x))
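	// Reassociation: hoist the constant toward the outside so a later rule can fold it
	// with another constant.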
   626  	for {
   627  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
   628  			if v_0.Op != OpAdd16 {
   629  				continue
   630  			}
   631  			_ = v_0.Args[1]
   632  			v_0_0 := v_0.Args[0]
   633  			v_0_1 := v_0.Args[1]
   634  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
   635  				i := v_0_0
   636  				if i.Op != OpConst16 {
   637  					continue
   638  				}
   639  				t := i.Type
   640  				z := v_0_1
   641  				x := v_1
   642  				if !(z.Op != OpConst16 && x.Op != OpConst16) {
   643  					continue
   644  				}
   645  				v.reset(OpAdd16)
   646  				v0 := b.NewValue0(v.Pos, OpAdd16, t)
   647  				v0.AddArg2(z, x)
   648  				v.AddArg2(i, v0)
   649  				return true
   650  			}
   651  		}
   652  		break
   653  	}
   654  	// match: (Add16 (Sub16 i:(Const16 <t>) z) x)
   655  	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
   656  	// result: (Add16 i (Sub16 <t> x z))
   657  	for {
   658  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
   659  			if v_0.Op != OpSub16 {
   660  				continue
   661  			}
   662  			z := v_0.Args[1]
   663  			i := v_0.Args[0]
   664  			if i.Op != OpConst16 {
   665  				continue
   666  			}
   667  			t := i.Type
   668  			x := v_1
   669  			if !(z.Op != OpConst16 && x.Op != OpConst16) {
   670  				continue
   671  			}
   672  			v.reset(OpAdd16)
   673  			v0 := b.NewValue0(v.Pos, OpSub16, t)
   674  			v0.AddArg2(x, z)
   675  			v.AddArg2(i, v0)
   676  			return true
   677  		}
   678  		break
   679  	}
   680  	// match: (Add16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x))
   681  	// result: (Add16 (Const16 <t> [c+d]) x)
   682  	for {
   683  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
   684  			if v_0.Op != OpConst16 {
   685  				continue
   686  			}
   687  			t := v_0.Type
   688  			c := auxIntToInt16(v_0.AuxInt)
   689  			if v_1.Op != OpAdd16 {
   690  				continue
   691  			}
   692  			_ = v_1.Args[1]
   693  			v_1_0 := v_1.Args[0]
   694  			v_1_1 := v_1.Args[1]
   695  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
   696  				if v_1_0.Op != OpConst16 || v_1_0.Type != t {
   697  					continue
   698  				}
   699  				d := auxIntToInt16(v_1_0.AuxInt)
   700  				x := v_1_1
   701  				v.reset(OpAdd16)
   702  				v0 := b.NewValue0(v.Pos, OpConst16, t)
   703  				v0.AuxInt = int16ToAuxInt(c + d)
   704  				v.AddArg2(v0, x)
   705  				return true
   706  			}
   707  		}
   708  		break
   709  	}
   710  	// match: (Add16 (Const16 <t> [c]) (Sub16 (Const16 <t> [d]) x))
   711  	// result: (Sub16 (Const16 <t> [c+d]) x)
   712  	for {
   713  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
   714  			if v_0.Op != OpConst16 {
   715  				continue
   716  			}
   717  			t := v_0.Type
   718  			c := auxIntToInt16(v_0.AuxInt)
   719  			if v_1.Op != OpSub16 {
   720  				continue
   721  			}
   722  			x := v_1.Args[1]
   723  			v_1_0 := v_1.Args[0]
   724  			if v_1_0.Op != OpConst16 || v_1_0.Type != t {
   725  				continue
   726  			}
   727  			d := auxIntToInt16(v_1_0.AuxInt)
   728  			v.reset(OpSub16)
   729  			v0 := b.NewValue0(v.Pos, OpConst16, t)
   730  			v0.AuxInt = int16ToAuxInt(c + d)
   731  			v.AddArg2(v0, x)
   732  			return true
   733  		}
   734  		break
   735  	}
   736  	// match: (Add16 (Lsh16x64 x z:(Const64 <t> [c])) (Rsh16Ux64 x (Const64 [d])))
   737  	// cond: c < 16 && d == 16-c && canRotate(config, 16)
   738  	// result: (RotateLeft16 x z)
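	// With d == 16-c the two shifted halves do not overlap, so their sum equals
	// x<<c | x>>(16-c), i.e. a rotate left by c.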
   739  	for {
   740  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
   741  			if v_0.Op != OpLsh16x64 {
   742  				continue
   743  			}
   744  			_ = v_0.Args[1]
   745  			x := v_0.Args[0]
   746  			z := v_0.Args[1]
   747  			if z.Op != OpConst64 {
   748  				continue
   749  			}
   750  			c := auxIntToInt64(z.AuxInt)
   751  			if v_1.Op != OpRsh16Ux64 {
   752  				continue
   753  			}
   754  			_ = v_1.Args[1]
   755  			if x != v_1.Args[0] {
   756  				continue
   757  			}
   758  			v_1_1 := v_1.Args[1]
   759  			if v_1_1.Op != OpConst64 {
   760  				continue
   761  			}
   762  			d := auxIntToInt64(v_1_1.AuxInt)
   763  			if !(c < 16 && d == 16-c && canRotate(config, 16)) {
   764  				continue
   765  			}
   766  			v.reset(OpRotateLeft16)
   767  			v.AddArg2(x, z)
   768  			return true
   769  		}
   770  		break
   771  	}
   772  	// match: (Add16 left:(Lsh16x64 x y) right:(Rsh16Ux64 x (Sub64 (Const64 [16]) y)))
   773  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
   774  	// result: (RotateLeft16 x y)
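	// Same idea with a variable shift amount: shiftIsBounded reports that y is known
	// to be in range, so x<<y plus x>>(16-y) is a rotate left by y.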
   775  	for {
   776  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
   777  			left := v_0
   778  			if left.Op != OpLsh16x64 {
   779  				continue
   780  			}
   781  			y := left.Args[1]
   782  			x := left.Args[0]
   783  			right := v_1
   784  			if right.Op != OpRsh16Ux64 {
   785  				continue
   786  			}
   787  			_ = right.Args[1]
   788  			if x != right.Args[0] {
   789  				continue
   790  			}
   791  			right_1 := right.Args[1]
   792  			if right_1.Op != OpSub64 {
   793  				continue
   794  			}
   795  			_ = right_1.Args[1]
   796  			right_1_0 := right_1.Args[0]
   797  			if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
   798  				continue
   799  			}
   800  			v.reset(OpRotateLeft16)
   801  			v.AddArg2(x, y)
   802  			return true
   803  		}
   804  		break
   805  	}
   806  	// match: (Add16 left:(Lsh16x32 x y) right:(Rsh16Ux32 x (Sub32 (Const32 [16]) y)))
   807  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
   808  	// result: (RotateLeft16 x y)
   809  	for {
   810  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
   811  			left := v_0
   812  			if left.Op != OpLsh16x32 {
   813  				continue
   814  			}
   815  			y := left.Args[1]
   816  			x := left.Args[0]
   817  			right := v_1
   818  			if right.Op != OpRsh16Ux32 {
   819  				continue
   820  			}
   821  			_ = right.Args[1]
   822  			if x != right.Args[0] {
   823  				continue
   824  			}
   825  			right_1 := right.Args[1]
   826  			if right_1.Op != OpSub32 {
   827  				continue
   828  			}
   829  			_ = right_1.Args[1]
   830  			right_1_0 := right_1.Args[0]
   831  			if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
   832  				continue
   833  			}
   834  			v.reset(OpRotateLeft16)
   835  			v.AddArg2(x, y)
   836  			return true
   837  		}
   838  		break
   839  	}
   840  	// match: (Add16 left:(Lsh16x16 x y) right:(Rsh16Ux16 x (Sub16 (Const16 [16]) y)))
   841  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
   842  	// result: (RotateLeft16 x y)
   843  	for {
   844  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
   845  			left := v_0
   846  			if left.Op != OpLsh16x16 {
   847  				continue
   848  			}
   849  			y := left.Args[1]
   850  			x := left.Args[0]
   851  			right := v_1
   852  			if right.Op != OpRsh16Ux16 {
   853  				continue
   854  			}
   855  			_ = right.Args[1]
   856  			if x != right.Args[0] {
   857  				continue
   858  			}
   859  			right_1 := right.Args[1]
   860  			if right_1.Op != OpSub16 {
   861  				continue
   862  			}
   863  			_ = right_1.Args[1]
   864  			right_1_0 := right_1.Args[0]
   865  			if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
   866  				continue
   867  			}
   868  			v.reset(OpRotateLeft16)
   869  			v.AddArg2(x, y)
   870  			return true
   871  		}
   872  		break
   873  	}
   874  	// match: (Add16 left:(Lsh16x8 x y) right:(Rsh16Ux8 x (Sub8 (Const8 [16]) y)))
   875  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
   876  	// result: (RotateLeft16 x y)
   877  	for {
   878  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
   879  			left := v_0
   880  			if left.Op != OpLsh16x8 {
   881  				continue
   882  			}
   883  			y := left.Args[1]
   884  			x := left.Args[0]
   885  			right := v_1
   886  			if right.Op != OpRsh16Ux8 {
   887  				continue
   888  			}
   889  			_ = right.Args[1]
   890  			if x != right.Args[0] {
   891  				continue
   892  			}
   893  			right_1 := right.Args[1]
   894  			if right_1.Op != OpSub8 {
   895  				continue
   896  			}
   897  			_ = right_1.Args[1]
   898  			right_1_0 := right_1.Args[0]
   899  			if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
   900  				continue
   901  			}
   902  			v.reset(OpRotateLeft16)
   903  			v.AddArg2(x, y)
   904  			return true
   905  		}
   906  		break
   907  	}
   908  	// match: (Add16 right:(Rsh16Ux64 x y) left:(Lsh16x64 x z:(Sub64 (Const64 [16]) y)))
   909  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
   910  	// result: (RotateLeft16 x z)
   911  	for {
   912  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
   913  			right := v_0
   914  			if right.Op != OpRsh16Ux64 {
   915  				continue
   916  			}
   917  			y := right.Args[1]
   918  			x := right.Args[0]
   919  			left := v_1
   920  			if left.Op != OpLsh16x64 {
   921  				continue
   922  			}
   923  			_ = left.Args[1]
   924  			if x != left.Args[0] {
   925  				continue
   926  			}
   927  			z := left.Args[1]
   928  			if z.Op != OpSub64 {
   929  				continue
   930  			}
   931  			_ = z.Args[1]
   932  			z_0 := z.Args[0]
   933  			if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
   934  				continue
   935  			}
   936  			v.reset(OpRotateLeft16)
   937  			v.AddArg2(x, z)
   938  			return true
   939  		}
   940  		break
   941  	}
   942  	// match: (Add16 right:(Rsh16Ux32 x y) left:(Lsh16x32 x z:(Sub32 (Const32 [16]) y)))
   943  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
   944  	// result: (RotateLeft16 x z)
   945  	for {
   946  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
   947  			right := v_0
   948  			if right.Op != OpRsh16Ux32 {
   949  				continue
   950  			}
   951  			y := right.Args[1]
   952  			x := right.Args[0]
   953  			left := v_1
   954  			if left.Op != OpLsh16x32 {
   955  				continue
   956  			}
   957  			_ = left.Args[1]
   958  			if x != left.Args[0] {
   959  				continue
   960  			}
   961  			z := left.Args[1]
   962  			if z.Op != OpSub32 {
   963  				continue
   964  			}
   965  			_ = z.Args[1]
   966  			z_0 := z.Args[0]
   967  			if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
   968  				continue
   969  			}
   970  			v.reset(OpRotateLeft16)
   971  			v.AddArg2(x, z)
   972  			return true
   973  		}
   974  		break
   975  	}
   976  	// match: (Add16 right:(Rsh16Ux16 x y) left:(Lsh16x16 x z:(Sub16 (Const16 [16]) y)))
   977  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
   978  	// result: (RotateLeft16 x z)
   979  	for {
   980  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
   981  			right := v_0
   982  			if right.Op != OpRsh16Ux16 {
   983  				continue
   984  			}
   985  			y := right.Args[1]
   986  			x := right.Args[0]
   987  			left := v_1
   988  			if left.Op != OpLsh16x16 {
   989  				continue
   990  			}
   991  			_ = left.Args[1]
   992  			if x != left.Args[0] {
   993  				continue
   994  			}
   995  			z := left.Args[1]
   996  			if z.Op != OpSub16 {
   997  				continue
   998  			}
   999  			_ = z.Args[1]
  1000  			z_0 := z.Args[0]
  1001  			if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
  1002  				continue
  1003  			}
  1004  			v.reset(OpRotateLeft16)
  1005  			v.AddArg2(x, z)
  1006  			return true
  1007  		}
  1008  		break
  1009  	}
  1010  	// match: (Add16 right:(Rsh16Ux8 x y) left:(Lsh16x8 x z:(Sub8 (Const8 [16]) y)))
  1011  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
  1012  	// result: (RotateLeft16 x z)
  1013  	for {
  1014  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1015  			right := v_0
  1016  			if right.Op != OpRsh16Ux8 {
  1017  				continue
  1018  			}
  1019  			y := right.Args[1]
  1020  			x := right.Args[0]
  1021  			left := v_1
  1022  			if left.Op != OpLsh16x8 {
  1023  				continue
  1024  			}
  1025  			_ = left.Args[1]
  1026  			if x != left.Args[0] {
  1027  				continue
  1028  			}
  1029  			z := left.Args[1]
  1030  			if z.Op != OpSub8 {
  1031  				continue
  1032  			}
  1033  			_ = z.Args[1]
  1034  			z_0 := z.Args[0]
  1035  			if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
  1036  				continue
  1037  			}
  1038  			v.reset(OpRotateLeft16)
  1039  			v.AddArg2(x, z)
  1040  			return true
  1041  		}
  1042  		break
  1043  	}
  1044  	return false
  1045  }
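// The remaining integer Add widths below follow the same rule set as Add16,
// specialized to their operand size.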
  1046  func rewriteValuegeneric_OpAdd32(v *Value) bool {
  1047  	v_1 := v.Args[1]
  1048  	v_0 := v.Args[0]
  1049  	b := v.Block
  1050  	config := b.Func.Config
  1051  	// match: (Add32 (Const32 [c]) (Const32 [d]))
  1052  	// result: (Const32 [c+d])
  1053  	for {
  1054  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1055  			if v_0.Op != OpConst32 {
  1056  				continue
  1057  			}
  1058  			c := auxIntToInt32(v_0.AuxInt)
  1059  			if v_1.Op != OpConst32 {
  1060  				continue
  1061  			}
  1062  			d := auxIntToInt32(v_1.AuxInt)
  1063  			v.reset(OpConst32)
  1064  			v.AuxInt = int32ToAuxInt(c + d)
  1065  			return true
  1066  		}
  1067  		break
  1068  	}
  1069  	// match: (Add32 <t> (Mul32 x y) (Mul32 x z))
  1070  	// result: (Mul32 x (Add32 <t> y z))
  1071  	for {
  1072  		t := v.Type
  1073  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1074  			if v_0.Op != OpMul32 {
  1075  				continue
  1076  			}
  1077  			_ = v_0.Args[1]
  1078  			v_0_0 := v_0.Args[0]
  1079  			v_0_1 := v_0.Args[1]
  1080  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
  1081  				x := v_0_0
  1082  				y := v_0_1
  1083  				if v_1.Op != OpMul32 {
  1084  					continue
  1085  				}
  1086  				_ = v_1.Args[1]
  1087  				v_1_0 := v_1.Args[0]
  1088  				v_1_1 := v_1.Args[1]
  1089  				for _i2 := 0; _i2 <= 1; _i2, v_1_0, v_1_1 = _i2+1, v_1_1, v_1_0 {
  1090  					if x != v_1_0 {
  1091  						continue
  1092  					}
  1093  					z := v_1_1
  1094  					v.reset(OpMul32)
  1095  					v0 := b.NewValue0(v.Pos, OpAdd32, t)
  1096  					v0.AddArg2(y, z)
  1097  					v.AddArg2(x, v0)
  1098  					return true
  1099  				}
  1100  			}
  1101  		}
  1102  		break
  1103  	}
  1104  	// match: (Add32 (Const32 [0]) x)
  1105  	// result: x
  1106  	for {
  1107  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1108  			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
  1109  				continue
  1110  			}
  1111  			x := v_1
  1112  			v.copyOf(x)
  1113  			return true
  1114  		}
  1115  		break
  1116  	}
  1117  	// match: (Add32 x (Neg32 y))
  1118  	// result: (Sub32 x y)
  1119  	for {
  1120  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1121  			x := v_0
  1122  			if v_1.Op != OpNeg32 {
  1123  				continue
  1124  			}
  1125  			y := v_1.Args[0]
  1126  			v.reset(OpSub32)
  1127  			v.AddArg2(x, y)
  1128  			return true
  1129  		}
  1130  		break
  1131  	}
  1132  	// match: (Add32 (Com32 x) x)
  1133  	// result: (Const32 [-1])
  1134  	for {
  1135  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1136  			if v_0.Op != OpCom32 {
  1137  				continue
  1138  			}
  1139  			x := v_0.Args[0]
  1140  			if x != v_1 {
  1141  				continue
  1142  			}
  1143  			v.reset(OpConst32)
  1144  			v.AuxInt = int32ToAuxInt(-1)
  1145  			return true
  1146  		}
  1147  		break
  1148  	}
  1149  	// match: (Add32 (Const32 [1]) (Com32 x))
  1150  	// result: (Neg32 x)
  1151  	for {
  1152  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1153  			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 1 || v_1.Op != OpCom32 {
  1154  				continue
  1155  			}
  1156  			x := v_1.Args[0]
  1157  			v.reset(OpNeg32)
  1158  			v.AddArg(x)
  1159  			return true
  1160  		}
  1161  		break
  1162  	}
  1163  	// match: (Add32 x (Sub32 y x))
  1164  	// result: y
  1165  	for {
  1166  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1167  			x := v_0
  1168  			if v_1.Op != OpSub32 {
  1169  				continue
  1170  			}
  1171  			_ = v_1.Args[1]
  1172  			y := v_1.Args[0]
  1173  			if x != v_1.Args[1] {
  1174  				continue
  1175  			}
  1176  			v.copyOf(y)
  1177  			return true
  1178  		}
  1179  		break
  1180  	}
  1181  	// match: (Add32 x (Add32 y (Sub32 z x)))
  1182  	// result: (Add32 y z)
  1183  	for {
  1184  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1185  			x := v_0
  1186  			if v_1.Op != OpAdd32 {
  1187  				continue
  1188  			}
  1189  			_ = v_1.Args[1]
  1190  			v_1_0 := v_1.Args[0]
  1191  			v_1_1 := v_1.Args[1]
  1192  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  1193  				y := v_1_0
  1194  				if v_1_1.Op != OpSub32 {
  1195  					continue
  1196  				}
  1197  				_ = v_1_1.Args[1]
  1198  				z := v_1_1.Args[0]
  1199  				if x != v_1_1.Args[1] {
  1200  					continue
  1201  				}
  1202  				v.reset(OpAdd32)
  1203  				v.AddArg2(y, z)
  1204  				return true
  1205  			}
  1206  		}
  1207  		break
  1208  	}
  1209  	// match: (Add32 (Add32 i:(Const32 <t>) z) x)
  1210  	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
  1211  	// result: (Add32 i (Add32 <t> z x))
  1212  	for {
  1213  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1214  			if v_0.Op != OpAdd32 {
  1215  				continue
  1216  			}
  1217  			_ = v_0.Args[1]
  1218  			v_0_0 := v_0.Args[0]
  1219  			v_0_1 := v_0.Args[1]
  1220  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
  1221  				i := v_0_0
  1222  				if i.Op != OpConst32 {
  1223  					continue
  1224  				}
  1225  				t := i.Type
  1226  				z := v_0_1
  1227  				x := v_1
  1228  				if !(z.Op != OpConst32 && x.Op != OpConst32) {
  1229  					continue
  1230  				}
  1231  				v.reset(OpAdd32)
  1232  				v0 := b.NewValue0(v.Pos, OpAdd32, t)
  1233  				v0.AddArg2(z, x)
  1234  				v.AddArg2(i, v0)
  1235  				return true
  1236  			}
  1237  		}
  1238  		break
  1239  	}
  1240  	// match: (Add32 (Sub32 i:(Const32 <t>) z) x)
  1241  	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
  1242  	// result: (Add32 i (Sub32 <t> x z))
  1243  	for {
  1244  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1245  			if v_0.Op != OpSub32 {
  1246  				continue
  1247  			}
  1248  			z := v_0.Args[1]
  1249  			i := v_0.Args[0]
  1250  			if i.Op != OpConst32 {
  1251  				continue
  1252  			}
  1253  			t := i.Type
  1254  			x := v_1
  1255  			if !(z.Op != OpConst32 && x.Op != OpConst32) {
  1256  				continue
  1257  			}
  1258  			v.reset(OpAdd32)
  1259  			v0 := b.NewValue0(v.Pos, OpSub32, t)
  1260  			v0.AddArg2(x, z)
  1261  			v.AddArg2(i, v0)
  1262  			return true
  1263  		}
  1264  		break
  1265  	}
  1266  	// match: (Add32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x))
  1267  	// result: (Add32 (Const32 <t> [c+d]) x)
  1268  	for {
  1269  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1270  			if v_0.Op != OpConst32 {
  1271  				continue
  1272  			}
  1273  			t := v_0.Type
  1274  			c := auxIntToInt32(v_0.AuxInt)
  1275  			if v_1.Op != OpAdd32 {
  1276  				continue
  1277  			}
  1278  			_ = v_1.Args[1]
  1279  			v_1_0 := v_1.Args[0]
  1280  			v_1_1 := v_1.Args[1]
  1281  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  1282  				if v_1_0.Op != OpConst32 || v_1_0.Type != t {
  1283  					continue
  1284  				}
  1285  				d := auxIntToInt32(v_1_0.AuxInt)
  1286  				x := v_1_1
  1287  				v.reset(OpAdd32)
  1288  				v0 := b.NewValue0(v.Pos, OpConst32, t)
  1289  				v0.AuxInt = int32ToAuxInt(c + d)
  1290  				v.AddArg2(v0, x)
  1291  				return true
  1292  			}
  1293  		}
  1294  		break
  1295  	}
  1296  	// match: (Add32 (Const32 <t> [c]) (Sub32 (Const32 <t> [d]) x))
  1297  	// result: (Sub32 (Const32 <t> [c+d]) x)
  1298  	for {
  1299  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1300  			if v_0.Op != OpConst32 {
  1301  				continue
  1302  			}
  1303  			t := v_0.Type
  1304  			c := auxIntToInt32(v_0.AuxInt)
  1305  			if v_1.Op != OpSub32 {
  1306  				continue
  1307  			}
  1308  			x := v_1.Args[1]
  1309  			v_1_0 := v_1.Args[0]
  1310  			if v_1_0.Op != OpConst32 || v_1_0.Type != t {
  1311  				continue
  1312  			}
  1313  			d := auxIntToInt32(v_1_0.AuxInt)
  1314  			v.reset(OpSub32)
  1315  			v0 := b.NewValue0(v.Pos, OpConst32, t)
  1316  			v0.AuxInt = int32ToAuxInt(c + d)
  1317  			v.AddArg2(v0, x)
  1318  			return true
  1319  		}
  1320  		break
  1321  	}
  1322  	// match: (Add32 (Lsh32x64 x z:(Const64 <t> [c])) (Rsh32Ux64 x (Const64 [d])))
  1323  	// cond: c < 32 && d == 32-c && canRotate(config, 32)
  1324  	// result: (RotateLeft32 x z)
  1325  	for {
  1326  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1327  			if v_0.Op != OpLsh32x64 {
  1328  				continue
  1329  			}
  1330  			_ = v_0.Args[1]
  1331  			x := v_0.Args[0]
  1332  			z := v_0.Args[1]
  1333  			if z.Op != OpConst64 {
  1334  				continue
  1335  			}
  1336  			c := auxIntToInt64(z.AuxInt)
  1337  			if v_1.Op != OpRsh32Ux64 {
  1338  				continue
  1339  			}
  1340  			_ = v_1.Args[1]
  1341  			if x != v_1.Args[0] {
  1342  				continue
  1343  			}
  1344  			v_1_1 := v_1.Args[1]
  1345  			if v_1_1.Op != OpConst64 {
  1346  				continue
  1347  			}
  1348  			d := auxIntToInt64(v_1_1.AuxInt)
  1349  			if !(c < 32 && d == 32-c && canRotate(config, 32)) {
  1350  				continue
  1351  			}
  1352  			v.reset(OpRotateLeft32)
  1353  			v.AddArg2(x, z)
  1354  			return true
  1355  		}
  1356  		break
  1357  	}
  1358  	// match: (Add32 left:(Lsh32x64 x y) right:(Rsh32Ux64 x (Sub64 (Const64 [32]) y)))
  1359  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
  1360  	// result: (RotateLeft32 x y)
  1361  	for {
  1362  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1363  			left := v_0
  1364  			if left.Op != OpLsh32x64 {
  1365  				continue
  1366  			}
  1367  			y := left.Args[1]
  1368  			x := left.Args[0]
  1369  			right := v_1
  1370  			if right.Op != OpRsh32Ux64 {
  1371  				continue
  1372  			}
  1373  			_ = right.Args[1]
  1374  			if x != right.Args[0] {
  1375  				continue
  1376  			}
  1377  			right_1 := right.Args[1]
  1378  			if right_1.Op != OpSub64 {
  1379  				continue
  1380  			}
  1381  			_ = right_1.Args[1]
  1382  			right_1_0 := right_1.Args[0]
  1383  			if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
  1384  				continue
  1385  			}
  1386  			v.reset(OpRotateLeft32)
  1387  			v.AddArg2(x, y)
  1388  			return true
  1389  		}
  1390  		break
  1391  	}
  1392  	// match: (Add32 left:(Lsh32x32 x y) right:(Rsh32Ux32 x (Sub32 (Const32 [32]) y)))
  1393  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
  1394  	// result: (RotateLeft32 x y)
  1395  	for {
  1396  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1397  			left := v_0
  1398  			if left.Op != OpLsh32x32 {
  1399  				continue
  1400  			}
  1401  			y := left.Args[1]
  1402  			x := left.Args[0]
  1403  			right := v_1
  1404  			if right.Op != OpRsh32Ux32 {
  1405  				continue
  1406  			}
  1407  			_ = right.Args[1]
  1408  			if x != right.Args[0] {
  1409  				continue
  1410  			}
  1411  			right_1 := right.Args[1]
  1412  			if right_1.Op != OpSub32 {
  1413  				continue
  1414  			}
  1415  			_ = right_1.Args[1]
  1416  			right_1_0 := right_1.Args[0]
  1417  			if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
  1418  				continue
  1419  			}
  1420  			v.reset(OpRotateLeft32)
  1421  			v.AddArg2(x, y)
  1422  			return true
  1423  		}
  1424  		break
  1425  	}
  1426  	// match: (Add32 left:(Lsh32x16 x y) right:(Rsh32Ux16 x (Sub16 (Const16 [32]) y)))
  1427  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
  1428  	// result: (RotateLeft32 x y)
  1429  	for {
  1430  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1431  			left := v_0
  1432  			if left.Op != OpLsh32x16 {
  1433  				continue
  1434  			}
  1435  			y := left.Args[1]
  1436  			x := left.Args[0]
  1437  			right := v_1
  1438  			if right.Op != OpRsh32Ux16 {
  1439  				continue
  1440  			}
  1441  			_ = right.Args[1]
  1442  			if x != right.Args[0] {
  1443  				continue
  1444  			}
  1445  			right_1 := right.Args[1]
  1446  			if right_1.Op != OpSub16 {
  1447  				continue
  1448  			}
  1449  			_ = right_1.Args[1]
  1450  			right_1_0 := right_1.Args[0]
  1451  			if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
  1452  				continue
  1453  			}
  1454  			v.reset(OpRotateLeft32)
  1455  			v.AddArg2(x, y)
  1456  			return true
  1457  		}
  1458  		break
  1459  	}
  1460  	// match: (Add32 left:(Lsh32x8 x y) right:(Rsh32Ux8 x (Sub8 (Const8 [32]) y)))
  1461  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
  1462  	// result: (RotateLeft32 x y)
  1463  	for {
  1464  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1465  			left := v_0
  1466  			if left.Op != OpLsh32x8 {
  1467  				continue
  1468  			}
  1469  			y := left.Args[1]
  1470  			x := left.Args[0]
  1471  			right := v_1
  1472  			if right.Op != OpRsh32Ux8 {
  1473  				continue
  1474  			}
  1475  			_ = right.Args[1]
  1476  			if x != right.Args[0] {
  1477  				continue
  1478  			}
  1479  			right_1 := right.Args[1]
  1480  			if right_1.Op != OpSub8 {
  1481  				continue
  1482  			}
  1483  			_ = right_1.Args[1]
  1484  			right_1_0 := right_1.Args[0]
  1485  			if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
  1486  				continue
  1487  			}
  1488  			v.reset(OpRotateLeft32)
  1489  			v.AddArg2(x, y)
  1490  			return true
  1491  		}
  1492  		break
  1493  	}
  1494  	// match: (Add32 right:(Rsh32Ux64 x y) left:(Lsh32x64 x z:(Sub64 (Const64 [32]) y)))
  1495  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
  1496  	// result: (RotateLeft32 x z)
  1497  	for {
  1498  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1499  			right := v_0
  1500  			if right.Op != OpRsh32Ux64 {
  1501  				continue
  1502  			}
  1503  			y := right.Args[1]
  1504  			x := right.Args[0]
  1505  			left := v_1
  1506  			if left.Op != OpLsh32x64 {
  1507  				continue
  1508  			}
  1509  			_ = left.Args[1]
  1510  			if x != left.Args[0] {
  1511  				continue
  1512  			}
  1513  			z := left.Args[1]
  1514  			if z.Op != OpSub64 {
  1515  				continue
  1516  			}
  1517  			_ = z.Args[1]
  1518  			z_0 := z.Args[0]
  1519  			if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
  1520  				continue
  1521  			}
  1522  			v.reset(OpRotateLeft32)
  1523  			v.AddArg2(x, z)
  1524  			return true
  1525  		}
  1526  		break
  1527  	}
  1528  	// match: (Add32 right:(Rsh32Ux32 x y) left:(Lsh32x32 x z:(Sub32 (Const32 [32]) y)))
  1529  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
  1530  	// result: (RotateLeft32 x z)
  1531  	for {
  1532  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1533  			right := v_0
  1534  			if right.Op != OpRsh32Ux32 {
  1535  				continue
  1536  			}
  1537  			y := right.Args[1]
  1538  			x := right.Args[0]
  1539  			left := v_1
  1540  			if left.Op != OpLsh32x32 {
  1541  				continue
  1542  			}
  1543  			_ = left.Args[1]
  1544  			if x != left.Args[0] {
  1545  				continue
  1546  			}
  1547  			z := left.Args[1]
  1548  			if z.Op != OpSub32 {
  1549  				continue
  1550  			}
  1551  			_ = z.Args[1]
  1552  			z_0 := z.Args[0]
  1553  			if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
  1554  				continue
  1555  			}
  1556  			v.reset(OpRotateLeft32)
  1557  			v.AddArg2(x, z)
  1558  			return true
  1559  		}
  1560  		break
  1561  	}
  1562  	// match: (Add32 right:(Rsh32Ux16 x y) left:(Lsh32x16 x z:(Sub16 (Const16 [32]) y)))
  1563  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
  1564  	// result: (RotateLeft32 x z)
  1565  	for {
  1566  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1567  			right := v_0
  1568  			if right.Op != OpRsh32Ux16 {
  1569  				continue
  1570  			}
  1571  			y := right.Args[1]
  1572  			x := right.Args[0]
  1573  			left := v_1
  1574  			if left.Op != OpLsh32x16 {
  1575  				continue
  1576  			}
  1577  			_ = left.Args[1]
  1578  			if x != left.Args[0] {
  1579  				continue
  1580  			}
  1581  			z := left.Args[1]
  1582  			if z.Op != OpSub16 {
  1583  				continue
  1584  			}
  1585  			_ = z.Args[1]
  1586  			z_0 := z.Args[0]
  1587  			if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
  1588  				continue
  1589  			}
  1590  			v.reset(OpRotateLeft32)
  1591  			v.AddArg2(x, z)
  1592  			return true
  1593  		}
  1594  		break
  1595  	}
  1596  	// match: (Add32 right:(Rsh32Ux8 x y) left:(Lsh32x8 x z:(Sub8 (Const8 [32]) y)))
  1597  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
  1598  	// result: (RotateLeft32 x z)
  1599  	for {
  1600  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1601  			right := v_0
  1602  			if right.Op != OpRsh32Ux8 {
  1603  				continue
  1604  			}
  1605  			y := right.Args[1]
  1606  			x := right.Args[0]
  1607  			left := v_1
  1608  			if left.Op != OpLsh32x8 {
  1609  				continue
  1610  			}
  1611  			_ = left.Args[1]
  1612  			if x != left.Args[0] {
  1613  				continue
  1614  			}
  1615  			z := left.Args[1]
  1616  			if z.Op != OpSub8 {
  1617  				continue
  1618  			}
  1619  			_ = z.Args[1]
  1620  			z_0 := z.Args[0]
  1621  			if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
  1622  				continue
  1623  			}
  1624  			v.reset(OpRotateLeft32)
  1625  			v.AddArg2(x, z)
  1626  			return true
  1627  		}
  1628  		break
  1629  	}
  1630  	return false
  1631  }
  1632  func rewriteValuegeneric_OpAdd32F(v *Value) bool {
  1633  	v_1 := v.Args[1]
  1634  	v_0 := v.Args[0]
  1635  	// match: (Add32F (Const32F [c]) (Const32F [d]))
  1636  	// cond: c+d == c+d
  1637  	// result: (Const32F [c+d])
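	// The condition c+d == c+d fails only when the sum is NaN, so NaN results are
	// deliberately not constant-folded.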
  1638  	for {
  1639  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1640  			if v_0.Op != OpConst32F {
  1641  				continue
  1642  			}
  1643  			c := auxIntToFloat32(v_0.AuxInt)
  1644  			if v_1.Op != OpConst32F {
  1645  				continue
  1646  			}
  1647  			d := auxIntToFloat32(v_1.AuxInt)
  1648  			if !(c+d == c+d) {
  1649  				continue
  1650  			}
  1651  			v.reset(OpConst32F)
  1652  			v.AuxInt = float32ToAuxInt(c + d)
  1653  			return true
  1654  		}
  1655  		break
  1656  	}
  1657  	return false
  1658  }
  1659  func rewriteValuegeneric_OpAdd64(v *Value) bool {
  1660  	v_1 := v.Args[1]
  1661  	v_0 := v.Args[0]
  1662  	b := v.Block
  1663  	config := b.Func.Config
  1664  	// match: (Add64 (Const64 [c]) (Const64 [d]))
  1665  	// result: (Const64 [c+d])
  1666  	for {
  1667  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1668  			if v_0.Op != OpConst64 {
  1669  				continue
  1670  			}
  1671  			c := auxIntToInt64(v_0.AuxInt)
  1672  			if v_1.Op != OpConst64 {
  1673  				continue
  1674  			}
  1675  			d := auxIntToInt64(v_1.AuxInt)
  1676  			v.reset(OpConst64)
  1677  			v.AuxInt = int64ToAuxInt(c + d)
  1678  			return true
  1679  		}
  1680  		break
  1681  	}
  1682  	// match: (Add64 <t> (Mul64 x y) (Mul64 x z))
  1683  	// result: (Mul64 x (Add64 <t> y z))
  1684  	for {
  1685  		t := v.Type
  1686  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1687  			if v_0.Op != OpMul64 {
  1688  				continue
  1689  			}
  1690  			_ = v_0.Args[1]
  1691  			v_0_0 := v_0.Args[0]
  1692  			v_0_1 := v_0.Args[1]
  1693  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
  1694  				x := v_0_0
  1695  				y := v_0_1
  1696  				if v_1.Op != OpMul64 {
  1697  					continue
  1698  				}
  1699  				_ = v_1.Args[1]
  1700  				v_1_0 := v_1.Args[0]
  1701  				v_1_1 := v_1.Args[1]
  1702  				for _i2 := 0; _i2 <= 1; _i2, v_1_0, v_1_1 = _i2+1, v_1_1, v_1_0 {
  1703  					if x != v_1_0 {
  1704  						continue
  1705  					}
  1706  					z := v_1_1
  1707  					v.reset(OpMul64)
  1708  					v0 := b.NewValue0(v.Pos, OpAdd64, t)
  1709  					v0.AddArg2(y, z)
  1710  					v.AddArg2(x, v0)
  1711  					return true
  1712  				}
  1713  			}
  1714  		}
  1715  		break
  1716  	}
  1717  	// match: (Add64 (Const64 [0]) x)
  1718  	// result: x
  1719  	for {
  1720  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1721  			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
  1722  				continue
  1723  			}
  1724  			x := v_1
  1725  			v.copyOf(x)
  1726  			return true
  1727  		}
  1728  		break
  1729  	}
  1730  	// match: (Add64 x (Neg64 y))
  1731  	// result: (Sub64 x y)
  1732  	for {
  1733  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1734  			x := v_0
  1735  			if v_1.Op != OpNeg64 {
  1736  				continue
  1737  			}
  1738  			y := v_1.Args[0]
  1739  			v.reset(OpSub64)
  1740  			v.AddArg2(x, y)
  1741  			return true
  1742  		}
  1743  		break
  1744  	}
  1745  	// match: (Add64 (Com64 x) x)
  1746  	// result: (Const64 [-1])
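	// x + ^x has every bit set: at each bit position exactly one of the two
	// operands contributes a 1 and no carries occur, so the sum is the constant -1.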
  1747  	for {
  1748  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1749  			if v_0.Op != OpCom64 {
  1750  				continue
  1751  			}
  1752  			x := v_0.Args[0]
  1753  			if x != v_1 {
  1754  				continue
  1755  			}
  1756  			v.reset(OpConst64)
  1757  			v.AuxInt = int64ToAuxInt(-1)
  1758  			return true
  1759  		}
  1760  		break
  1761  	}
  1762  	// match: (Add64 (Const64 [1]) (Com64 x))
  1763  	// result: (Neg64 x)
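	// Two's-complement negation: -x == ^x + 1, so adding 1 to the complement of x
	// is rewritten as (Neg64 x).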
  1764  	for {
  1765  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1766  			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 1 || v_1.Op != OpCom64 {
  1767  				continue
  1768  			}
  1769  			x := v_1.Args[0]
  1770  			v.reset(OpNeg64)
  1771  			v.AddArg(x)
  1772  			return true
  1773  		}
  1774  		break
  1775  	}
  1776  	// match: (Add64 x (Sub64 y x))
  1777  	// result: y
  1778  	for {
  1779  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1780  			x := v_0
  1781  			if v_1.Op != OpSub64 {
  1782  				continue
  1783  			}
  1784  			_ = v_1.Args[1]
  1785  			y := v_1.Args[0]
  1786  			if x != v_1.Args[1] {
  1787  				continue
  1788  			}
  1789  			v.copyOf(y)
  1790  			return true
  1791  		}
  1792  		break
  1793  	}
  1794  	// match: (Add64 x (Add64 y (Sub64 z x)))
  1795  	// result: (Add64 y z)
  1796  	for {
  1797  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1798  			x := v_0
  1799  			if v_1.Op != OpAdd64 {
  1800  				continue
  1801  			}
  1802  			_ = v_1.Args[1]
  1803  			v_1_0 := v_1.Args[0]
  1804  			v_1_1 := v_1.Args[1]
  1805  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  1806  				y := v_1_0
  1807  				if v_1_1.Op != OpSub64 {
  1808  					continue
  1809  				}
  1810  				_ = v_1_1.Args[1]
  1811  				z := v_1_1.Args[0]
  1812  				if x != v_1_1.Args[1] {
  1813  					continue
  1814  				}
  1815  				v.reset(OpAdd64)
  1816  				v.AddArg2(y, z)
  1817  				return true
  1818  			}
  1819  		}
  1820  		break
  1821  	}
  1822  	// match: (Add64 (Add64 i:(Const64 <t>) z) x)
  1823  	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
  1824  	// result: (Add64 i (Add64 <t> z x))
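	// Reassociation: (i + z) + x is rewritten as i + (z + x), hoisting the
	// constant i outward. Requiring that neither z nor x is itself a constant
	// keeps this canonicalization from re-firing needlessly and leaves
	// constant-with-constant pairs to the dedicated folding rules above and below.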
  1825  	for {
  1826  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1827  			if v_0.Op != OpAdd64 {
  1828  				continue
  1829  			}
  1830  			_ = v_0.Args[1]
  1831  			v_0_0 := v_0.Args[0]
  1832  			v_0_1 := v_0.Args[1]
  1833  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
  1834  				i := v_0_0
  1835  				if i.Op != OpConst64 {
  1836  					continue
  1837  				}
  1838  				t := i.Type
  1839  				z := v_0_1
  1840  				x := v_1
  1841  				if !(z.Op != OpConst64 && x.Op != OpConst64) {
  1842  					continue
  1843  				}
  1844  				v.reset(OpAdd64)
  1845  				v0 := b.NewValue0(v.Pos, OpAdd64, t)
  1846  				v0.AddArg2(z, x)
  1847  				v.AddArg2(i, v0)
  1848  				return true
  1849  			}
  1850  		}
  1851  		break
  1852  	}
  1853  	// match: (Add64 (Sub64 i:(Const64 <t>) z) x)
  1854  	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
  1855  	// result: (Add64 i (Sub64 <t> x z))
  1856  	for {
  1857  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1858  			if v_0.Op != OpSub64 {
  1859  				continue
  1860  			}
  1861  			z := v_0.Args[1]
  1862  			i := v_0.Args[0]
  1863  			if i.Op != OpConst64 {
  1864  				continue
  1865  			}
  1866  			t := i.Type
  1867  			x := v_1
  1868  			if !(z.Op != OpConst64 && x.Op != OpConst64) {
  1869  				continue
  1870  			}
  1871  			v.reset(OpAdd64)
  1872  			v0 := b.NewValue0(v.Pos, OpSub64, t)
  1873  			v0.AddArg2(x, z)
  1874  			v.AddArg2(i, v0)
  1875  			return true
  1876  		}
  1877  		break
  1878  	}
  1879  	// match: (Add64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x))
  1880  	// result: (Add64 (Const64 <t> [c+d]) x)
  1881  	for {
  1882  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1883  			if v_0.Op != OpConst64 {
  1884  				continue
  1885  			}
  1886  			t := v_0.Type
  1887  			c := auxIntToInt64(v_0.AuxInt)
  1888  			if v_1.Op != OpAdd64 {
  1889  				continue
  1890  			}
  1891  			_ = v_1.Args[1]
  1892  			v_1_0 := v_1.Args[0]
  1893  			v_1_1 := v_1.Args[1]
  1894  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  1895  				if v_1_0.Op != OpConst64 || v_1_0.Type != t {
  1896  					continue
  1897  				}
  1898  				d := auxIntToInt64(v_1_0.AuxInt)
  1899  				x := v_1_1
  1900  				v.reset(OpAdd64)
  1901  				v0 := b.NewValue0(v.Pos, OpConst64, t)
  1902  				v0.AuxInt = int64ToAuxInt(c + d)
  1903  				v.AddArg2(v0, x)
  1904  				return true
  1905  			}
  1906  		}
  1907  		break
  1908  	}
  1909  	// match: (Add64 (Const64 <t> [c]) (Sub64 (Const64 <t> [d]) x))
  1910  	// result: (Sub64 (Const64 <t> [c+d]) x)
  1911  	for {
  1912  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1913  			if v_0.Op != OpConst64 {
  1914  				continue
  1915  			}
  1916  			t := v_0.Type
  1917  			c := auxIntToInt64(v_0.AuxInt)
  1918  			if v_1.Op != OpSub64 {
  1919  				continue
  1920  			}
  1921  			x := v_1.Args[1]
  1922  			v_1_0 := v_1.Args[0]
  1923  			if v_1_0.Op != OpConst64 || v_1_0.Type != t {
  1924  				continue
  1925  			}
  1926  			d := auxIntToInt64(v_1_0.AuxInt)
  1927  			v.reset(OpSub64)
  1928  			v0 := b.NewValue0(v.Pos, OpConst64, t)
  1929  			v0.AuxInt = int64ToAuxInt(c + d)
  1930  			v.AddArg2(v0, x)
  1931  			return true
  1932  		}
  1933  		break
  1934  	}
  1935  	// match: (Add64 (Lsh64x64 x z:(Const64 <t> [c])) (Rsh64Ux64 x (Const64 [d])))
  1936  	// cond: c < 64 && d == 64-c && canRotate(config, 64)
  1937  	// result: (RotateLeft64 x z)
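	// Shift pair to rotate: with c < 64 and d == 64-c the two shifted copies of x
	// occupy disjoint bit ranges, so their sum equals their OR and
	// (x << c) + (x >> d) is a left rotation of x by c bits (e.g. c=10, d=54).
	// canRotate(config, 64) additionally asks whether the target can express a
	// 64-bit rotate.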
  1938  	for {
  1939  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1940  			if v_0.Op != OpLsh64x64 {
  1941  				continue
  1942  			}
  1943  			_ = v_0.Args[1]
  1944  			x := v_0.Args[0]
  1945  			z := v_0.Args[1]
  1946  			if z.Op != OpConst64 {
  1947  				continue
  1948  			}
  1949  			c := auxIntToInt64(z.AuxInt)
  1950  			if v_1.Op != OpRsh64Ux64 {
  1951  				continue
  1952  			}
  1953  			_ = v_1.Args[1]
  1954  			if x != v_1.Args[0] {
  1955  				continue
  1956  			}
  1957  			v_1_1 := v_1.Args[1]
  1958  			if v_1_1.Op != OpConst64 {
  1959  				continue
  1960  			}
  1961  			d := auxIntToInt64(v_1_1.AuxInt)
  1962  			if !(c < 64 && d == 64-c && canRotate(config, 64)) {
  1963  				continue
  1964  			}
  1965  			v.reset(OpRotateLeft64)
  1966  			v.AddArg2(x, z)
  1967  			return true
  1968  		}
  1969  		break
  1970  	}
  1971  	// match: (Add64 left:(Lsh64x64 x y) right:(Rsh64Ux64 x (Sub64 (Const64 [64]) y)))
  1972  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
  1973  	// result: (RotateLeft64 x y)
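	// Variable-count rotate: x<<y added to x>>(64-y) is a left rotation by y, but
	// only when y is known to be in range. shiftIsBounded on either shift provides
	// that guarantee; an out-of-range count would make the shifts produce 0 rather
	// than wrapping the way a hardware rotate does, breaking the identity.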
  1974  	for {
  1975  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1976  			left := v_0
  1977  			if left.Op != OpLsh64x64 {
  1978  				continue
  1979  			}
  1980  			y := left.Args[1]
  1981  			x := left.Args[0]
  1982  			right := v_1
  1983  			if right.Op != OpRsh64Ux64 {
  1984  				continue
  1985  			}
  1986  			_ = right.Args[1]
  1987  			if x != right.Args[0] {
  1988  				continue
  1989  			}
  1990  			right_1 := right.Args[1]
  1991  			if right_1.Op != OpSub64 {
  1992  				continue
  1993  			}
  1994  			_ = right_1.Args[1]
  1995  			right_1_0 := right_1.Args[0]
  1996  			if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
  1997  				continue
  1998  			}
  1999  			v.reset(OpRotateLeft64)
  2000  			v.AddArg2(x, y)
  2001  			return true
  2002  		}
  2003  		break
  2004  	}
  2005  	// match: (Add64 left:(Lsh64x32 x y) right:(Rsh64Ux32 x (Sub32 (Const32 [64]) y)))
  2006  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
  2007  	// result: (RotateLeft64 x y)
  2008  	for {
  2009  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2010  			left := v_0
  2011  			if left.Op != OpLsh64x32 {
  2012  				continue
  2013  			}
  2014  			y := left.Args[1]
  2015  			x := left.Args[0]
  2016  			right := v_1
  2017  			if right.Op != OpRsh64Ux32 {
  2018  				continue
  2019  			}
  2020  			_ = right.Args[1]
  2021  			if x != right.Args[0] {
  2022  				continue
  2023  			}
  2024  			right_1 := right.Args[1]
  2025  			if right_1.Op != OpSub32 {
  2026  				continue
  2027  			}
  2028  			_ = right_1.Args[1]
  2029  			right_1_0 := right_1.Args[0]
  2030  			if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
  2031  				continue
  2032  			}
  2033  			v.reset(OpRotateLeft64)
  2034  			v.AddArg2(x, y)
  2035  			return true
  2036  		}
  2037  		break
  2038  	}
  2039  	// match: (Add64 left:(Lsh64x16 x y) right:(Rsh64Ux16 x (Sub16 (Const16 [64]) y)))
  2040  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
  2041  	// result: (RotateLeft64 x y)
  2042  	for {
  2043  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2044  			left := v_0
  2045  			if left.Op != OpLsh64x16 {
  2046  				continue
  2047  			}
  2048  			y := left.Args[1]
  2049  			x := left.Args[0]
  2050  			right := v_1
  2051  			if right.Op != OpRsh64Ux16 {
  2052  				continue
  2053  			}
  2054  			_ = right.Args[1]
  2055  			if x != right.Args[0] {
  2056  				continue
  2057  			}
  2058  			right_1 := right.Args[1]
  2059  			if right_1.Op != OpSub16 {
  2060  				continue
  2061  			}
  2062  			_ = right_1.Args[1]
  2063  			right_1_0 := right_1.Args[0]
  2064  			if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
  2065  				continue
  2066  			}
  2067  			v.reset(OpRotateLeft64)
  2068  			v.AddArg2(x, y)
  2069  			return true
  2070  		}
  2071  		break
  2072  	}
  2073  	// match: (Add64 left:(Lsh64x8 x y) right:(Rsh64Ux8 x (Sub8 (Const8 [64]) y)))
  2074  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
  2075  	// result: (RotateLeft64 x y)
  2076  	for {
  2077  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2078  			left := v_0
  2079  			if left.Op != OpLsh64x8 {
  2080  				continue
  2081  			}
  2082  			y := left.Args[1]
  2083  			x := left.Args[0]
  2084  			right := v_1
  2085  			if right.Op != OpRsh64Ux8 {
  2086  				continue
  2087  			}
  2088  			_ = right.Args[1]
  2089  			if x != right.Args[0] {
  2090  				continue
  2091  			}
  2092  			right_1 := right.Args[1]
  2093  			if right_1.Op != OpSub8 {
  2094  				continue
  2095  			}
  2096  			_ = right_1.Args[1]
  2097  			right_1_0 := right_1.Args[0]
  2098  			if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
  2099  				continue
  2100  			}
  2101  			v.reset(OpRotateLeft64)
  2102  			v.AddArg2(x, y)
  2103  			return true
  2104  		}
  2105  		break
  2106  	}
  2107  	// match: (Add64 right:(Rsh64Ux64 x y) left:(Lsh64x64 x z:(Sub64 (Const64 [64]) y)))
  2108  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
  2109  	// result: (RotateLeft64 x z)
  2110  	for {
  2111  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2112  			right := v_0
  2113  			if right.Op != OpRsh64Ux64 {
  2114  				continue
  2115  			}
  2116  			y := right.Args[1]
  2117  			x := right.Args[0]
  2118  			left := v_1
  2119  			if left.Op != OpLsh64x64 {
  2120  				continue
  2121  			}
  2122  			_ = left.Args[1]
  2123  			if x != left.Args[0] {
  2124  				continue
  2125  			}
  2126  			z := left.Args[1]
  2127  			if z.Op != OpSub64 {
  2128  				continue
  2129  			}
  2130  			_ = z.Args[1]
  2131  			z_0 := z.Args[0]
  2132  			if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
  2133  				continue
  2134  			}
  2135  			v.reset(OpRotateLeft64)
  2136  			v.AddArg2(x, z)
  2137  			return true
  2138  		}
  2139  		break
  2140  	}
  2141  	// match: (Add64 right:(Rsh64Ux32 x y) left:(Lsh64x32 x z:(Sub32 (Const32 [64]) y)))
  2142  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
  2143  	// result: (RotateLeft64 x z)
  2144  	for {
  2145  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2146  			right := v_0
  2147  			if right.Op != OpRsh64Ux32 {
  2148  				continue
  2149  			}
  2150  			y := right.Args[1]
  2151  			x := right.Args[0]
  2152  			left := v_1
  2153  			if left.Op != OpLsh64x32 {
  2154  				continue
  2155  			}
  2156  			_ = left.Args[1]
  2157  			if x != left.Args[0] {
  2158  				continue
  2159  			}
  2160  			z := left.Args[1]
  2161  			if z.Op != OpSub32 {
  2162  				continue
  2163  			}
  2164  			_ = z.Args[1]
  2165  			z_0 := z.Args[0]
  2166  			if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
  2167  				continue
  2168  			}
  2169  			v.reset(OpRotateLeft64)
  2170  			v.AddArg2(x, z)
  2171  			return true
  2172  		}
  2173  		break
  2174  	}
  2175  	// match: (Add64 right:(Rsh64Ux16 x y) left:(Lsh64x16 x z:(Sub16 (Const16 [64]) y)))
  2176  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
  2177  	// result: (RotateLeft64 x z)
  2178  	for {
  2179  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2180  			right := v_0
  2181  			if right.Op != OpRsh64Ux16 {
  2182  				continue
  2183  			}
  2184  			y := right.Args[1]
  2185  			x := right.Args[0]
  2186  			left := v_1
  2187  			if left.Op != OpLsh64x16 {
  2188  				continue
  2189  			}
  2190  			_ = left.Args[1]
  2191  			if x != left.Args[0] {
  2192  				continue
  2193  			}
  2194  			z := left.Args[1]
  2195  			if z.Op != OpSub16 {
  2196  				continue
  2197  			}
  2198  			_ = z.Args[1]
  2199  			z_0 := z.Args[0]
  2200  			if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
  2201  				continue
  2202  			}
  2203  			v.reset(OpRotateLeft64)
  2204  			v.AddArg2(x, z)
  2205  			return true
  2206  		}
  2207  		break
  2208  	}
  2209  	// match: (Add64 right:(Rsh64Ux8 x y) left:(Lsh64x8 x z:(Sub8 (Const8 [64]) y)))
  2210  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
  2211  	// result: (RotateLeft64 x z)
  2212  	for {
  2213  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2214  			right := v_0
  2215  			if right.Op != OpRsh64Ux8 {
  2216  				continue
  2217  			}
  2218  			y := right.Args[1]
  2219  			x := right.Args[0]
  2220  			left := v_1
  2221  			if left.Op != OpLsh64x8 {
  2222  				continue
  2223  			}
  2224  			_ = left.Args[1]
  2225  			if x != left.Args[0] {
  2226  				continue
  2227  			}
  2228  			z := left.Args[1]
  2229  			if z.Op != OpSub8 {
  2230  				continue
  2231  			}
  2232  			_ = z.Args[1]
  2233  			z_0 := z.Args[0]
  2234  			if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
  2235  				continue
  2236  			}
  2237  			v.reset(OpRotateLeft64)
  2238  			v.AddArg2(x, z)
  2239  			return true
  2240  		}
  2241  		break
  2242  	}
  2243  	return false
  2244  }
  2245  func rewriteValuegeneric_OpAdd64F(v *Value) bool {
  2246  	v_1 := v.Args[1]
  2247  	v_0 := v.Args[0]
  2248  	// match: (Add64F (Const64F [c]) (Const64F [d]))
  2249  	// cond: c+d == c+d
  2250  	// result: (Const64F [c+d])
  2251  	for {
  2252  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2253  			if v_0.Op != OpConst64F {
  2254  				continue
  2255  			}
  2256  			c := auxIntToFloat64(v_0.AuxInt)
  2257  			if v_1.Op != OpConst64F {
  2258  				continue
  2259  			}
  2260  			d := auxIntToFloat64(v_1.AuxInt)
  2261  			if !(c+d == c+d) {
  2262  				continue
  2263  			}
  2264  			v.reset(OpConst64F)
  2265  			v.AuxInt = float64ToAuxInt(c + d)
  2266  			return true
  2267  		}
  2268  		break
  2269  	}
  2270  	return false
  2271  }
  2272  func rewriteValuegeneric_OpAdd8(v *Value) bool {
  2273  	v_1 := v.Args[1]
  2274  	v_0 := v.Args[0]
  2275  	b := v.Block
  2276  	config := b.Func.Config
  2277  	// match: (Add8 (Const8 [c]) (Const8 [d]))
  2278  	// result: (Const8 [c+d])
  2279  	for {
  2280  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2281  			if v_0.Op != OpConst8 {
  2282  				continue
  2283  			}
  2284  			c := auxIntToInt8(v_0.AuxInt)
  2285  			if v_1.Op != OpConst8 {
  2286  				continue
  2287  			}
  2288  			d := auxIntToInt8(v_1.AuxInt)
  2289  			v.reset(OpConst8)
  2290  			v.AuxInt = int8ToAuxInt(c + d)
  2291  			return true
  2292  		}
  2293  		break
  2294  	}
  2295  	// match: (Add8 <t> (Mul8 x y) (Mul8 x z))
  2296  	// result: (Mul8 x (Add8 <t> y z))
  2297  	for {
  2298  		t := v.Type
  2299  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2300  			if v_0.Op != OpMul8 {
  2301  				continue
  2302  			}
  2303  			_ = v_0.Args[1]
  2304  			v_0_0 := v_0.Args[0]
  2305  			v_0_1 := v_0.Args[1]
  2306  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
  2307  				x := v_0_0
  2308  				y := v_0_1
  2309  				if v_1.Op != OpMul8 {
  2310  					continue
  2311  				}
  2312  				_ = v_1.Args[1]
  2313  				v_1_0 := v_1.Args[0]
  2314  				v_1_1 := v_1.Args[1]
  2315  				for _i2 := 0; _i2 <= 1; _i2, v_1_0, v_1_1 = _i2+1, v_1_1, v_1_0 {
  2316  					if x != v_1_0 {
  2317  						continue
  2318  					}
  2319  					z := v_1_1
  2320  					v.reset(OpMul8)
  2321  					v0 := b.NewValue0(v.Pos, OpAdd8, t)
  2322  					v0.AddArg2(y, z)
  2323  					v.AddArg2(x, v0)
  2324  					return true
  2325  				}
  2326  			}
  2327  		}
  2328  		break
  2329  	}
  2330  	// match: (Add8 (Const8 [0]) x)
  2331  	// result: x
  2332  	for {
  2333  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2334  			if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
  2335  				continue
  2336  			}
  2337  			x := v_1
  2338  			v.copyOf(x)
  2339  			return true
  2340  		}
  2341  		break
  2342  	}
  2343  	// match: (Add8 x (Neg8 y))
  2344  	// result: (Sub8 x y)
  2345  	for {
  2346  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2347  			x := v_0
  2348  			if v_1.Op != OpNeg8 {
  2349  				continue
  2350  			}
  2351  			y := v_1.Args[0]
  2352  			v.reset(OpSub8)
  2353  			v.AddArg2(x, y)
  2354  			return true
  2355  		}
  2356  		break
  2357  	}
  2358  	// match: (Add8 (Com8 x) x)
  2359  	// result: (Const8 [-1])
  2360  	for {
  2361  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2362  			if v_0.Op != OpCom8 {
  2363  				continue
  2364  			}
  2365  			x := v_0.Args[0]
  2366  			if x != v_1 {
  2367  				continue
  2368  			}
  2369  			v.reset(OpConst8)
  2370  			v.AuxInt = int8ToAuxInt(-1)
  2371  			return true
  2372  		}
  2373  		break
  2374  	}
  2375  	// match: (Add8 (Const8 [1]) (Com8 x))
  2376  	// result: (Neg8 x)
  2377  	for {
  2378  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2379  			if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 1 || v_1.Op != OpCom8 {
  2380  				continue
  2381  			}
  2382  			x := v_1.Args[0]
  2383  			v.reset(OpNeg8)
  2384  			v.AddArg(x)
  2385  			return true
  2386  		}
  2387  		break
  2388  	}
  2389  	// match: (Add8 x (Sub8 y x))
  2390  	// result: y
  2391  	for {
  2392  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2393  			x := v_0
  2394  			if v_1.Op != OpSub8 {
  2395  				continue
  2396  			}
  2397  			_ = v_1.Args[1]
  2398  			y := v_1.Args[0]
  2399  			if x != v_1.Args[1] {
  2400  				continue
  2401  			}
  2402  			v.copyOf(y)
  2403  			return true
  2404  		}
  2405  		break
  2406  	}
  2407  	// match: (Add8 x (Add8 y (Sub8 z x)))
  2408  	// result: (Add8 y z)
  2409  	for {
  2410  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2411  			x := v_0
  2412  			if v_1.Op != OpAdd8 {
  2413  				continue
  2414  			}
  2415  			_ = v_1.Args[1]
  2416  			v_1_0 := v_1.Args[0]
  2417  			v_1_1 := v_1.Args[1]
  2418  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  2419  				y := v_1_0
  2420  				if v_1_1.Op != OpSub8 {
  2421  					continue
  2422  				}
  2423  				_ = v_1_1.Args[1]
  2424  				z := v_1_1.Args[0]
  2425  				if x != v_1_1.Args[1] {
  2426  					continue
  2427  				}
  2428  				v.reset(OpAdd8)
  2429  				v.AddArg2(y, z)
  2430  				return true
  2431  			}
  2432  		}
  2433  		break
  2434  	}
  2435  	// match: (Add8 (Add8 i:(Const8 <t>) z) x)
  2436  	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
  2437  	// result: (Add8 i (Add8 <t> z x))
  2438  	for {
  2439  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2440  			if v_0.Op != OpAdd8 {
  2441  				continue
  2442  			}
  2443  			_ = v_0.Args[1]
  2444  			v_0_0 := v_0.Args[0]
  2445  			v_0_1 := v_0.Args[1]
  2446  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
  2447  				i := v_0_0
  2448  				if i.Op != OpConst8 {
  2449  					continue
  2450  				}
  2451  				t := i.Type
  2452  				z := v_0_1
  2453  				x := v_1
  2454  				if !(z.Op != OpConst8 && x.Op != OpConst8) {
  2455  					continue
  2456  				}
  2457  				v.reset(OpAdd8)
  2458  				v0 := b.NewValue0(v.Pos, OpAdd8, t)
  2459  				v0.AddArg2(z, x)
  2460  				v.AddArg2(i, v0)
  2461  				return true
  2462  			}
  2463  		}
  2464  		break
  2465  	}
  2466  	// match: (Add8 (Sub8 i:(Const8 <t>) z) x)
  2467  	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
  2468  	// result: (Add8 i (Sub8 <t> x z))
  2469  	for {
  2470  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2471  			if v_0.Op != OpSub8 {
  2472  				continue
  2473  			}
  2474  			z := v_0.Args[1]
  2475  			i := v_0.Args[0]
  2476  			if i.Op != OpConst8 {
  2477  				continue
  2478  			}
  2479  			t := i.Type
  2480  			x := v_1
  2481  			if !(z.Op != OpConst8 && x.Op != OpConst8) {
  2482  				continue
  2483  			}
  2484  			v.reset(OpAdd8)
  2485  			v0 := b.NewValue0(v.Pos, OpSub8, t)
  2486  			v0.AddArg2(x, z)
  2487  			v.AddArg2(i, v0)
  2488  			return true
  2489  		}
  2490  		break
  2491  	}
  2492  	// match: (Add8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x))
  2493  	// result: (Add8 (Const8 <t> [c+d]) x)
  2494  	for {
  2495  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2496  			if v_0.Op != OpConst8 {
  2497  				continue
  2498  			}
  2499  			t := v_0.Type
  2500  			c := auxIntToInt8(v_0.AuxInt)
  2501  			if v_1.Op != OpAdd8 {
  2502  				continue
  2503  			}
  2504  			_ = v_1.Args[1]
  2505  			v_1_0 := v_1.Args[0]
  2506  			v_1_1 := v_1.Args[1]
  2507  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  2508  				if v_1_0.Op != OpConst8 || v_1_0.Type != t {
  2509  					continue
  2510  				}
  2511  				d := auxIntToInt8(v_1_0.AuxInt)
  2512  				x := v_1_1
  2513  				v.reset(OpAdd8)
  2514  				v0 := b.NewValue0(v.Pos, OpConst8, t)
  2515  				v0.AuxInt = int8ToAuxInt(c + d)
  2516  				v.AddArg2(v0, x)
  2517  				return true
  2518  			}
  2519  		}
  2520  		break
  2521  	}
  2522  	// match: (Add8 (Const8 <t> [c]) (Sub8 (Const8 <t> [d]) x))
  2523  	// result: (Sub8 (Const8 <t> [c+d]) x)
  2524  	for {
  2525  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2526  			if v_0.Op != OpConst8 {
  2527  				continue
  2528  			}
  2529  			t := v_0.Type
  2530  			c := auxIntToInt8(v_0.AuxInt)
  2531  			if v_1.Op != OpSub8 {
  2532  				continue
  2533  			}
  2534  			x := v_1.Args[1]
  2535  			v_1_0 := v_1.Args[0]
  2536  			if v_1_0.Op != OpConst8 || v_1_0.Type != t {
  2537  				continue
  2538  			}
  2539  			d := auxIntToInt8(v_1_0.AuxInt)
  2540  			v.reset(OpSub8)
  2541  			v0 := b.NewValue0(v.Pos, OpConst8, t)
  2542  			v0.AuxInt = int8ToAuxInt(c + d)
  2543  			v.AddArg2(v0, x)
  2544  			return true
  2545  		}
  2546  		break
  2547  	}
  2548  	// match: (Add8 (Lsh8x64 x z:(Const64 <t> [c])) (Rsh8Ux64 x (Const64 [d])))
  2549  	// cond: c < 8 && d == 8-c && canRotate(config, 8)
  2550  	// result: (RotateLeft8 x z)
  2551  	for {
  2552  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2553  			if v_0.Op != OpLsh8x64 {
  2554  				continue
  2555  			}
  2556  			_ = v_0.Args[1]
  2557  			x := v_0.Args[0]
  2558  			z := v_0.Args[1]
  2559  			if z.Op != OpConst64 {
  2560  				continue
  2561  			}
  2562  			c := auxIntToInt64(z.AuxInt)
  2563  			if v_1.Op != OpRsh8Ux64 {
  2564  				continue
  2565  			}
  2566  			_ = v_1.Args[1]
  2567  			if x != v_1.Args[0] {
  2568  				continue
  2569  			}
  2570  			v_1_1 := v_1.Args[1]
  2571  			if v_1_1.Op != OpConst64 {
  2572  				continue
  2573  			}
  2574  			d := auxIntToInt64(v_1_1.AuxInt)
  2575  			if !(c < 8 && d == 8-c && canRotate(config, 8)) {
  2576  				continue
  2577  			}
  2578  			v.reset(OpRotateLeft8)
  2579  			v.AddArg2(x, z)
  2580  			return true
  2581  		}
  2582  		break
  2583  	}
  2584  	// match: (Add8 left:(Lsh8x64 x y) right:(Rsh8Ux64 x (Sub64 (Const64 [8]) y)))
  2585  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
  2586  	// result: (RotateLeft8 x y)
  2587  	for {
  2588  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2589  			left := v_0
  2590  			if left.Op != OpLsh8x64 {
  2591  				continue
  2592  			}
  2593  			y := left.Args[1]
  2594  			x := left.Args[0]
  2595  			right := v_1
  2596  			if right.Op != OpRsh8Ux64 {
  2597  				continue
  2598  			}
  2599  			_ = right.Args[1]
  2600  			if x != right.Args[0] {
  2601  				continue
  2602  			}
  2603  			right_1 := right.Args[1]
  2604  			if right_1.Op != OpSub64 {
  2605  				continue
  2606  			}
  2607  			_ = right_1.Args[1]
  2608  			right_1_0 := right_1.Args[0]
  2609  			if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
  2610  				continue
  2611  			}
  2612  			v.reset(OpRotateLeft8)
  2613  			v.AddArg2(x, y)
  2614  			return true
  2615  		}
  2616  		break
  2617  	}
  2618  	// match: (Add8 left:(Lsh8x32 x y) right:(Rsh8Ux32 x (Sub32 (Const32 [8]) y)))
  2619  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
  2620  	// result: (RotateLeft8 x y)
  2621  	for {
  2622  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2623  			left := v_0
  2624  			if left.Op != OpLsh8x32 {
  2625  				continue
  2626  			}
  2627  			y := left.Args[1]
  2628  			x := left.Args[0]
  2629  			right := v_1
  2630  			if right.Op != OpRsh8Ux32 {
  2631  				continue
  2632  			}
  2633  			_ = right.Args[1]
  2634  			if x != right.Args[0] {
  2635  				continue
  2636  			}
  2637  			right_1 := right.Args[1]
  2638  			if right_1.Op != OpSub32 {
  2639  				continue
  2640  			}
  2641  			_ = right_1.Args[1]
  2642  			right_1_0 := right_1.Args[0]
  2643  			if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
  2644  				continue
  2645  			}
  2646  			v.reset(OpRotateLeft8)
  2647  			v.AddArg2(x, y)
  2648  			return true
  2649  		}
  2650  		break
  2651  	}
  2652  	// match: (Add8 left:(Lsh8x16 x y) right:(Rsh8Ux16 x (Sub16 (Const16 [8]) y)))
  2653  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
  2654  	// result: (RotateLeft8 x y)
  2655  	for {
  2656  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2657  			left := v_0
  2658  			if left.Op != OpLsh8x16 {
  2659  				continue
  2660  			}
  2661  			y := left.Args[1]
  2662  			x := left.Args[0]
  2663  			right := v_1
  2664  			if right.Op != OpRsh8Ux16 {
  2665  				continue
  2666  			}
  2667  			_ = right.Args[1]
  2668  			if x != right.Args[0] {
  2669  				continue
  2670  			}
  2671  			right_1 := right.Args[1]
  2672  			if right_1.Op != OpSub16 {
  2673  				continue
  2674  			}
  2675  			_ = right_1.Args[1]
  2676  			right_1_0 := right_1.Args[0]
  2677  			if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
  2678  				continue
  2679  			}
  2680  			v.reset(OpRotateLeft8)
  2681  			v.AddArg2(x, y)
  2682  			return true
  2683  		}
  2684  		break
  2685  	}
  2686  	// match: (Add8 left:(Lsh8x8 x y) right:(Rsh8Ux8 x (Sub8 (Const8 [8]) y)))
  2687  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
  2688  	// result: (RotateLeft8 x y)
  2689  	for {
  2690  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2691  			left := v_0
  2692  			if left.Op != OpLsh8x8 {
  2693  				continue
  2694  			}
  2695  			y := left.Args[1]
  2696  			x := left.Args[0]
  2697  			right := v_1
  2698  			if right.Op != OpRsh8Ux8 {
  2699  				continue
  2700  			}
  2701  			_ = right.Args[1]
  2702  			if x != right.Args[0] {
  2703  				continue
  2704  			}
  2705  			right_1 := right.Args[1]
  2706  			if right_1.Op != OpSub8 {
  2707  				continue
  2708  			}
  2709  			_ = right_1.Args[1]
  2710  			right_1_0 := right_1.Args[0]
  2711  			if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
  2712  				continue
  2713  			}
  2714  			v.reset(OpRotateLeft8)
  2715  			v.AddArg2(x, y)
  2716  			return true
  2717  		}
  2718  		break
  2719  	}
  2720  	// match: (Add8 right:(Rsh8Ux64 x y) left:(Lsh8x64 x z:(Sub64 (Const64 [8]) y)))
  2721  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
  2722  	// result: (RotateLeft8 x z)
  2723  	for {
  2724  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2725  			right := v_0
  2726  			if right.Op != OpRsh8Ux64 {
  2727  				continue
  2728  			}
  2729  			y := right.Args[1]
  2730  			x := right.Args[0]
  2731  			left := v_1
  2732  			if left.Op != OpLsh8x64 {
  2733  				continue
  2734  			}
  2735  			_ = left.Args[1]
  2736  			if x != left.Args[0] {
  2737  				continue
  2738  			}
  2739  			z := left.Args[1]
  2740  			if z.Op != OpSub64 {
  2741  				continue
  2742  			}
  2743  			_ = z.Args[1]
  2744  			z_0 := z.Args[0]
  2745  			if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
  2746  				continue
  2747  			}
  2748  			v.reset(OpRotateLeft8)
  2749  			v.AddArg2(x, z)
  2750  			return true
  2751  		}
  2752  		break
  2753  	}
  2754  	// match: (Add8 right:(Rsh8Ux32 x y) left:(Lsh8x32 x z:(Sub32 (Const32 [8]) y)))
  2755  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
  2756  	// result: (RotateLeft8 x z)
  2757  	for {
  2758  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2759  			right := v_0
  2760  			if right.Op != OpRsh8Ux32 {
  2761  				continue
  2762  			}
  2763  			y := right.Args[1]
  2764  			x := right.Args[0]
  2765  			left := v_1
  2766  			if left.Op != OpLsh8x32 {
  2767  				continue
  2768  			}
  2769  			_ = left.Args[1]
  2770  			if x != left.Args[0] {
  2771  				continue
  2772  			}
  2773  			z := left.Args[1]
  2774  			if z.Op != OpSub32 {
  2775  				continue
  2776  			}
  2777  			_ = z.Args[1]
  2778  			z_0 := z.Args[0]
  2779  			if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
  2780  				continue
  2781  			}
  2782  			v.reset(OpRotateLeft8)
  2783  			v.AddArg2(x, z)
  2784  			return true
  2785  		}
  2786  		break
  2787  	}
  2788  	// match: (Add8 right:(Rsh8Ux16 x y) left:(Lsh8x16 x z:(Sub16 (Const16 [8]) y)))
  2789  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
  2790  	// result: (RotateLeft8 x z)
  2791  	for {
  2792  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2793  			right := v_0
  2794  			if right.Op != OpRsh8Ux16 {
  2795  				continue
  2796  			}
  2797  			y := right.Args[1]
  2798  			x := right.Args[0]
  2799  			left := v_1
  2800  			if left.Op != OpLsh8x16 {
  2801  				continue
  2802  			}
  2803  			_ = left.Args[1]
  2804  			if x != left.Args[0] {
  2805  				continue
  2806  			}
  2807  			z := left.Args[1]
  2808  			if z.Op != OpSub16 {
  2809  				continue
  2810  			}
  2811  			_ = z.Args[1]
  2812  			z_0 := z.Args[0]
  2813  			if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
  2814  				continue
  2815  			}
  2816  			v.reset(OpRotateLeft8)
  2817  			v.AddArg2(x, z)
  2818  			return true
  2819  		}
  2820  		break
  2821  	}
  2822  	// match: (Add8 right:(Rsh8Ux8 x y) left:(Lsh8x8 x z:(Sub8 (Const8 [8]) y)))
  2823  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
  2824  	// result: (RotateLeft8 x z)
  2825  	for {
  2826  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2827  			right := v_0
  2828  			if right.Op != OpRsh8Ux8 {
  2829  				continue
  2830  			}
  2831  			y := right.Args[1]
  2832  			x := right.Args[0]
  2833  			left := v_1
  2834  			if left.Op != OpLsh8x8 {
  2835  				continue
  2836  			}
  2837  			_ = left.Args[1]
  2838  			if x != left.Args[0] {
  2839  				continue
  2840  			}
  2841  			z := left.Args[1]
  2842  			if z.Op != OpSub8 {
  2843  				continue
  2844  			}
  2845  			_ = z.Args[1]
  2846  			z_0 := z.Args[0]
  2847  			if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
  2848  				continue
  2849  			}
  2850  			v.reset(OpRotateLeft8)
  2851  			v.AddArg2(x, z)
  2852  			return true
  2853  		}
  2854  		break
  2855  	}
  2856  	return false
  2857  }
  2858  func rewriteValuegeneric_OpAddPtr(v *Value) bool {
  2859  	v_1 := v.Args[1]
  2860  	v_0 := v.Args[0]
  2861  	// match: (AddPtr <t> x (Const64 [c]))
  2862  	// result: (OffPtr <t> x [c])
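	// Pointer arithmetic with a constant addend becomes an offset from the base
	// pointer, e.g. adding (Const64 [8]) to p yields (OffPtr <t> p [8]).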
  2863  	for {
  2864  		t := v.Type
  2865  		x := v_0
  2866  		if v_1.Op != OpConst64 {
  2867  			break
  2868  		}
  2869  		c := auxIntToInt64(v_1.AuxInt)
  2870  		v.reset(OpOffPtr)
  2871  		v.Type = t
  2872  		v.AuxInt = int64ToAuxInt(c)
  2873  		v.AddArg(x)
  2874  		return true
  2875  	}
  2876  	// match: (AddPtr <t> x (Const32 [c]))
  2877  	// result: (OffPtr <t> x [int64(c)])
  2878  	for {
  2879  		t := v.Type
  2880  		x := v_0
  2881  		if v_1.Op != OpConst32 {
  2882  			break
  2883  		}
  2884  		c := auxIntToInt32(v_1.AuxInt)
  2885  		v.reset(OpOffPtr)
  2886  		v.Type = t
  2887  		v.AuxInt = int64ToAuxInt(int64(c))
  2888  		v.AddArg(x)
  2889  		return true
  2890  	}
  2891  	return false
  2892  }
  2893  func rewriteValuegeneric_OpAnd16(v *Value) bool {
  2894  	v_1 := v.Args[1]
  2895  	v_0 := v.Args[0]
  2896  	b := v.Block
  2897  	// match: (And16 (Const16 [c]) (Const16 [d]))
  2898  	// result: (Const16 [c&d])
  2899  	for {
  2900  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2901  			if v_0.Op != OpConst16 {
  2902  				continue
  2903  			}
  2904  			c := auxIntToInt16(v_0.AuxInt)
  2905  			if v_1.Op != OpConst16 {
  2906  				continue
  2907  			}
  2908  			d := auxIntToInt16(v_1.AuxInt)
  2909  			v.reset(OpConst16)
  2910  			v.AuxInt = int16ToAuxInt(c & d)
  2911  			return true
  2912  		}
  2913  		break
  2914  	}
  2915  	// match: (And16 (Const16 [m]) (Rsh16Ux64 _ (Const64 [c])))
  2916  	// cond: c >= int64(16-ntz16(m))
  2917  	// result: (Const16 [0])
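	// Masking a right-shifted value: after an unsigned shift right by c, only the
	// low 16-c bits can be set. If c >= 16-ntz16(m), those bits all lie below the
	// lowest set bit of the mask m, so the AND is constant zero. The next rule is
	// the mirror image for left shifts and leading zeros.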
  2918  	for {
  2919  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2920  			if v_0.Op != OpConst16 {
  2921  				continue
  2922  			}
  2923  			m := auxIntToInt16(v_0.AuxInt)
  2924  			if v_1.Op != OpRsh16Ux64 {
  2925  				continue
  2926  			}
  2927  			_ = v_1.Args[1]
  2928  			v_1_1 := v_1.Args[1]
  2929  			if v_1_1.Op != OpConst64 {
  2930  				continue
  2931  			}
  2932  			c := auxIntToInt64(v_1_1.AuxInt)
  2933  			if !(c >= int64(16-ntz16(m))) {
  2934  				continue
  2935  			}
  2936  			v.reset(OpConst16)
  2937  			v.AuxInt = int16ToAuxInt(0)
  2938  			return true
  2939  		}
  2940  		break
  2941  	}
  2942  	// match: (And16 (Const16 [m]) (Lsh16x64 _ (Const64 [c])))
  2943  	// cond: c >= int64(16-nlz16(m))
  2944  	// result: (Const16 [0])
  2945  	for {
  2946  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2947  			if v_0.Op != OpConst16 {
  2948  				continue
  2949  			}
  2950  			m := auxIntToInt16(v_0.AuxInt)
  2951  			if v_1.Op != OpLsh16x64 {
  2952  				continue
  2953  			}
  2954  			_ = v_1.Args[1]
  2955  			v_1_1 := v_1.Args[1]
  2956  			if v_1_1.Op != OpConst64 {
  2957  				continue
  2958  			}
  2959  			c := auxIntToInt64(v_1_1.AuxInt)
  2960  			if !(c >= int64(16-nlz16(m))) {
  2961  				continue
  2962  			}
  2963  			v.reset(OpConst16)
  2964  			v.AuxInt = int16ToAuxInt(0)
  2965  			return true
  2966  		}
  2967  		break
  2968  	}
  2969  	// match: (And16 x x)
  2970  	// result: x
  2971  	for {
  2972  		x := v_0
  2973  		if x != v_1 {
  2974  			break
  2975  		}
  2976  		v.copyOf(x)
  2977  		return true
  2978  	}
  2979  	// match: (And16 (Const16 [-1]) x)
  2980  	// result: x
  2981  	for {
  2982  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2983  			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != -1 {
  2984  				continue
  2985  			}
  2986  			x := v_1
  2987  			v.copyOf(x)
  2988  			return true
  2989  		}
  2990  		break
  2991  	}
  2992  	// match: (And16 (Const16 [0]) _)
  2993  	// result: (Const16 [0])
  2994  	for {
  2995  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2996  			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
  2997  				continue
  2998  			}
  2999  			v.reset(OpConst16)
  3000  			v.AuxInt = int16ToAuxInt(0)
  3001  			return true
  3002  		}
  3003  		break
  3004  	}
  3005  	// match: (And16 (Com16 x) x)
  3006  	// result: (Const16 [0])
  3007  	for {
  3008  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3009  			if v_0.Op != OpCom16 {
  3010  				continue
  3011  			}
  3012  			x := v_0.Args[0]
  3013  			if x != v_1 {
  3014  				continue
  3015  			}
  3016  			v.reset(OpConst16)
  3017  			v.AuxInt = int16ToAuxInt(0)
  3018  			return true
  3019  		}
  3020  		break
  3021  	}
  3022  	// match: (And16 x (And16 x y))
  3023  	// result: (And16 x y)
  3024  	for {
  3025  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3026  			x := v_0
  3027  			if v_1.Op != OpAnd16 {
  3028  				continue
  3029  			}
  3030  			_ = v_1.Args[1]
  3031  			v_1_0 := v_1.Args[0]
  3032  			v_1_1 := v_1.Args[1]
  3033  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  3034  				if x != v_1_0 {
  3035  					continue
  3036  				}
  3037  				y := v_1_1
  3038  				v.reset(OpAnd16)
  3039  				v.AddArg2(x, y)
  3040  				return true
  3041  			}
  3042  		}
  3043  		break
  3044  	}
  3045  	// match: (And16 (And16 i:(Const16 <t>) z) x)
  3046  	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
  3047  	// result: (And16 i (And16 <t> z x))
  3048  	for {
  3049  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3050  			if v_0.Op != OpAnd16 {
  3051  				continue
  3052  			}
  3053  			_ = v_0.Args[1]
  3054  			v_0_0 := v_0.Args[0]
  3055  			v_0_1 := v_0.Args[1]
  3056  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
  3057  				i := v_0_0
  3058  				if i.Op != OpConst16 {
  3059  					continue
  3060  				}
  3061  				t := i.Type
  3062  				z := v_0_1
  3063  				x := v_1
  3064  				if !(z.Op != OpConst16 && x.Op != OpConst16) {
  3065  					continue
  3066  				}
  3067  				v.reset(OpAnd16)
  3068  				v0 := b.NewValue0(v.Pos, OpAnd16, t)
  3069  				v0.AddArg2(z, x)
  3070  				v.AddArg2(i, v0)
  3071  				return true
  3072  			}
  3073  		}
  3074  		break
  3075  	}
  3076  	// match: (And16 (Const16 <t> [c]) (And16 (Const16 <t> [d]) x))
  3077  	// result: (And16 (Const16 <t> [c&d]) x)
  3078  	for {
  3079  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3080  			if v_0.Op != OpConst16 {
  3081  				continue
  3082  			}
  3083  			t := v_0.Type
  3084  			c := auxIntToInt16(v_0.AuxInt)
  3085  			if v_1.Op != OpAnd16 {
  3086  				continue
  3087  			}
  3088  			_ = v_1.Args[1]
  3089  			v_1_0 := v_1.Args[0]
  3090  			v_1_1 := v_1.Args[1]
  3091  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  3092  				if v_1_0.Op != OpConst16 || v_1_0.Type != t {
  3093  					continue
  3094  				}
  3095  				d := auxIntToInt16(v_1_0.AuxInt)
  3096  				x := v_1_1
  3097  				v.reset(OpAnd16)
  3098  				v0 := b.NewValue0(v.Pos, OpConst16, t)
  3099  				v0.AuxInt = int16ToAuxInt(c & d)
  3100  				v.AddArg2(v0, x)
  3101  				return true
  3102  			}
  3103  		}
  3104  		break
  3105  	}
  3106  	return false
  3107  }
  3108  func rewriteValuegeneric_OpAnd32(v *Value) bool {
  3109  	v_1 := v.Args[1]
  3110  	v_0 := v.Args[0]
  3111  	b := v.Block
  3112  	// match: (And32 (Const32 [c]) (Const32 [d]))
  3113  	// result: (Const32 [c&d])
  3114  	for {
  3115  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3116  			if v_0.Op != OpConst32 {
  3117  				continue
  3118  			}
  3119  			c := auxIntToInt32(v_0.AuxInt)
  3120  			if v_1.Op != OpConst32 {
  3121  				continue
  3122  			}
  3123  			d := auxIntToInt32(v_1.AuxInt)
  3124  			v.reset(OpConst32)
  3125  			v.AuxInt = int32ToAuxInt(c & d)
  3126  			return true
  3127  		}
  3128  		break
  3129  	}
  3130  	// match: (And32 (Const32 [m]) (Rsh32Ux64 _ (Const64 [c])))
  3131  	// cond: c >= int64(32-ntz32(m))
  3132  	// result: (Const32 [0])
  3133  	for {
  3134  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3135  			if v_0.Op != OpConst32 {
  3136  				continue
  3137  			}
  3138  			m := auxIntToInt32(v_0.AuxInt)
  3139  			if v_1.Op != OpRsh32Ux64 {
  3140  				continue
  3141  			}
  3142  			_ = v_1.Args[1]
  3143  			v_1_1 := v_1.Args[1]
  3144  			if v_1_1.Op != OpConst64 {
  3145  				continue
  3146  			}
  3147  			c := auxIntToInt64(v_1_1.AuxInt)
  3148  			if !(c >= int64(32-ntz32(m))) {
  3149  				continue
  3150  			}
  3151  			v.reset(OpConst32)
  3152  			v.AuxInt = int32ToAuxInt(0)
  3153  			return true
  3154  		}
  3155  		break
  3156  	}
  3157  	// match: (And32 (Const32 [m]) (Lsh32x64 _ (Const64 [c])))
  3158  	// cond: c >= int64(32-nlz32(m))
  3159  	// result: (Const32 [0])
  3160  	for {
  3161  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3162  			if v_0.Op != OpConst32 {
  3163  				continue
  3164  			}
  3165  			m := auxIntToInt32(v_0.AuxInt)
  3166  			if v_1.Op != OpLsh32x64 {
  3167  				continue
  3168  			}
  3169  			_ = v_1.Args[1]
  3170  			v_1_1 := v_1.Args[1]
  3171  			if v_1_1.Op != OpConst64 {
  3172  				continue
  3173  			}
  3174  			c := auxIntToInt64(v_1_1.AuxInt)
  3175  			if !(c >= int64(32-nlz32(m))) {
  3176  				continue
  3177  			}
  3178  			v.reset(OpConst32)
  3179  			v.AuxInt = int32ToAuxInt(0)
  3180  			return true
  3181  		}
  3182  		break
  3183  	}
  3184  	// match: (And32 x x)
  3185  	// result: x
  3186  	for {
  3187  		x := v_0
  3188  		if x != v_1 {
  3189  			break
  3190  		}
  3191  		v.copyOf(x)
  3192  		return true
  3193  	}
  3194  	// match: (And32 (Const32 [-1]) x)
  3195  	// result: x
  3196  	for {
  3197  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3198  			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != -1 {
  3199  				continue
  3200  			}
  3201  			x := v_1
  3202  			v.copyOf(x)
  3203  			return true
  3204  		}
  3205  		break
  3206  	}
  3207  	// match: (And32 (Const32 [0]) _)
  3208  	// result: (Const32 [0])
  3209  	for {
  3210  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3211  			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
  3212  				continue
  3213  			}
  3214  			v.reset(OpConst32)
  3215  			v.AuxInt = int32ToAuxInt(0)
  3216  			return true
  3217  		}
  3218  		break
  3219  	}
  3220  	// match: (And32 (Com32 x) x)
  3221  	// result: (Const32 [0])
  3222  	for {
  3223  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3224  			if v_0.Op != OpCom32 {
  3225  				continue
  3226  			}
  3227  			x := v_0.Args[0]
  3228  			if x != v_1 {
  3229  				continue
  3230  			}
  3231  			v.reset(OpConst32)
  3232  			v.AuxInt = int32ToAuxInt(0)
  3233  			return true
  3234  		}
  3235  		break
  3236  	}
  3237  	// match: (And32 x (And32 x y))
  3238  	// result: (And32 x y)
  3239  	for {
  3240  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3241  			x := v_0
  3242  			if v_1.Op != OpAnd32 {
  3243  				continue
  3244  			}
  3245  			_ = v_1.Args[1]
  3246  			v_1_0 := v_1.Args[0]
  3247  			v_1_1 := v_1.Args[1]
  3248  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  3249  				if x != v_1_0 {
  3250  					continue
  3251  				}
  3252  				y := v_1_1
  3253  				v.reset(OpAnd32)
  3254  				v.AddArg2(x, y)
  3255  				return true
  3256  			}
  3257  		}
  3258  		break
  3259  	}
  3260  	// match: (And32 (And32 i:(Const32 <t>) z) x)
  3261  	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
  3262  	// result: (And32 i (And32 <t> z x))
  3263  	for {
  3264  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3265  			if v_0.Op != OpAnd32 {
  3266  				continue
  3267  			}
  3268  			_ = v_0.Args[1]
  3269  			v_0_0 := v_0.Args[0]
  3270  			v_0_1 := v_0.Args[1]
  3271  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
  3272  				i := v_0_0
  3273  				if i.Op != OpConst32 {
  3274  					continue
  3275  				}
  3276  				t := i.Type
  3277  				z := v_0_1
  3278  				x := v_1
  3279  				if !(z.Op != OpConst32 && x.Op != OpConst32) {
  3280  					continue
  3281  				}
  3282  				v.reset(OpAnd32)
  3283  				v0 := b.NewValue0(v.Pos, OpAnd32, t)
  3284  				v0.AddArg2(z, x)
  3285  				v.AddArg2(i, v0)
  3286  				return true
  3287  			}
  3288  		}
  3289  		break
  3290  	}
  3291  	// match: (And32 (Const32 <t> [c]) (And32 (Const32 <t> [d]) x))
  3292  	// result: (And32 (Const32 <t> [c&d]) x)
  3293  	for {
  3294  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3295  			if v_0.Op != OpConst32 {
  3296  				continue
  3297  			}
  3298  			t := v_0.Type
  3299  			c := auxIntToInt32(v_0.AuxInt)
  3300  			if v_1.Op != OpAnd32 {
  3301  				continue
  3302  			}
  3303  			_ = v_1.Args[1]
  3304  			v_1_0 := v_1.Args[0]
  3305  			v_1_1 := v_1.Args[1]
  3306  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  3307  				if v_1_0.Op != OpConst32 || v_1_0.Type != t {
  3308  					continue
  3309  				}
  3310  				d := auxIntToInt32(v_1_0.AuxInt)
  3311  				x := v_1_1
  3312  				v.reset(OpAnd32)
  3313  				v0 := b.NewValue0(v.Pos, OpConst32, t)
  3314  				v0.AuxInt = int32ToAuxInt(c & d)
  3315  				v.AddArg2(v0, x)
  3316  				return true
  3317  			}
  3318  		}
  3319  		break
  3320  	}
  3321  	return false
  3322  }
  3323  func rewriteValuegeneric_OpAnd64(v *Value) bool {
  3324  	v_1 := v.Args[1]
  3325  	v_0 := v.Args[0]
  3326  	b := v.Block
  3327  	// match: (And64 (Const64 [c]) (Const64 [d]))
  3328  	// result: (Const64 [c&d])
  3329  	for {
  3330  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3331  			if v_0.Op != OpConst64 {
  3332  				continue
  3333  			}
  3334  			c := auxIntToInt64(v_0.AuxInt)
  3335  			if v_1.Op != OpConst64 {
  3336  				continue
  3337  			}
  3338  			d := auxIntToInt64(v_1.AuxInt)
  3339  			v.reset(OpConst64)
  3340  			v.AuxInt = int64ToAuxInt(c & d)
  3341  			return true
  3342  		}
  3343  		break
  3344  	}
  3345  	// match: (And64 (Const64 [m]) (Rsh64Ux64 _ (Const64 [c])))
  3346  	// cond: c >= int64(64-ntz64(m))
  3347  	// result: (Const64 [0])
  3348  	for {
  3349  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3350  			if v_0.Op != OpConst64 {
  3351  				continue
  3352  			}
  3353  			m := auxIntToInt64(v_0.AuxInt)
  3354  			if v_1.Op != OpRsh64Ux64 {
  3355  				continue
  3356  			}
  3357  			_ = v_1.Args[1]
  3358  			v_1_1 := v_1.Args[1]
  3359  			if v_1_1.Op != OpConst64 {
  3360  				continue
  3361  			}
  3362  			c := auxIntToInt64(v_1_1.AuxInt)
  3363  			if !(c >= int64(64-ntz64(m))) {
  3364  				continue
  3365  			}
  3366  			v.reset(OpConst64)
  3367  			v.AuxInt = int64ToAuxInt(0)
  3368  			return true
  3369  		}
  3370  		break
  3371  	}
  3372  	// match: (And64 (Const64 [m]) (Lsh64x64 _ (Const64 [c])))
  3373  	// cond: c >= int64(64-nlz64(m))
  3374  	// result: (Const64 [0])
  3375  	for {
  3376  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3377  			if v_0.Op != OpConst64 {
  3378  				continue
  3379  			}
  3380  			m := auxIntToInt64(v_0.AuxInt)
  3381  			if v_1.Op != OpLsh64x64 {
  3382  				continue
  3383  			}
  3384  			_ = v_1.Args[1]
  3385  			v_1_1 := v_1.Args[1]
  3386  			if v_1_1.Op != OpConst64 {
  3387  				continue
  3388  			}
  3389  			c := auxIntToInt64(v_1_1.AuxInt)
  3390  			if !(c >= int64(64-nlz64(m))) {
  3391  				continue
  3392  			}
  3393  			v.reset(OpConst64)
  3394  			v.AuxInt = int64ToAuxInt(0)
  3395  			return true
  3396  		}
  3397  		break
  3398  	}
  3399  	// match: (And64 x x)
  3400  	// result: x
  3401  	for {
  3402  		x := v_0
  3403  		if x != v_1 {
  3404  			break
  3405  		}
  3406  		v.copyOf(x)
  3407  		return true
  3408  	}
  3409  	// match: (And64 (Const64 [-1]) x)
  3410  	// result: x
  3411  	for {
  3412  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3413  			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != -1 {
  3414  				continue
  3415  			}
  3416  			x := v_1
  3417  			v.copyOf(x)
  3418  			return true
  3419  		}
  3420  		break
  3421  	}
  3422  	// match: (And64 (Const64 [0]) _)
  3423  	// result: (Const64 [0])
  3424  	for {
  3425  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3426  			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
  3427  				continue
  3428  			}
  3429  			v.reset(OpConst64)
  3430  			v.AuxInt = int64ToAuxInt(0)
  3431  			return true
  3432  		}
  3433  		break
  3434  	}
  3435  	// match: (And64 (Com64 x) x)
  3436  	// result: (Const64 [0])
  3437  	for {
  3438  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3439  			if v_0.Op != OpCom64 {
  3440  				continue
  3441  			}
  3442  			x := v_0.Args[0]
  3443  			if x != v_1 {
  3444  				continue
  3445  			}
  3446  			v.reset(OpConst64)
  3447  			v.AuxInt = int64ToAuxInt(0)
  3448  			return true
  3449  		}
  3450  		break
  3451  	}
  3452  	// match: (And64 x (And64 x y))
  3453  	// result: (And64 x y)
  3454  	for {
  3455  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3456  			x := v_0
  3457  			if v_1.Op != OpAnd64 {
  3458  				continue
  3459  			}
  3460  			_ = v_1.Args[1]
  3461  			v_1_0 := v_1.Args[0]
  3462  			v_1_1 := v_1.Args[1]
  3463  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  3464  				if x != v_1_0 {
  3465  					continue
  3466  				}
  3467  				y := v_1_1
  3468  				v.reset(OpAnd64)
  3469  				v.AddArg2(x, y)
  3470  				return true
  3471  			}
  3472  		}
  3473  		break
  3474  	}
  3475  	// match: (And64 (And64 i:(Const64 <t>) z) x)
  3476  	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
  3477  	// result: (And64 i (And64 <t> z x))
  3478  	for {
  3479  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3480  			if v_0.Op != OpAnd64 {
  3481  				continue
  3482  			}
  3483  			_ = v_0.Args[1]
  3484  			v_0_0 := v_0.Args[0]
  3485  			v_0_1 := v_0.Args[1]
  3486  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
  3487  				i := v_0_0
  3488  				if i.Op != OpConst64 {
  3489  					continue
  3490  				}
  3491  				t := i.Type
  3492  				z := v_0_1
  3493  				x := v_1
  3494  				if !(z.Op != OpConst64 && x.Op != OpConst64) {
  3495  					continue
  3496  				}
  3497  				v.reset(OpAnd64)
  3498  				v0 := b.NewValue0(v.Pos, OpAnd64, t)
  3499  				v0.AddArg2(z, x)
  3500  				v.AddArg2(i, v0)
  3501  				return true
  3502  			}
  3503  		}
  3504  		break
  3505  	}
  3506  	// match: (And64 (Const64 <t> [c]) (And64 (Const64 <t> [d]) x))
  3507  	// result: (And64 (Const64 <t> [c&d]) x)
  3508  	for {
  3509  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3510  			if v_0.Op != OpConst64 {
  3511  				continue
  3512  			}
  3513  			t := v_0.Type
  3514  			c := auxIntToInt64(v_0.AuxInt)
  3515  			if v_1.Op != OpAnd64 {
  3516  				continue
  3517  			}
  3518  			_ = v_1.Args[1]
  3519  			v_1_0 := v_1.Args[0]
  3520  			v_1_1 := v_1.Args[1]
  3521  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  3522  				if v_1_0.Op != OpConst64 || v_1_0.Type != t {
  3523  					continue
  3524  				}
  3525  				d := auxIntToInt64(v_1_0.AuxInt)
  3526  				x := v_1_1
  3527  				v.reset(OpAnd64)
  3528  				v0 := b.NewValue0(v.Pos, OpConst64, t)
  3529  				v0.AuxInt = int64ToAuxInt(c & d)
  3530  				v.AddArg2(v0, x)
  3531  				return true
  3532  			}
  3533  		}
  3534  		break
  3535  	}
  3536  	return false
  3537  }
  3538  func rewriteValuegeneric_OpAnd8(v *Value) bool {
  3539  	v_1 := v.Args[1]
  3540  	v_0 := v.Args[0]
  3541  	b := v.Block
  3542  	// match: (And8 (Const8 [c]) (Const8 [d]))
  3543  	// result: (Const8 [c&d])
  3544  	for {
  3545  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3546  			if v_0.Op != OpConst8 {
  3547  				continue
  3548  			}
  3549  			c := auxIntToInt8(v_0.AuxInt)
  3550  			if v_1.Op != OpConst8 {
  3551  				continue
  3552  			}
  3553  			d := auxIntToInt8(v_1.AuxInt)
  3554  			v.reset(OpConst8)
  3555  			v.AuxInt = int8ToAuxInt(c & d)
  3556  			return true
  3557  		}
  3558  		break
  3559  	}
  3560  	// match: (And8 (Const8 [m]) (Rsh8Ux64 _ (Const64 [c])))
  3561  	// cond: c >= int64(8-ntz8(m))
  3562  	// result: (Const8 [0])
  3563  	for {
  3564  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3565  			if v_0.Op != OpConst8 {
  3566  				continue
  3567  			}
  3568  			m := auxIntToInt8(v_0.AuxInt)
  3569  			if v_1.Op != OpRsh8Ux64 {
  3570  				continue
  3571  			}
  3572  			_ = v_1.Args[1]
  3573  			v_1_1 := v_1.Args[1]
  3574  			if v_1_1.Op != OpConst64 {
  3575  				continue
  3576  			}
  3577  			c := auxIntToInt64(v_1_1.AuxInt)
  3578  			if !(c >= int64(8-ntz8(m))) {
  3579  				continue
  3580  			}
  3581  			v.reset(OpConst8)
  3582  			v.AuxInt = int8ToAuxInt(0)
  3583  			return true
  3584  		}
  3585  		break
  3586  	}
  3587  	// match: (And8 (Const8 [m]) (Lsh8x64 _ (Const64 [c])))
  3588  	// cond: c >= int64(8-nlz8(m))
  3589  	// result: (Const8 [0])
  3590  	for {
  3591  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3592  			if v_0.Op != OpConst8 {
  3593  				continue
  3594  			}
  3595  			m := auxIntToInt8(v_0.AuxInt)
  3596  			if v_1.Op != OpLsh8x64 {
  3597  				continue
  3598  			}
  3599  			_ = v_1.Args[1]
  3600  			v_1_1 := v_1.Args[1]
  3601  			if v_1_1.Op != OpConst64 {
  3602  				continue
  3603  			}
  3604  			c := auxIntToInt64(v_1_1.AuxInt)
  3605  			if !(c >= int64(8-nlz8(m))) {
  3606  				continue
  3607  			}
  3608  			v.reset(OpConst8)
  3609  			v.AuxInt = int8ToAuxInt(0)
  3610  			return true
  3611  		}
  3612  		break
  3613  	}
  3614  	// match: (And8 x x)
  3615  	// result: x
  3616  	for {
  3617  		x := v_0
  3618  		if x != v_1 {
  3619  			break
  3620  		}
  3621  		v.copyOf(x)
  3622  		return true
  3623  	}
  3624  	// match: (And8 (Const8 [-1]) x)
  3625  	// result: x
  3626  	for {
  3627  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3628  			if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != -1 {
  3629  				continue
  3630  			}
  3631  			x := v_1
  3632  			v.copyOf(x)
  3633  			return true
  3634  		}
  3635  		break
  3636  	}
  3637  	// match: (And8 (Const8 [0]) _)
  3638  	// result: (Const8 [0])
  3639  	for {
  3640  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3641  			if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
  3642  				continue
  3643  			}
  3644  			v.reset(OpConst8)
  3645  			v.AuxInt = int8ToAuxInt(0)
  3646  			return true
  3647  		}
  3648  		break
  3649  	}
  3650  	// match: (And8 (Com8 x) x)
  3651  	// result: (Const8 [0])
  3652  	for {
  3653  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3654  			if v_0.Op != OpCom8 {
  3655  				continue
  3656  			}
  3657  			x := v_0.Args[0]
  3658  			if x != v_1 {
  3659  				continue
  3660  			}
  3661  			v.reset(OpConst8)
  3662  			v.AuxInt = int8ToAuxInt(0)
  3663  			return true
  3664  		}
  3665  		break
  3666  	}
  3667  	// match: (And8 x (And8 x y))
  3668  	// result: (And8 x y)
  3669  	for {
  3670  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3671  			x := v_0
  3672  			if v_1.Op != OpAnd8 {
  3673  				continue
  3674  			}
  3675  			_ = v_1.Args[1]
  3676  			v_1_0 := v_1.Args[0]
  3677  			v_1_1 := v_1.Args[1]
  3678  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  3679  				if x != v_1_0 {
  3680  					continue
  3681  				}
  3682  				y := v_1_1
  3683  				v.reset(OpAnd8)
  3684  				v.AddArg2(x, y)
  3685  				return true
  3686  			}
  3687  		}
  3688  		break
  3689  	}
  3690  	// match: (And8 (And8 i:(Const8 <t>) z) x)
  3691  	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
  3692  	// result: (And8 i (And8 <t> z x))
  3693  	for {
  3694  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3695  			if v_0.Op != OpAnd8 {
  3696  				continue
  3697  			}
  3698  			_ = v_0.Args[1]
  3699  			v_0_0 := v_0.Args[0]
  3700  			v_0_1 := v_0.Args[1]
  3701  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
  3702  				i := v_0_0
  3703  				if i.Op != OpConst8 {
  3704  					continue
  3705  				}
  3706  				t := i.Type
  3707  				z := v_0_1
  3708  				x := v_1
  3709  				if !(z.Op != OpConst8 && x.Op != OpConst8) {
  3710  					continue
  3711  				}
  3712  				v.reset(OpAnd8)
  3713  				v0 := b.NewValue0(v.Pos, OpAnd8, t)
  3714  				v0.AddArg2(z, x)
  3715  				v.AddArg2(i, v0)
  3716  				return true
  3717  			}
  3718  		}
  3719  		break
  3720  	}
  3721  	// match: (And8 (Const8 <t> [c]) (And8 (Const8 <t> [d]) x))
  3722  	// result: (And8 (Const8 <t> [c&d]) x)
  3723  	for {
  3724  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3725  			if v_0.Op != OpConst8 {
  3726  				continue
  3727  			}
  3728  			t := v_0.Type
  3729  			c := auxIntToInt8(v_0.AuxInt)
  3730  			if v_1.Op != OpAnd8 {
  3731  				continue
  3732  			}
  3733  			_ = v_1.Args[1]
  3734  			v_1_0 := v_1.Args[0]
  3735  			v_1_1 := v_1.Args[1]
  3736  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  3737  				if v_1_0.Op != OpConst8 || v_1_0.Type != t {
  3738  					continue
  3739  				}
  3740  				d := auxIntToInt8(v_1_0.AuxInt)
  3741  				x := v_1_1
  3742  				v.reset(OpAnd8)
  3743  				v0 := b.NewValue0(v.Pos, OpConst8, t)
  3744  				v0.AuxInt = int8ToAuxInt(c & d)
  3745  				v.AddArg2(v0, x)
  3746  				return true
  3747  			}
  3748  		}
  3749  		break
  3750  	}
  3751  	return false
  3752  }
  3753  func rewriteValuegeneric_OpAndB(v *Value) bool {
  3754  	v_1 := v.Args[1]
  3755  	v_0 := v.Args[0]
  3756  	b := v.Block
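        	// Note: the AndB rules below fuse a pair of signed range checks into one unsigned
        	// comparison. When d >= c, (c <= x && x < d) holds exactly when the subtraction x-c,
        	// viewed as an unsigned value, is below d-c: any x in range gives a non-wrapping
        	// difference in [0, d-c), and any x outside it gives an unsigned value >= d-c.
        	// The Leq variants keep the upper bound inclusive by producing LeqNU instead of LessNU.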
  3757  	// match: (AndB (Leq64 (Const64 [c]) x) (Less64 x (Const64 [d])))
  3758  	// cond: d >= c
  3759  	// result: (Less64U (Sub64 <x.Type> x (Const64 <x.Type> [c])) (Const64 <x.Type> [d-c]))
  3760  	for {
  3761  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3762  			if v_0.Op != OpLeq64 {
  3763  				continue
  3764  			}
  3765  			x := v_0.Args[1]
  3766  			v_0_0 := v_0.Args[0]
  3767  			if v_0_0.Op != OpConst64 {
  3768  				continue
  3769  			}
  3770  			c := auxIntToInt64(v_0_0.AuxInt)
  3771  			if v_1.Op != OpLess64 {
  3772  				continue
  3773  			}
  3774  			_ = v_1.Args[1]
  3775  			if x != v_1.Args[0] {
  3776  				continue
  3777  			}
  3778  			v_1_1 := v_1.Args[1]
  3779  			if v_1_1.Op != OpConst64 {
  3780  				continue
  3781  			}
  3782  			d := auxIntToInt64(v_1_1.AuxInt)
  3783  			if !(d >= c) {
  3784  				continue
  3785  			}
  3786  			v.reset(OpLess64U)
  3787  			v0 := b.NewValue0(v.Pos, OpSub64, x.Type)
  3788  			v1 := b.NewValue0(v.Pos, OpConst64, x.Type)
  3789  			v1.AuxInt = int64ToAuxInt(c)
  3790  			v0.AddArg2(x, v1)
  3791  			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
  3792  			v2.AuxInt = int64ToAuxInt(d - c)
  3793  			v.AddArg2(v0, v2)
  3794  			return true
  3795  		}
  3796  		break
  3797  	}
  3798  	// match: (AndB (Leq64 (Const64 [c]) x) (Leq64 x (Const64 [d])))
  3799  	// cond: d >= c
  3800  	// result: (Leq64U (Sub64 <x.Type> x (Const64 <x.Type> [c])) (Const64 <x.Type> [d-c]))
  3801  	for {
  3802  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3803  			if v_0.Op != OpLeq64 {
  3804  				continue
  3805  			}
  3806  			x := v_0.Args[1]
  3807  			v_0_0 := v_0.Args[0]
  3808  			if v_0_0.Op != OpConst64 {
  3809  				continue
  3810  			}
  3811  			c := auxIntToInt64(v_0_0.AuxInt)
  3812  			if v_1.Op != OpLeq64 {
  3813  				continue
  3814  			}
  3815  			_ = v_1.Args[1]
  3816  			if x != v_1.Args[0] {
  3817  				continue
  3818  			}
  3819  			v_1_1 := v_1.Args[1]
  3820  			if v_1_1.Op != OpConst64 {
  3821  				continue
  3822  			}
  3823  			d := auxIntToInt64(v_1_1.AuxInt)
  3824  			if !(d >= c) {
  3825  				continue
  3826  			}
  3827  			v.reset(OpLeq64U)
  3828  			v0 := b.NewValue0(v.Pos, OpSub64, x.Type)
  3829  			v1 := b.NewValue0(v.Pos, OpConst64, x.Type)
  3830  			v1.AuxInt = int64ToAuxInt(c)
  3831  			v0.AddArg2(x, v1)
  3832  			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
  3833  			v2.AuxInt = int64ToAuxInt(d - c)
  3834  			v.AddArg2(v0, v2)
  3835  			return true
  3836  		}
  3837  		break
  3838  	}
  3839  	// match: (AndB (Leq32 (Const32 [c]) x) (Less32 x (Const32 [d])))
  3840  	// cond: d >= c
  3841  	// result: (Less32U (Sub32 <x.Type> x (Const32 <x.Type> [c])) (Const32 <x.Type> [d-c]))
  3842  	for {
  3843  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3844  			if v_0.Op != OpLeq32 {
  3845  				continue
  3846  			}
  3847  			x := v_0.Args[1]
  3848  			v_0_0 := v_0.Args[0]
  3849  			if v_0_0.Op != OpConst32 {
  3850  				continue
  3851  			}
  3852  			c := auxIntToInt32(v_0_0.AuxInt)
  3853  			if v_1.Op != OpLess32 {
  3854  				continue
  3855  			}
  3856  			_ = v_1.Args[1]
  3857  			if x != v_1.Args[0] {
  3858  				continue
  3859  			}
  3860  			v_1_1 := v_1.Args[1]
  3861  			if v_1_1.Op != OpConst32 {
  3862  				continue
  3863  			}
  3864  			d := auxIntToInt32(v_1_1.AuxInt)
  3865  			if !(d >= c) {
  3866  				continue
  3867  			}
  3868  			v.reset(OpLess32U)
  3869  			v0 := b.NewValue0(v.Pos, OpSub32, x.Type)
  3870  			v1 := b.NewValue0(v.Pos, OpConst32, x.Type)
  3871  			v1.AuxInt = int32ToAuxInt(c)
  3872  			v0.AddArg2(x, v1)
  3873  			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
  3874  			v2.AuxInt = int32ToAuxInt(d - c)
  3875  			v.AddArg2(v0, v2)
  3876  			return true
  3877  		}
  3878  		break
  3879  	}
  3880  	// match: (AndB (Leq32 (Const32 [c]) x) (Leq32 x (Const32 [d])))
  3881  	// cond: d >= c
  3882  	// result: (Leq32U (Sub32 <x.Type> x (Const32 <x.Type> [c])) (Const32 <x.Type> [d-c]))
  3883  	for {
  3884  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3885  			if v_0.Op != OpLeq32 {
  3886  				continue
  3887  			}
  3888  			x := v_0.Args[1]
  3889  			v_0_0 := v_0.Args[0]
  3890  			if v_0_0.Op != OpConst32 {
  3891  				continue
  3892  			}
  3893  			c := auxIntToInt32(v_0_0.AuxInt)
  3894  			if v_1.Op != OpLeq32 {
  3895  				continue
  3896  			}
  3897  			_ = v_1.Args[1]
  3898  			if x != v_1.Args[0] {
  3899  				continue
  3900  			}
  3901  			v_1_1 := v_1.Args[1]
  3902  			if v_1_1.Op != OpConst32 {
  3903  				continue
  3904  			}
  3905  			d := auxIntToInt32(v_1_1.AuxInt)
  3906  			if !(d >= c) {
  3907  				continue
  3908  			}
  3909  			v.reset(OpLeq32U)
  3910  			v0 := b.NewValue0(v.Pos, OpSub32, x.Type)
  3911  			v1 := b.NewValue0(v.Pos, OpConst32, x.Type)
  3912  			v1.AuxInt = int32ToAuxInt(c)
  3913  			v0.AddArg2(x, v1)
  3914  			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
  3915  			v2.AuxInt = int32ToAuxInt(d - c)
  3916  			v.AddArg2(v0, v2)
  3917  			return true
  3918  		}
  3919  		break
  3920  	}
  3921  	// match: (AndB (Leq16 (Const16 [c]) x) (Less16 x (Const16 [d])))
  3922  	// cond: d >= c
  3923  	// result: (Less16U (Sub16 <x.Type> x (Const16 <x.Type> [c])) (Const16 <x.Type> [d-c]))
  3924  	for {
  3925  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3926  			if v_0.Op != OpLeq16 {
  3927  				continue
  3928  			}
  3929  			x := v_0.Args[1]
  3930  			v_0_0 := v_0.Args[0]
  3931  			if v_0_0.Op != OpConst16 {
  3932  				continue
  3933  			}
  3934  			c := auxIntToInt16(v_0_0.AuxInt)
  3935  			if v_1.Op != OpLess16 {
  3936  				continue
  3937  			}
  3938  			_ = v_1.Args[1]
  3939  			if x != v_1.Args[0] {
  3940  				continue
  3941  			}
  3942  			v_1_1 := v_1.Args[1]
  3943  			if v_1_1.Op != OpConst16 {
  3944  				continue
  3945  			}
  3946  			d := auxIntToInt16(v_1_1.AuxInt)
  3947  			if !(d >= c) {
  3948  				continue
  3949  			}
  3950  			v.reset(OpLess16U)
  3951  			v0 := b.NewValue0(v.Pos, OpSub16, x.Type)
  3952  			v1 := b.NewValue0(v.Pos, OpConst16, x.Type)
  3953  			v1.AuxInt = int16ToAuxInt(c)
  3954  			v0.AddArg2(x, v1)
  3955  			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
  3956  			v2.AuxInt = int16ToAuxInt(d - c)
  3957  			v.AddArg2(v0, v2)
  3958  			return true
  3959  		}
  3960  		break
  3961  	}
  3962  	// match: (AndB (Leq16 (Const16 [c]) x) (Leq16 x (Const16 [d])))
  3963  	// cond: d >= c
  3964  	// result: (Leq16U (Sub16 <x.Type> x (Const16 <x.Type> [c])) (Const16 <x.Type> [d-c]))
  3965  	for {
  3966  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3967  			if v_0.Op != OpLeq16 {
  3968  				continue
  3969  			}
  3970  			x := v_0.Args[1]
  3971  			v_0_0 := v_0.Args[0]
  3972  			if v_0_0.Op != OpConst16 {
  3973  				continue
  3974  			}
  3975  			c := auxIntToInt16(v_0_0.AuxInt)
  3976  			if v_1.Op != OpLeq16 {
  3977  				continue
  3978  			}
  3979  			_ = v_1.Args[1]
  3980  			if x != v_1.Args[0] {
  3981  				continue
  3982  			}
  3983  			v_1_1 := v_1.Args[1]
  3984  			if v_1_1.Op != OpConst16 {
  3985  				continue
  3986  			}
  3987  			d := auxIntToInt16(v_1_1.AuxInt)
  3988  			if !(d >= c) {
  3989  				continue
  3990  			}
  3991  			v.reset(OpLeq16U)
  3992  			v0 := b.NewValue0(v.Pos, OpSub16, x.Type)
  3993  			v1 := b.NewValue0(v.Pos, OpConst16, x.Type)
  3994  			v1.AuxInt = int16ToAuxInt(c)
  3995  			v0.AddArg2(x, v1)
  3996  			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
  3997  			v2.AuxInt = int16ToAuxInt(d - c)
  3998  			v.AddArg2(v0, v2)
  3999  			return true
  4000  		}
  4001  		break
  4002  	}
  4003  	// match: (AndB (Leq8 (Const8 [c]) x) (Less8 x (Const8 [d])))
  4004  	// cond: d >= c
  4005  	// result: (Less8U (Sub8 <x.Type> x (Const8 <x.Type> [c])) (Const8 <x.Type> [d-c]))
  4006  	for {
  4007  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4008  			if v_0.Op != OpLeq8 {
  4009  				continue
  4010  			}
  4011  			x := v_0.Args[1]
  4012  			v_0_0 := v_0.Args[0]
  4013  			if v_0_0.Op != OpConst8 {
  4014  				continue
  4015  			}
  4016  			c := auxIntToInt8(v_0_0.AuxInt)
  4017  			if v_1.Op != OpLess8 {
  4018  				continue
  4019  			}
  4020  			_ = v_1.Args[1]
  4021  			if x != v_1.Args[0] {
  4022  				continue
  4023  			}
  4024  			v_1_1 := v_1.Args[1]
  4025  			if v_1_1.Op != OpConst8 {
  4026  				continue
  4027  			}
  4028  			d := auxIntToInt8(v_1_1.AuxInt)
  4029  			if !(d >= c) {
  4030  				continue
  4031  			}
  4032  			v.reset(OpLess8U)
  4033  			v0 := b.NewValue0(v.Pos, OpSub8, x.Type)
  4034  			v1 := b.NewValue0(v.Pos, OpConst8, x.Type)
  4035  			v1.AuxInt = int8ToAuxInt(c)
  4036  			v0.AddArg2(x, v1)
  4037  			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
  4038  			v2.AuxInt = int8ToAuxInt(d - c)
  4039  			v.AddArg2(v0, v2)
  4040  			return true
  4041  		}
  4042  		break
  4043  	}
  4044  	// match: (AndB (Leq8 (Const8 [c]) x) (Leq8 x (Const8 [d])))
  4045  	// cond: d >= c
  4046  	// result: (Leq8U (Sub8 <x.Type> x (Const8 <x.Type> [c])) (Const8 <x.Type> [d-c]))
  4047  	for {
  4048  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4049  			if v_0.Op != OpLeq8 {
  4050  				continue
  4051  			}
  4052  			x := v_0.Args[1]
  4053  			v_0_0 := v_0.Args[0]
  4054  			if v_0_0.Op != OpConst8 {
  4055  				continue
  4056  			}
  4057  			c := auxIntToInt8(v_0_0.AuxInt)
  4058  			if v_1.Op != OpLeq8 {
  4059  				continue
  4060  			}
  4061  			_ = v_1.Args[1]
  4062  			if x != v_1.Args[0] {
  4063  				continue
  4064  			}
  4065  			v_1_1 := v_1.Args[1]
  4066  			if v_1_1.Op != OpConst8 {
  4067  				continue
  4068  			}
  4069  			d := auxIntToInt8(v_1_1.AuxInt)
  4070  			if !(d >= c) {
  4071  				continue
  4072  			}
  4073  			v.reset(OpLeq8U)
  4074  			v0 := b.NewValue0(v.Pos, OpSub8, x.Type)
  4075  			v1 := b.NewValue0(v.Pos, OpConst8, x.Type)
  4076  			v1.AuxInt = int8ToAuxInt(c)
  4077  			v0.AddArg2(x, v1)
  4078  			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
  4079  			v2.AuxInt = int8ToAuxInt(d - c)
  4080  			v.AddArg2(v0, v2)
  4081  			return true
  4082  		}
  4083  		break
  4084  	}
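        	// Note: for a strict lower bound the same fusion applies with c replaced by c+1
        	// (c < x is c+1 <= x); the extra "c+1 > c" condition rejects the case where c is the
        	// maximum value of the type and the increment would overflow.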
  4085  	// match: (AndB (Less64 (Const64 [c]) x) (Less64 x (Const64 [d])))
  4086  	// cond: d >= c+1 && c+1 > c
  4087  	// result: (Less64U (Sub64 <x.Type> x (Const64 <x.Type> [c+1])) (Const64 <x.Type> [d-c-1]))
  4088  	for {
  4089  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4090  			if v_0.Op != OpLess64 {
  4091  				continue
  4092  			}
  4093  			x := v_0.Args[1]
  4094  			v_0_0 := v_0.Args[0]
  4095  			if v_0_0.Op != OpConst64 {
  4096  				continue
  4097  			}
  4098  			c := auxIntToInt64(v_0_0.AuxInt)
  4099  			if v_1.Op != OpLess64 {
  4100  				continue
  4101  			}
  4102  			_ = v_1.Args[1]
  4103  			if x != v_1.Args[0] {
  4104  				continue
  4105  			}
  4106  			v_1_1 := v_1.Args[1]
  4107  			if v_1_1.Op != OpConst64 {
  4108  				continue
  4109  			}
  4110  			d := auxIntToInt64(v_1_1.AuxInt)
  4111  			if !(d >= c+1 && c+1 > c) {
  4112  				continue
  4113  			}
  4114  			v.reset(OpLess64U)
  4115  			v0 := b.NewValue0(v.Pos, OpSub64, x.Type)
  4116  			v1 := b.NewValue0(v.Pos, OpConst64, x.Type)
  4117  			v1.AuxInt = int64ToAuxInt(c + 1)
  4118  			v0.AddArg2(x, v1)
  4119  			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
  4120  			v2.AuxInt = int64ToAuxInt(d - c - 1)
  4121  			v.AddArg2(v0, v2)
  4122  			return true
  4123  		}
  4124  		break
  4125  	}
  4126  	// match: (AndB (Less64 (Const64 [c]) x) (Leq64 x (Const64 [d])))
  4127  	// cond: d >= c+1 && c+1 > c
  4128  	// result: (Leq64U (Sub64 <x.Type> x (Const64 <x.Type> [c+1])) (Const64 <x.Type> [d-c-1]))
  4129  	for {
  4130  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4131  			if v_0.Op != OpLess64 {
  4132  				continue
  4133  			}
  4134  			x := v_0.Args[1]
  4135  			v_0_0 := v_0.Args[0]
  4136  			if v_0_0.Op != OpConst64 {
  4137  				continue
  4138  			}
  4139  			c := auxIntToInt64(v_0_0.AuxInt)
  4140  			if v_1.Op != OpLeq64 {
  4141  				continue
  4142  			}
  4143  			_ = v_1.Args[1]
  4144  			if x != v_1.Args[0] {
  4145  				continue
  4146  			}
  4147  			v_1_1 := v_1.Args[1]
  4148  			if v_1_1.Op != OpConst64 {
  4149  				continue
  4150  			}
  4151  			d := auxIntToInt64(v_1_1.AuxInt)
  4152  			if !(d >= c+1 && c+1 > c) {
  4153  				continue
  4154  			}
  4155  			v.reset(OpLeq64U)
  4156  			v0 := b.NewValue0(v.Pos, OpSub64, x.Type)
  4157  			v1 := b.NewValue0(v.Pos, OpConst64, x.Type)
  4158  			v1.AuxInt = int64ToAuxInt(c + 1)
  4159  			v0.AddArg2(x, v1)
  4160  			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
  4161  			v2.AuxInt = int64ToAuxInt(d - c - 1)
  4162  			v.AddArg2(v0, v2)
  4163  			return true
  4164  		}
  4165  		break
  4166  	}
  4167  	// match: (AndB (Less32 (Const32 [c]) x) (Less32 x (Const32 [d])))
  4168  	// cond: d >= c+1 && c+1 > c
  4169  	// result: (Less32U (Sub32 <x.Type> x (Const32 <x.Type> [c+1])) (Const32 <x.Type> [d-c-1]))
  4170  	for {
  4171  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4172  			if v_0.Op != OpLess32 {
  4173  				continue
  4174  			}
  4175  			x := v_0.Args[1]
  4176  			v_0_0 := v_0.Args[0]
  4177  			if v_0_0.Op != OpConst32 {
  4178  				continue
  4179  			}
  4180  			c := auxIntToInt32(v_0_0.AuxInt)
  4181  			if v_1.Op != OpLess32 {
  4182  				continue
  4183  			}
  4184  			_ = v_1.Args[1]
  4185  			if x != v_1.Args[0] {
  4186  				continue
  4187  			}
  4188  			v_1_1 := v_1.Args[1]
  4189  			if v_1_1.Op != OpConst32 {
  4190  				continue
  4191  			}
  4192  			d := auxIntToInt32(v_1_1.AuxInt)
  4193  			if !(d >= c+1 && c+1 > c) {
  4194  				continue
  4195  			}
  4196  			v.reset(OpLess32U)
  4197  			v0 := b.NewValue0(v.Pos, OpSub32, x.Type)
  4198  			v1 := b.NewValue0(v.Pos, OpConst32, x.Type)
  4199  			v1.AuxInt = int32ToAuxInt(c + 1)
  4200  			v0.AddArg2(x, v1)
  4201  			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
  4202  			v2.AuxInt = int32ToAuxInt(d - c - 1)
  4203  			v.AddArg2(v0, v2)
  4204  			return true
  4205  		}
  4206  		break
  4207  	}
  4208  	// match: (AndB (Less32 (Const32 [c]) x) (Leq32 x (Const32 [d])))
  4209  	// cond: d >= c+1 && c+1 > c
  4210  	// result: (Leq32U (Sub32 <x.Type> x (Const32 <x.Type> [c+1])) (Const32 <x.Type> [d-c-1]))
  4211  	for {
  4212  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4213  			if v_0.Op != OpLess32 {
  4214  				continue
  4215  			}
  4216  			x := v_0.Args[1]
  4217  			v_0_0 := v_0.Args[0]
  4218  			if v_0_0.Op != OpConst32 {
  4219  				continue
  4220  			}
  4221  			c := auxIntToInt32(v_0_0.AuxInt)
  4222  			if v_1.Op != OpLeq32 {
  4223  				continue
  4224  			}
  4225  			_ = v_1.Args[1]
  4226  			if x != v_1.Args[0] {
  4227  				continue
  4228  			}
  4229  			v_1_1 := v_1.Args[1]
  4230  			if v_1_1.Op != OpConst32 {
  4231  				continue
  4232  			}
  4233  			d := auxIntToInt32(v_1_1.AuxInt)
  4234  			if !(d >= c+1 && c+1 > c) {
  4235  				continue
  4236  			}
  4237  			v.reset(OpLeq32U)
  4238  			v0 := b.NewValue0(v.Pos, OpSub32, x.Type)
  4239  			v1 := b.NewValue0(v.Pos, OpConst32, x.Type)
  4240  			v1.AuxInt = int32ToAuxInt(c + 1)
  4241  			v0.AddArg2(x, v1)
  4242  			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
  4243  			v2.AuxInt = int32ToAuxInt(d - c - 1)
  4244  			v.AddArg2(v0, v2)
  4245  			return true
  4246  		}
  4247  		break
  4248  	}
  4249  	// match: (AndB (Less16 (Const16 [c]) x) (Less16 x (Const16 [d])))
  4250  	// cond: d >= c+1 && c+1 > c
  4251  	// result: (Less16U (Sub16 <x.Type> x (Const16 <x.Type> [c+1])) (Const16 <x.Type> [d-c-1]))
  4252  	for {
  4253  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4254  			if v_0.Op != OpLess16 {
  4255  				continue
  4256  			}
  4257  			x := v_0.Args[1]
  4258  			v_0_0 := v_0.Args[0]
  4259  			if v_0_0.Op != OpConst16 {
  4260  				continue
  4261  			}
  4262  			c := auxIntToInt16(v_0_0.AuxInt)
  4263  			if v_1.Op != OpLess16 {
  4264  				continue
  4265  			}
  4266  			_ = v_1.Args[1]
  4267  			if x != v_1.Args[0] {
  4268  				continue
  4269  			}
  4270  			v_1_1 := v_1.Args[1]
  4271  			if v_1_1.Op != OpConst16 {
  4272  				continue
  4273  			}
  4274  			d := auxIntToInt16(v_1_1.AuxInt)
  4275  			if !(d >= c+1 && c+1 > c) {
  4276  				continue
  4277  			}
  4278  			v.reset(OpLess16U)
  4279  			v0 := b.NewValue0(v.Pos, OpSub16, x.Type)
  4280  			v1 := b.NewValue0(v.Pos, OpConst16, x.Type)
  4281  			v1.AuxInt = int16ToAuxInt(c + 1)
  4282  			v0.AddArg2(x, v1)
  4283  			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
  4284  			v2.AuxInt = int16ToAuxInt(d - c - 1)
  4285  			v.AddArg2(v0, v2)
  4286  			return true
  4287  		}
  4288  		break
  4289  	}
  4290  	// match: (AndB (Less16 (Const16 [c]) x) (Leq16 x (Const16 [d])))
  4291  	// cond: d >= c+1 && c+1 > c
  4292  	// result: (Leq16U (Sub16 <x.Type> x (Const16 <x.Type> [c+1])) (Const16 <x.Type> [d-c-1]))
  4293  	for {
  4294  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4295  			if v_0.Op != OpLess16 {
  4296  				continue
  4297  			}
  4298  			x := v_0.Args[1]
  4299  			v_0_0 := v_0.Args[0]
  4300  			if v_0_0.Op != OpConst16 {
  4301  				continue
  4302  			}
  4303  			c := auxIntToInt16(v_0_0.AuxInt)
  4304  			if v_1.Op != OpLeq16 {
  4305  				continue
  4306  			}
  4307  			_ = v_1.Args[1]
  4308  			if x != v_1.Args[0] {
  4309  				continue
  4310  			}
  4311  			v_1_1 := v_1.Args[1]
  4312  			if v_1_1.Op != OpConst16 {
  4313  				continue
  4314  			}
  4315  			d := auxIntToInt16(v_1_1.AuxInt)
  4316  			if !(d >= c+1 && c+1 > c) {
  4317  				continue
  4318  			}
  4319  			v.reset(OpLeq16U)
  4320  			v0 := b.NewValue0(v.Pos, OpSub16, x.Type)
  4321  			v1 := b.NewValue0(v.Pos, OpConst16, x.Type)
  4322  			v1.AuxInt = int16ToAuxInt(c + 1)
  4323  			v0.AddArg2(x, v1)
  4324  			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
  4325  			v2.AuxInt = int16ToAuxInt(d - c - 1)
  4326  			v.AddArg2(v0, v2)
  4327  			return true
  4328  		}
  4329  		break
  4330  	}
  4331  	// match: (AndB (Less8 (Const8 [c]) x) (Less8 x (Const8 [d])))
  4332  	// cond: d >= c+1 && c+1 > c
  4333  	// result: (Less8U (Sub8 <x.Type> x (Const8 <x.Type> [c+1])) (Const8 <x.Type> [d-c-1]))
  4334  	for {
  4335  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4336  			if v_0.Op != OpLess8 {
  4337  				continue
  4338  			}
  4339  			x := v_0.Args[1]
  4340  			v_0_0 := v_0.Args[0]
  4341  			if v_0_0.Op != OpConst8 {
  4342  				continue
  4343  			}
  4344  			c := auxIntToInt8(v_0_0.AuxInt)
  4345  			if v_1.Op != OpLess8 {
  4346  				continue
  4347  			}
  4348  			_ = v_1.Args[1]
  4349  			if x != v_1.Args[0] {
  4350  				continue
  4351  			}
  4352  			v_1_1 := v_1.Args[1]
  4353  			if v_1_1.Op != OpConst8 {
  4354  				continue
  4355  			}
  4356  			d := auxIntToInt8(v_1_1.AuxInt)
  4357  			if !(d >= c+1 && c+1 > c) {
  4358  				continue
  4359  			}
  4360  			v.reset(OpLess8U)
  4361  			v0 := b.NewValue0(v.Pos, OpSub8, x.Type)
  4362  			v1 := b.NewValue0(v.Pos, OpConst8, x.Type)
  4363  			v1.AuxInt = int8ToAuxInt(c + 1)
  4364  			v0.AddArg2(x, v1)
  4365  			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
  4366  			v2.AuxInt = int8ToAuxInt(d - c - 1)
  4367  			v.AddArg2(v0, v2)
  4368  			return true
  4369  		}
  4370  		break
  4371  	}
  4372  	// match: (AndB (Less8 (Const8 [c]) x) (Leq8 x (Const8 [d])))
  4373  	// cond: d >= c+1 && c+1 > c
  4374  	// result: (Leq8U (Sub8 <x.Type> x (Const8 <x.Type> [c+1])) (Const8 <x.Type> [d-c-1]))
  4375  	for {
  4376  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4377  			if v_0.Op != OpLess8 {
  4378  				continue
  4379  			}
  4380  			x := v_0.Args[1]
  4381  			v_0_0 := v_0.Args[0]
  4382  			if v_0_0.Op != OpConst8 {
  4383  				continue
  4384  			}
  4385  			c := auxIntToInt8(v_0_0.AuxInt)
  4386  			if v_1.Op != OpLeq8 {
  4387  				continue
  4388  			}
  4389  			_ = v_1.Args[1]
  4390  			if x != v_1.Args[0] {
  4391  				continue
  4392  			}
  4393  			v_1_1 := v_1.Args[1]
  4394  			if v_1_1.Op != OpConst8 {
  4395  				continue
  4396  			}
  4397  			d := auxIntToInt8(v_1_1.AuxInt)
  4398  			if !(d >= c+1 && c+1 > c) {
  4399  				continue
  4400  			}
  4401  			v.reset(OpLeq8U)
  4402  			v0 := b.NewValue0(v.Pos, OpSub8, x.Type)
  4403  			v1 := b.NewValue0(v.Pos, OpConst8, x.Type)
  4404  			v1.AuxInt = int8ToAuxInt(c + 1)
  4405  			v0.AddArg2(x, v1)
  4406  			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
  4407  			v2.AuxInt = int8ToAuxInt(d - c - 1)
  4408  			v.AddArg2(v0, v2)
  4409  			return true
  4410  		}
  4411  		break
  4412  	}
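        	// Note: the remaining AndB rules are the unsigned counterparts of the rules above;
        	// their conditions compare c and d as unsigned values, and the strict lower-bound
        	// forms again shift to c+1, guarded against wraparound.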
  4413  	// match: (AndB (Leq64U (Const64 [c]) x) (Less64U x (Const64 [d])))
  4414  	// cond: uint64(d) >= uint64(c)
  4415  	// result: (Less64U (Sub64 <x.Type> x (Const64 <x.Type> [c])) (Const64 <x.Type> [d-c]))
  4416  	for {
  4417  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4418  			if v_0.Op != OpLeq64U {
  4419  				continue
  4420  			}
  4421  			x := v_0.Args[1]
  4422  			v_0_0 := v_0.Args[0]
  4423  			if v_0_0.Op != OpConst64 {
  4424  				continue
  4425  			}
  4426  			c := auxIntToInt64(v_0_0.AuxInt)
  4427  			if v_1.Op != OpLess64U {
  4428  				continue
  4429  			}
  4430  			_ = v_1.Args[1]
  4431  			if x != v_1.Args[0] {
  4432  				continue
  4433  			}
  4434  			v_1_1 := v_1.Args[1]
  4435  			if v_1_1.Op != OpConst64 {
  4436  				continue
  4437  			}
  4438  			d := auxIntToInt64(v_1_1.AuxInt)
  4439  			if !(uint64(d) >= uint64(c)) {
  4440  				continue
  4441  			}
  4442  			v.reset(OpLess64U)
  4443  			v0 := b.NewValue0(v.Pos, OpSub64, x.Type)
  4444  			v1 := b.NewValue0(v.Pos, OpConst64, x.Type)
  4445  			v1.AuxInt = int64ToAuxInt(c)
  4446  			v0.AddArg2(x, v1)
  4447  			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
  4448  			v2.AuxInt = int64ToAuxInt(d - c)
  4449  			v.AddArg2(v0, v2)
  4450  			return true
  4451  		}
  4452  		break
  4453  	}
  4454  	// match: (AndB (Leq64U (Const64 [c]) x) (Leq64U x (Const64 [d])))
  4455  	// cond: uint64(d) >= uint64(c)
  4456  	// result: (Leq64U (Sub64 <x.Type> x (Const64 <x.Type> [c])) (Const64 <x.Type> [d-c]))
  4457  	for {
  4458  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4459  			if v_0.Op != OpLeq64U {
  4460  				continue
  4461  			}
  4462  			x := v_0.Args[1]
  4463  			v_0_0 := v_0.Args[0]
  4464  			if v_0_0.Op != OpConst64 {
  4465  				continue
  4466  			}
  4467  			c := auxIntToInt64(v_0_0.AuxInt)
  4468  			if v_1.Op != OpLeq64U {
  4469  				continue
  4470  			}
  4471  			_ = v_1.Args[1]
  4472  			if x != v_1.Args[0] {
  4473  				continue
  4474  			}
  4475  			v_1_1 := v_1.Args[1]
  4476  			if v_1_1.Op != OpConst64 {
  4477  				continue
  4478  			}
  4479  			d := auxIntToInt64(v_1_1.AuxInt)
  4480  			if !(uint64(d) >= uint64(c)) {
  4481  				continue
  4482  			}
  4483  			v.reset(OpLeq64U)
  4484  			v0 := b.NewValue0(v.Pos, OpSub64, x.Type)
  4485  			v1 := b.NewValue0(v.Pos, OpConst64, x.Type)
  4486  			v1.AuxInt = int64ToAuxInt(c)
  4487  			v0.AddArg2(x, v1)
  4488  			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
  4489  			v2.AuxInt = int64ToAuxInt(d - c)
  4490  			v.AddArg2(v0, v2)
  4491  			return true
  4492  		}
  4493  		break
  4494  	}
  4495  	// match: (AndB (Leq32U (Const32 [c]) x) (Less32U x (Const32 [d])))
  4496  	// cond: uint32(d) >= uint32(c)
  4497  	// result: (Less32U (Sub32 <x.Type> x (Const32 <x.Type> [c])) (Const32 <x.Type> [d-c]))
  4498  	for {
  4499  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4500  			if v_0.Op != OpLeq32U {
  4501  				continue
  4502  			}
  4503  			x := v_0.Args[1]
  4504  			v_0_0 := v_0.Args[0]
  4505  			if v_0_0.Op != OpConst32 {
  4506  				continue
  4507  			}
  4508  			c := auxIntToInt32(v_0_0.AuxInt)
  4509  			if v_1.Op != OpLess32U {
  4510  				continue
  4511  			}
  4512  			_ = v_1.Args[1]
  4513  			if x != v_1.Args[0] {
  4514  				continue
  4515  			}
  4516  			v_1_1 := v_1.Args[1]
  4517  			if v_1_1.Op != OpConst32 {
  4518  				continue
  4519  			}
  4520  			d := auxIntToInt32(v_1_1.AuxInt)
  4521  			if !(uint32(d) >= uint32(c)) {
  4522  				continue
  4523  			}
  4524  			v.reset(OpLess32U)
  4525  			v0 := b.NewValue0(v.Pos, OpSub32, x.Type)
  4526  			v1 := b.NewValue0(v.Pos, OpConst32, x.Type)
  4527  			v1.AuxInt = int32ToAuxInt(c)
  4528  			v0.AddArg2(x, v1)
  4529  			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
  4530  			v2.AuxInt = int32ToAuxInt(d - c)
  4531  			v.AddArg2(v0, v2)
  4532  			return true
  4533  		}
  4534  		break
  4535  	}
  4536  	// match: (AndB (Leq32U (Const32 [c]) x) (Leq32U x (Const32 [d])))
  4537  	// cond: uint32(d) >= uint32(c)
  4538  	// result: (Leq32U (Sub32 <x.Type> x (Const32 <x.Type> [c])) (Const32 <x.Type> [d-c]))
  4539  	for {
  4540  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4541  			if v_0.Op != OpLeq32U {
  4542  				continue
  4543  			}
  4544  			x := v_0.Args[1]
  4545  			v_0_0 := v_0.Args[0]
  4546  			if v_0_0.Op != OpConst32 {
  4547  				continue
  4548  			}
  4549  			c := auxIntToInt32(v_0_0.AuxInt)
  4550  			if v_1.Op != OpLeq32U {
  4551  				continue
  4552  			}
  4553  			_ = v_1.Args[1]
  4554  			if x != v_1.Args[0] {
  4555  				continue
  4556  			}
  4557  			v_1_1 := v_1.Args[1]
  4558  			if v_1_1.Op != OpConst32 {
  4559  				continue
  4560  			}
  4561  			d := auxIntToInt32(v_1_1.AuxInt)
  4562  			if !(uint32(d) >= uint32(c)) {
  4563  				continue
  4564  			}
  4565  			v.reset(OpLeq32U)
  4566  			v0 := b.NewValue0(v.Pos, OpSub32, x.Type)
  4567  			v1 := b.NewValue0(v.Pos, OpConst32, x.Type)
  4568  			v1.AuxInt = int32ToAuxInt(c)
  4569  			v0.AddArg2(x, v1)
  4570  			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
  4571  			v2.AuxInt = int32ToAuxInt(d - c)
  4572  			v.AddArg2(v0, v2)
  4573  			return true
  4574  		}
  4575  		break
  4576  	}
  4577  	// match: (AndB (Leq16U (Const16 [c]) x) (Less16U x (Const16 [d])))
  4578  	// cond: uint16(d) >= uint16(c)
  4579  	// result: (Less16U (Sub16 <x.Type> x (Const16 <x.Type> [c])) (Const16 <x.Type> [d-c]))
  4580  	for {
  4581  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4582  			if v_0.Op != OpLeq16U {
  4583  				continue
  4584  			}
  4585  			x := v_0.Args[1]
  4586  			v_0_0 := v_0.Args[0]
  4587  			if v_0_0.Op != OpConst16 {
  4588  				continue
  4589  			}
  4590  			c := auxIntToInt16(v_0_0.AuxInt)
  4591  			if v_1.Op != OpLess16U {
  4592  				continue
  4593  			}
  4594  			_ = v_1.Args[1]
  4595  			if x != v_1.Args[0] {
  4596  				continue
  4597  			}
  4598  			v_1_1 := v_1.Args[1]
  4599  			if v_1_1.Op != OpConst16 {
  4600  				continue
  4601  			}
  4602  			d := auxIntToInt16(v_1_1.AuxInt)
  4603  			if !(uint16(d) >= uint16(c)) {
  4604  				continue
  4605  			}
  4606  			v.reset(OpLess16U)
  4607  			v0 := b.NewValue0(v.Pos, OpSub16, x.Type)
  4608  			v1 := b.NewValue0(v.Pos, OpConst16, x.Type)
  4609  			v1.AuxInt = int16ToAuxInt(c)
  4610  			v0.AddArg2(x, v1)
  4611  			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
  4612  			v2.AuxInt = int16ToAuxInt(d - c)
  4613  			v.AddArg2(v0, v2)
  4614  			return true
  4615  		}
  4616  		break
  4617  	}
  4618  	// match: (AndB (Leq16U (Const16 [c]) x) (Leq16U x (Const16 [d])))
  4619  	// cond: uint16(d) >= uint16(c)
  4620  	// result: (Leq16U (Sub16 <x.Type> x (Const16 <x.Type> [c])) (Const16 <x.Type> [d-c]))
  4621  	for {
  4622  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4623  			if v_0.Op != OpLeq16U {
  4624  				continue
  4625  			}
  4626  			x := v_0.Args[1]
  4627  			v_0_0 := v_0.Args[0]
  4628  			if v_0_0.Op != OpConst16 {
  4629  				continue
  4630  			}
  4631  			c := auxIntToInt16(v_0_0.AuxInt)
  4632  			if v_1.Op != OpLeq16U {
  4633  				continue
  4634  			}
  4635  			_ = v_1.Args[1]
  4636  			if x != v_1.Args[0] {
  4637  				continue
  4638  			}
  4639  			v_1_1 := v_1.Args[1]
  4640  			if v_1_1.Op != OpConst16 {
  4641  				continue
  4642  			}
  4643  			d := auxIntToInt16(v_1_1.AuxInt)
  4644  			if !(uint16(d) >= uint16(c)) {
  4645  				continue
  4646  			}
  4647  			v.reset(OpLeq16U)
  4648  			v0 := b.NewValue0(v.Pos, OpSub16, x.Type)
  4649  			v1 := b.NewValue0(v.Pos, OpConst16, x.Type)
  4650  			v1.AuxInt = int16ToAuxInt(c)
  4651  			v0.AddArg2(x, v1)
  4652  			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
  4653  			v2.AuxInt = int16ToAuxInt(d - c)
  4654  			v.AddArg2(v0, v2)
  4655  			return true
  4656  		}
  4657  		break
  4658  	}
  4659  	// match: (AndB (Leq8U (Const8 [c]) x) (Less8U x (Const8 [d])))
  4660  	// cond: uint8(d) >= uint8(c)
  4661  	// result: (Less8U (Sub8 <x.Type> x (Const8 <x.Type> [c])) (Const8 <x.Type> [d-c]))
  4662  	for {
  4663  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4664  			if v_0.Op != OpLeq8U {
  4665  				continue
  4666  			}
  4667  			x := v_0.Args[1]
  4668  			v_0_0 := v_0.Args[0]
  4669  			if v_0_0.Op != OpConst8 {
  4670  				continue
  4671  			}
  4672  			c := auxIntToInt8(v_0_0.AuxInt)
  4673  			if v_1.Op != OpLess8U {
  4674  				continue
  4675  			}
  4676  			_ = v_1.Args[1]
  4677  			if x != v_1.Args[0] {
  4678  				continue
  4679  			}
  4680  			v_1_1 := v_1.Args[1]
  4681  			if v_1_1.Op != OpConst8 {
  4682  				continue
  4683  			}
  4684  			d := auxIntToInt8(v_1_1.AuxInt)
  4685  			if !(uint8(d) >= uint8(c)) {
  4686  				continue
  4687  			}
  4688  			v.reset(OpLess8U)
  4689  			v0 := b.NewValue0(v.Pos, OpSub8, x.Type)
  4690  			v1 := b.NewValue0(v.Pos, OpConst8, x.Type)
  4691  			v1.AuxInt = int8ToAuxInt(c)
  4692  			v0.AddArg2(x, v1)
  4693  			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
  4694  			v2.AuxInt = int8ToAuxInt(d - c)
  4695  			v.AddArg2(v0, v2)
  4696  			return true
  4697  		}
  4698  		break
  4699  	}
  4700  	// match: (AndB (Leq8U (Const8 [c]) x) (Leq8U x (Const8 [d])))
  4701  	// cond: uint8(d) >= uint8(c)
  4702  	// result: (Leq8U (Sub8 <x.Type> x (Const8 <x.Type> [c])) (Const8 <x.Type> [d-c]))
  4703  	for {
  4704  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4705  			if v_0.Op != OpLeq8U {
  4706  				continue
  4707  			}
  4708  			x := v_0.Args[1]
  4709  			v_0_0 := v_0.Args[0]
  4710  			if v_0_0.Op != OpConst8 {
  4711  				continue
  4712  			}
  4713  			c := auxIntToInt8(v_0_0.AuxInt)
  4714  			if v_1.Op != OpLeq8U {
  4715  				continue
  4716  			}
  4717  			_ = v_1.Args[1]
  4718  			if x != v_1.Args[0] {
  4719  				continue
  4720  			}
  4721  			v_1_1 := v_1.Args[1]
  4722  			if v_1_1.Op != OpConst8 {
  4723  				continue
  4724  			}
  4725  			d := auxIntToInt8(v_1_1.AuxInt)
  4726  			if !(uint8(d) >= uint8(c)) {
  4727  				continue
  4728  			}
  4729  			v.reset(OpLeq8U)
  4730  			v0 := b.NewValue0(v.Pos, OpSub8, x.Type)
  4731  			v1 := b.NewValue0(v.Pos, OpConst8, x.Type)
  4732  			v1.AuxInt = int8ToAuxInt(c)
  4733  			v0.AddArg2(x, v1)
  4734  			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
  4735  			v2.AuxInt = int8ToAuxInt(d - c)
  4736  			v.AddArg2(v0, v2)
  4737  			return true
  4738  		}
  4739  		break
  4740  	}
  4741  	// match: (AndB (Less64U (Const64 [c]) x) (Less64U x (Const64 [d])))
  4742  	// cond: uint64(d) >= uint64(c+1) && uint64(c+1) > uint64(c)
  4743  	// result: (Less64U (Sub64 <x.Type> x (Const64 <x.Type> [c+1])) (Const64 <x.Type> [d-c-1]))
  4744  	for {
  4745  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4746  			if v_0.Op != OpLess64U {
  4747  				continue
  4748  			}
  4749  			x := v_0.Args[1]
  4750  			v_0_0 := v_0.Args[0]
  4751  			if v_0_0.Op != OpConst64 {
  4752  				continue
  4753  			}
  4754  			c := auxIntToInt64(v_0_0.AuxInt)
  4755  			if v_1.Op != OpLess64U {
  4756  				continue
  4757  			}
  4758  			_ = v_1.Args[1]
  4759  			if x != v_1.Args[0] {
  4760  				continue
  4761  			}
  4762  			v_1_1 := v_1.Args[1]
  4763  			if v_1_1.Op != OpConst64 {
  4764  				continue
  4765  			}
  4766  			d := auxIntToInt64(v_1_1.AuxInt)
  4767  			if !(uint64(d) >= uint64(c+1) && uint64(c+1) > uint64(c)) {
  4768  				continue
  4769  			}
  4770  			v.reset(OpLess64U)
  4771  			v0 := b.NewValue0(v.Pos, OpSub64, x.Type)
  4772  			v1 := b.NewValue0(v.Pos, OpConst64, x.Type)
  4773  			v1.AuxInt = int64ToAuxInt(c + 1)
  4774  			v0.AddArg2(x, v1)
  4775  			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
  4776  			v2.AuxInt = int64ToAuxInt(d - c - 1)
  4777  			v.AddArg2(v0, v2)
  4778  			return true
  4779  		}
  4780  		break
  4781  	}
  4782  	// match: (AndB (Less64U (Const64 [c]) x) (Leq64U x (Const64 [d])))
  4783  	// cond: uint64(d) >= uint64(c+1) && uint64(c+1) > uint64(c)
  4784  	// result: (Leq64U (Sub64 <x.Type> x (Const64 <x.Type> [c+1])) (Const64 <x.Type> [d-c-1]))
  4785  	for {
  4786  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4787  			if v_0.Op != OpLess64U {
  4788  				continue
  4789  			}
  4790  			x := v_0.Args[1]
  4791  			v_0_0 := v_0.Args[0]
  4792  			if v_0_0.Op != OpConst64 {
  4793  				continue
  4794  			}
  4795  			c := auxIntToInt64(v_0_0.AuxInt)
  4796  			if v_1.Op != OpLeq64U {
  4797  				continue
  4798  			}
  4799  			_ = v_1.Args[1]
  4800  			if x != v_1.Args[0] {
  4801  				continue
  4802  			}
  4803  			v_1_1 := v_1.Args[1]
  4804  			if v_1_1.Op != OpConst64 {
  4805  				continue
  4806  			}
  4807  			d := auxIntToInt64(v_1_1.AuxInt)
  4808  			if !(uint64(d) >= uint64(c+1) && uint64(c+1) > uint64(c)) {
  4809  				continue
  4810  			}
  4811  			v.reset(OpLeq64U)
  4812  			v0 := b.NewValue0(v.Pos, OpSub64, x.Type)
  4813  			v1 := b.NewValue0(v.Pos, OpConst64, x.Type)
  4814  			v1.AuxInt = int64ToAuxInt(c + 1)
  4815  			v0.AddArg2(x, v1)
  4816  			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
  4817  			v2.AuxInt = int64ToAuxInt(d - c - 1)
  4818  			v.AddArg2(v0, v2)
  4819  			return true
  4820  		}
  4821  		break
  4822  	}
  4823  	// match: (AndB (Less32U (Const32 [c]) x) (Less32U x (Const32 [d])))
  4824  	// cond: uint32(d) >= uint32(c+1) && uint32(c+1) > uint32(c)
  4825  	// result: (Less32U (Sub32 <x.Type> x (Const32 <x.Type> [c+1])) (Const32 <x.Type> [d-c-1]))
  4826  	for {
  4827  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4828  			if v_0.Op != OpLess32U {
  4829  				continue
  4830  			}
  4831  			x := v_0.Args[1]
  4832  			v_0_0 := v_0.Args[0]
  4833  			if v_0_0.Op != OpConst32 {
  4834  				continue
  4835  			}
  4836  			c := auxIntToInt32(v_0_0.AuxInt)
  4837  			if v_1.Op != OpLess32U {
  4838  				continue
  4839  			}
  4840  			_ = v_1.Args[1]
  4841  			if x != v_1.Args[0] {
  4842  				continue
  4843  			}
  4844  			v_1_1 := v_1.Args[1]
  4845  			if v_1_1.Op != OpConst32 {
  4846  				continue
  4847  			}
  4848  			d := auxIntToInt32(v_1_1.AuxInt)
  4849  			if !(uint32(d) >= uint32(c+1) && uint32(c+1) > uint32(c)) {
  4850  				continue
  4851  			}
  4852  			v.reset(OpLess32U)
  4853  			v0 := b.NewValue0(v.Pos, OpSub32, x.Type)
  4854  			v1 := b.NewValue0(v.Pos, OpConst32, x.Type)
  4855  			v1.AuxInt = int32ToAuxInt(c + 1)
  4856  			v0.AddArg2(x, v1)
  4857  			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
  4858  			v2.AuxInt = int32ToAuxInt(d - c - 1)
  4859  			v.AddArg2(v0, v2)
  4860  			return true
  4861  		}
  4862  		break
  4863  	}
  4864  	// match: (AndB (Less32U (Const32 [c]) x) (Leq32U x (Const32 [d])))
  4865  	// cond: uint32(d) >= uint32(c+1) && uint32(c+1) > uint32(c)
  4866  	// result: (Leq32U (Sub32 <x.Type> x (Const32 <x.Type> [c+1])) (Const32 <x.Type> [d-c-1]))
  4867  	for {
  4868  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4869  			if v_0.Op != OpLess32U {
  4870  				continue
  4871  			}
  4872  			x := v_0.Args[1]
  4873  			v_0_0 := v_0.Args[0]
  4874  			if v_0_0.Op != OpConst32 {
  4875  				continue
  4876  			}
  4877  			c := auxIntToInt32(v_0_0.AuxInt)
  4878  			if v_1.Op != OpLeq32U {
  4879  				continue
  4880  			}
  4881  			_ = v_1.Args[1]
  4882  			if x != v_1.Args[0] {
  4883  				continue
  4884  			}
  4885  			v_1_1 := v_1.Args[1]
  4886  			if v_1_1.Op != OpConst32 {
  4887  				continue
  4888  			}
  4889  			d := auxIntToInt32(v_1_1.AuxInt)
  4890  			if !(uint32(d) >= uint32(c+1) && uint32(c+1) > uint32(c)) {
  4891  				continue
  4892  			}
  4893  			v.reset(OpLeq32U)
  4894  			v0 := b.NewValue0(v.Pos, OpSub32, x.Type)
  4895  			v1 := b.NewValue0(v.Pos, OpConst32, x.Type)
  4896  			v1.AuxInt = int32ToAuxInt(c + 1)
  4897  			v0.AddArg2(x, v1)
  4898  			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
  4899  			v2.AuxInt = int32ToAuxInt(d - c - 1)
  4900  			v.AddArg2(v0, v2)
  4901  			return true
  4902  		}
  4903  		break
  4904  	}
  4905  	// match: (AndB (Less16U (Const16 [c]) x) (Less16U x (Const16 [d])))
  4906  	// cond: uint16(d) >= uint16(c+1) && uint16(c+1) > uint16(c)
  4907  	// result: (Less16U (Sub16 <x.Type> x (Const16 <x.Type> [c+1])) (Const16 <x.Type> [d-c-1]))
  4908  	for {
  4909  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4910  			if v_0.Op != OpLess16U {
  4911  				continue
  4912  			}
  4913  			x := v_0.Args[1]
  4914  			v_0_0 := v_0.Args[0]
  4915  			if v_0_0.Op != OpConst16 {
  4916  				continue
  4917  			}
  4918  			c := auxIntToInt16(v_0_0.AuxInt)
  4919  			if v_1.Op != OpLess16U {
  4920  				continue
  4921  			}
  4922  			_ = v_1.Args[1]
  4923  			if x != v_1.Args[0] {
  4924  				continue
  4925  			}
  4926  			v_1_1 := v_1.Args[1]
  4927  			if v_1_1.Op != OpConst16 {
  4928  				continue
  4929  			}
  4930  			d := auxIntToInt16(v_1_1.AuxInt)
  4931  			if !(uint16(d) >= uint16(c+1) && uint16(c+1) > uint16(c)) {
  4932  				continue
  4933  			}
  4934  			v.reset(OpLess16U)
  4935  			v0 := b.NewValue0(v.Pos, OpSub16, x.Type)
  4936  			v1 := b.NewValue0(v.Pos, OpConst16, x.Type)
  4937  			v1.AuxInt = int16ToAuxInt(c + 1)
  4938  			v0.AddArg2(x, v1)
  4939  			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
  4940  			v2.AuxInt = int16ToAuxInt(d - c - 1)
  4941  			v.AddArg2(v0, v2)
  4942  			return true
  4943  		}
  4944  		break
  4945  	}
  4946  	// match: (AndB (Less16U (Const16 [c]) x) (Leq16U x (Const16 [d])))
  4947  	// cond: uint16(d) >= uint16(c+1) && uint16(c+1) > uint16(c)
  4948  	// result: (Leq16U (Sub16 <x.Type> x (Const16 <x.Type> [c+1])) (Const16 <x.Type> [d-c-1]))
  4949  	for {
  4950  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4951  			if v_0.Op != OpLess16U {
  4952  				continue
  4953  			}
  4954  			x := v_0.Args[1]
  4955  			v_0_0 := v_0.Args[0]
  4956  			if v_0_0.Op != OpConst16 {
  4957  				continue
  4958  			}
  4959  			c := auxIntToInt16(v_0_0.AuxInt)
  4960  			if v_1.Op != OpLeq16U {
  4961  				continue
  4962  			}
  4963  			_ = v_1.Args[1]
  4964  			if x != v_1.Args[0] {
  4965  				continue
  4966  			}
  4967  			v_1_1 := v_1.Args[1]
  4968  			if v_1_1.Op != OpConst16 {
  4969  				continue
  4970  			}
  4971  			d := auxIntToInt16(v_1_1.AuxInt)
  4972  			if !(uint16(d) >= uint16(c+1) && uint16(c+1) > uint16(c)) {
  4973  				continue
  4974  			}
  4975  			v.reset(OpLeq16U)
  4976  			v0 := b.NewValue0(v.Pos, OpSub16, x.Type)
  4977  			v1 := b.NewValue0(v.Pos, OpConst16, x.Type)
  4978  			v1.AuxInt = int16ToAuxInt(c + 1)
  4979  			v0.AddArg2(x, v1)
  4980  			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
  4981  			v2.AuxInt = int16ToAuxInt(d - c - 1)
  4982  			v.AddArg2(v0, v2)
  4983  			return true
  4984  		}
  4985  		break
  4986  	}
  4987  	// match: (AndB (Less8U (Const8 [c]) x) (Less8U x (Const8 [d])))
  4988  	// cond: uint8(d) >= uint8(c+1) && uint8(c+1) > uint8(c)
  4989  	// result: (Less8U (Sub8 <x.Type> x (Const8 <x.Type> [c+1])) (Const8 <x.Type> [d-c-1]))
  4990  	for {
  4991  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4992  			if v_0.Op != OpLess8U {
  4993  				continue
  4994  			}
  4995  			x := v_0.Args[1]
  4996  			v_0_0 := v_0.Args[0]
  4997  			if v_0_0.Op != OpConst8 {
  4998  				continue
  4999  			}
  5000  			c := auxIntToInt8(v_0_0.AuxInt)
  5001  			if v_1.Op != OpLess8U {
  5002  				continue
  5003  			}
  5004  			_ = v_1.Args[1]
  5005  			if x != v_1.Args[0] {
  5006  				continue
  5007  			}
  5008  			v_1_1 := v_1.Args[1]
  5009  			if v_1_1.Op != OpConst8 {
  5010  				continue
  5011  			}
  5012  			d := auxIntToInt8(v_1_1.AuxInt)
  5013  			if !(uint8(d) >= uint8(c+1) && uint8(c+1) > uint8(c)) {
  5014  				continue
  5015  			}
  5016  			v.reset(OpLess8U)
  5017  			v0 := b.NewValue0(v.Pos, OpSub8, x.Type)
  5018  			v1 := b.NewValue0(v.Pos, OpConst8, x.Type)
  5019  			v1.AuxInt = int8ToAuxInt(c + 1)
  5020  			v0.AddArg2(x, v1)
  5021  			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
  5022  			v2.AuxInt = int8ToAuxInt(d - c - 1)
  5023  			v.AddArg2(v0, v2)
  5024  			return true
  5025  		}
  5026  		break
  5027  	}
  5028  	// match: (AndB (Less8U (Const8 [c]) x) (Leq8U x (Const8 [d])))
  5029  	// cond: uint8(d) >= uint8(c+1) && uint8(c+1) > uint8(c)
  5030  	// result: (Leq8U (Sub8 <x.Type> x (Const8 <x.Type> [c+1])) (Const8 <x.Type> [d-c-1]))
  5031  	for {
  5032  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  5033  			if v_0.Op != OpLess8U {
  5034  				continue
  5035  			}
  5036  			x := v_0.Args[1]
  5037  			v_0_0 := v_0.Args[0]
  5038  			if v_0_0.Op != OpConst8 {
  5039  				continue
  5040  			}
  5041  			c := auxIntToInt8(v_0_0.AuxInt)
  5042  			if v_1.Op != OpLeq8U {
  5043  				continue
  5044  			}
  5045  			_ = v_1.Args[1]
  5046  			if x != v_1.Args[0] {
  5047  				continue
  5048  			}
  5049  			v_1_1 := v_1.Args[1]
  5050  			if v_1_1.Op != OpConst8 {
  5051  				continue
  5052  			}
  5053  			d := auxIntToInt8(v_1_1.AuxInt)
  5054  			if !(uint8(d) >= uint8(c+1) && uint8(c+1) > uint8(c)) {
  5055  				continue
  5056  			}
  5057  			v.reset(OpLeq8U)
  5058  			v0 := b.NewValue0(v.Pos, OpSub8, x.Type)
  5059  			v1 := b.NewValue0(v.Pos, OpConst8, x.Type)
  5060  			v1.AuxInt = int8ToAuxInt(c + 1)
  5061  			v0.AddArg2(x, v1)
  5062  			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
  5063  			v2.AuxInt = int8ToAuxInt(d - c - 1)
  5064  			v.AddArg2(v0, v2)
  5065  			return true
  5066  		}
  5067  		break
  5068  	}
  5069  	return false
  5070  }
  5071  func rewriteValuegeneric_OpArraySelect(v *Value) bool {
  5072  	v_0 := v.Args[0]
  5073  	// match: (ArraySelect (ArrayMake1 x))
  5074  	// result: x
  5075  	for {
  5076  		if v_0.Op != OpArrayMake1 {
  5077  			break
  5078  		}
  5079  		x := v_0.Args[0]
  5080  		v.copyOf(x)
  5081  		return true
  5082  	}
  5083  	// match: (ArraySelect [0] (IData x))
  5084  	// result: (IData x)
  5085  	for {
  5086  		if auxIntToInt64(v.AuxInt) != 0 || v_0.Op != OpIData {
  5087  			break
  5088  		}
  5089  		x := v_0.Args[0]
  5090  		v.reset(OpIData)
  5091  		v.AddArg(x)
  5092  		return true
  5093  	}
  5094  	return false
  5095  }
  5096  func rewriteValuegeneric_OpCeil(v *Value) bool {
  5097  	v_0 := v.Args[0]
  5098  	// match: (Ceil (Const64F [c]))
  5099  	// result: (Const64F [math.Ceil(c)])
  5100  	for {
  5101  		if v_0.Op != OpConst64F {
  5102  			break
  5103  		}
  5104  		c := auxIntToFloat64(v_0.AuxInt)
  5105  		v.reset(OpConst64F)
  5106  		v.AuxInt = float64ToAuxInt(math.Ceil(c))
  5107  		return true
  5108  	}
  5109  	return false
  5110  }
  5111  func rewriteValuegeneric_OpCom16(v *Value) bool {
  5112  	v_0 := v.Args[0]
  5113  	// match: (Com16 (Com16 x))
  5114  	// result: x
  5115  	for {
  5116  		if v_0.Op != OpCom16 {
  5117  			break
  5118  		}
  5119  		x := v_0.Args[0]
  5120  		v.copyOf(x)
  5121  		return true
  5122  	}
  5123  	// match: (Com16 (Const16 [c]))
  5124  	// result: (Const16 [^c])
  5125  	for {
  5126  		if v_0.Op != OpConst16 {
  5127  			break
  5128  		}
  5129  		c := auxIntToInt16(v_0.AuxInt)
  5130  		v.reset(OpConst16)
  5131  		v.AuxInt = int16ToAuxInt(^c)
  5132  		return true
  5133  	}
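        	// Note: since ^y = -y-1, complementing x+(-1) gives ^(x-1) = -(x-1)-1 = -x, so the
        	// rule below (and its 32-, 64-, and 8-bit twins) canonicalizes that form to a plain
        	// negation.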
  5134  	// match: (Com16 (Add16 (Const16 [-1]) x))
  5135  	// result: (Neg16 x)
  5136  	for {
  5137  		if v_0.Op != OpAdd16 {
  5138  			break
  5139  		}
  5140  		_ = v_0.Args[1]
  5141  		v_0_0 := v_0.Args[0]
  5142  		v_0_1 := v_0.Args[1]
  5143  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  5144  			if v_0_0.Op != OpConst16 || auxIntToInt16(v_0_0.AuxInt) != -1 {
  5145  				continue
  5146  			}
  5147  			x := v_0_1
  5148  			v.reset(OpNeg16)
  5149  			v.AddArg(x)
  5150  			return true
  5151  		}
  5152  		break
  5153  	}
  5154  	return false
  5155  }
  5156  func rewriteValuegeneric_OpCom32(v *Value) bool {
  5157  	v_0 := v.Args[0]
  5158  	// match: (Com32 (Com32 x))
  5159  	// result: x
  5160  	for {
  5161  		if v_0.Op != OpCom32 {
  5162  			break
  5163  		}
  5164  		x := v_0.Args[0]
  5165  		v.copyOf(x)
  5166  		return true
  5167  	}
  5168  	// match: (Com32 (Const32 [c]))
  5169  	// result: (Const32 [^c])
  5170  	for {
  5171  		if v_0.Op != OpConst32 {
  5172  			break
  5173  		}
  5174  		c := auxIntToInt32(v_0.AuxInt)
  5175  		v.reset(OpConst32)
  5176  		v.AuxInt = int32ToAuxInt(^c)
  5177  		return true
  5178  	}
  5179  	// match: (Com32 (Add32 (Const32 [-1]) x))
  5180  	// result: (Neg32 x)
  5181  	for {
  5182  		if v_0.Op != OpAdd32 {
  5183  			break
  5184  		}
  5185  		_ = v_0.Args[1]
  5186  		v_0_0 := v_0.Args[0]
  5187  		v_0_1 := v_0.Args[1]
  5188  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  5189  			if v_0_0.Op != OpConst32 || auxIntToInt32(v_0_0.AuxInt) != -1 {
  5190  				continue
  5191  			}
  5192  			x := v_0_1
  5193  			v.reset(OpNeg32)
  5194  			v.AddArg(x)
  5195  			return true
  5196  		}
  5197  		break
  5198  	}
  5199  	return false
  5200  }
  5201  func rewriteValuegeneric_OpCom64(v *Value) bool {
  5202  	v_0 := v.Args[0]
  5203  	// match: (Com64 (Com64 x))
  5204  	// result: x
  5205  	for {
  5206  		if v_0.Op != OpCom64 {
  5207  			break
  5208  		}
  5209  		x := v_0.Args[0]
  5210  		v.copyOf(x)
  5211  		return true
  5212  	}
  5213  	// match: (Com64 (Const64 [c]))
  5214  	// result: (Const64 [^c])
  5215  	for {
  5216  		if v_0.Op != OpConst64 {
  5217  			break
  5218  		}
  5219  		c := auxIntToInt64(v_0.AuxInt)
  5220  		v.reset(OpConst64)
  5221  		v.AuxInt = int64ToAuxInt(^c)
  5222  		return true
  5223  	}
  5224  	// match: (Com64 (Add64 (Const64 [-1]) x))
  5225  	// result: (Neg64 x)
  5226  	for {
  5227  		if v_0.Op != OpAdd64 {
  5228  			break
  5229  		}
  5230  		_ = v_0.Args[1]
  5231  		v_0_0 := v_0.Args[0]
  5232  		v_0_1 := v_0.Args[1]
  5233  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  5234  			if v_0_0.Op != OpConst64 || auxIntToInt64(v_0_0.AuxInt) != -1 {
  5235  				continue
  5236  			}
  5237  			x := v_0_1
  5238  			v.reset(OpNeg64)
  5239  			v.AddArg(x)
  5240  			return true
  5241  		}
  5242  		break
  5243  	}
  5244  	return false
  5245  }
  5246  func rewriteValuegeneric_OpCom8(v *Value) bool {
  5247  	v_0 := v.Args[0]
  5248  	// match: (Com8 (Com8 x))
  5249  	// result: x
  5250  	for {
  5251  		if v_0.Op != OpCom8 {
  5252  			break
  5253  		}
  5254  		x := v_0.Args[0]
  5255  		v.copyOf(x)
  5256  		return true
  5257  	}
  5258  	// match: (Com8 (Const8 [c]))
  5259  	// result: (Const8 [^c])
  5260  	for {
  5261  		if v_0.Op != OpConst8 {
  5262  			break
  5263  		}
  5264  		c := auxIntToInt8(v_0.AuxInt)
  5265  		v.reset(OpConst8)
  5266  		v.AuxInt = int8ToAuxInt(^c)
  5267  		return true
  5268  	}
  5269  	// match: (Com8 (Add8 (Const8 [-1]) x))
  5270  	// result: (Neg8 x)
  5271  	for {
  5272  		if v_0.Op != OpAdd8 {
  5273  			break
  5274  		}
  5275  		_ = v_0.Args[1]
  5276  		v_0_0 := v_0.Args[0]
  5277  		v_0_1 := v_0.Args[1]
  5278  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  5279  			if v_0_0.Op != OpConst8 || auxIntToInt8(v_0_0.AuxInt) != -1 {
  5280  				continue
  5281  			}
  5282  			x := v_0_1
  5283  			v.reset(OpNeg8)
  5284  			v.AddArg(x)
  5285  			return true
  5286  		}
  5287  		break
  5288  	}
  5289  	return false
  5290  }
  5291  func rewriteValuegeneric_OpConstInterface(v *Value) bool {
  5292  	b := v.Block
  5293  	typ := &b.Func.Config.Types
  5294  	// match: (ConstInterface)
  5295  	// result: (IMake (ConstNil <typ.Uintptr>) (ConstNil <typ.BytePtr>))
  5296  	for {
  5297  		v.reset(OpIMake)
  5298  		v0 := b.NewValue0(v.Pos, OpConstNil, typ.Uintptr)
  5299  		v1 := b.NewValue0(v.Pos, OpConstNil, typ.BytePtr)
  5300  		v.AddArg2(v0, v1)
  5301  		return true
  5302  	}
  5303  }
  5304  func rewriteValuegeneric_OpConstSlice(v *Value) bool {
  5305  	b := v.Block
  5306  	config := b.Func.Config
  5307  	typ := &b.Func.Config.Types
  5308  	// match: (ConstSlice)
  5309  	// cond: config.PtrSize == 4
  5310  	// result: (SliceMake (ConstNil <v.Type.Elem().PtrTo()>) (Const32 <typ.Int> [0]) (Const32 <typ.Int> [0]))
  5311  	for {
  5312  		if !(config.PtrSize == 4) {
  5313  			break
  5314  		}
  5315  		v.reset(OpSliceMake)
  5316  		v0 := b.NewValue0(v.Pos, OpConstNil, v.Type.Elem().PtrTo())
  5317  		v1 := b.NewValue0(v.Pos, OpConst32, typ.Int)
  5318  		v1.AuxInt = int32ToAuxInt(0)
  5319  		v.AddArg3(v0, v1, v1)
  5320  		return true
  5321  	}
  5322  	// match: (ConstSlice)
  5323  	// cond: config.PtrSize == 8
  5324  	// result: (SliceMake (ConstNil <v.Type.Elem().PtrTo()>) (Const64 <typ.Int> [0]) (Const64 <typ.Int> [0]))
  5325  	for {
  5326  		if !(config.PtrSize == 8) {
  5327  			break
  5328  		}
  5329  		v.reset(OpSliceMake)
  5330  		v0 := b.NewValue0(v.Pos, OpConstNil, v.Type.Elem().PtrTo())
  5331  		v1 := b.NewValue0(v.Pos, OpConst64, typ.Int)
  5332  		v1.AuxInt = int64ToAuxInt(0)
  5333  		v.AddArg3(v0, v1, v1)
  5334  		return true
  5335  	}
  5336  	return false
  5337  }
  5338  func rewriteValuegeneric_OpConstString(v *Value) bool {
  5339  	b := v.Block
  5340  	config := b.Func.Config
  5341  	fe := b.Func.fe
  5342  	typ := &b.Func.Config.Types
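        	// Note: a string constant is expanded into its SSA components: an empty string
        	// becomes StringMake of a nil pointer and a zero length, while a non-empty string
        	// points at its static data (fe.StringData) with the literal length; the width of
        	// the length constant follows config.PtrSize.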
  5343  	// match: (ConstString {str})
  5344  	// cond: config.PtrSize == 4 && str == ""
  5345  	// result: (StringMake (ConstNil) (Const32 <typ.Int> [0]))
  5346  	for {
  5347  		str := auxToString(v.Aux)
  5348  		if !(config.PtrSize == 4 && str == "") {
  5349  			break
  5350  		}
  5351  		v.reset(OpStringMake)
  5352  		v0 := b.NewValue0(v.Pos, OpConstNil, typ.BytePtr)
  5353  		v1 := b.NewValue0(v.Pos, OpConst32, typ.Int)
  5354  		v1.AuxInt = int32ToAuxInt(0)
  5355  		v.AddArg2(v0, v1)
  5356  		return true
  5357  	}
  5358  	// match: (ConstString {str})
  5359  	// cond: config.PtrSize == 8 && str == ""
  5360  	// result: (StringMake (ConstNil) (Const64 <typ.Int> [0]))
  5361  	for {
  5362  		str := auxToString(v.Aux)
  5363  		if !(config.PtrSize == 8 && str == "") {
  5364  			break
  5365  		}
  5366  		v.reset(OpStringMake)
  5367  		v0 := b.NewValue0(v.Pos, OpConstNil, typ.BytePtr)
  5368  		v1 := b.NewValue0(v.Pos, OpConst64, typ.Int)
  5369  		v1.AuxInt = int64ToAuxInt(0)
  5370  		v.AddArg2(v0, v1)
  5371  		return true
  5372  	}
  5373  	// match: (ConstString {str})
  5374  	// cond: config.PtrSize == 4 && str != ""
  5375  	// result: (StringMake (Addr <typ.BytePtr> {fe.StringData(str)} (SB)) (Const32 <typ.Int> [int32(len(str))]))
  5376  	for {
  5377  		str := auxToString(v.Aux)
  5378  		if !(config.PtrSize == 4 && str != "") {
  5379  			break
  5380  		}
  5381  		v.reset(OpStringMake)
  5382  		v0 := b.NewValue0(v.Pos, OpAddr, typ.BytePtr)
  5383  		v0.Aux = symToAux(fe.StringData(str))
  5384  		v1 := b.NewValue0(v.Pos, OpSB, typ.Uintptr)
  5385  		v0.AddArg(v1)
  5386  		v2 := b.NewValue0(v.Pos, OpConst32, typ.Int)
  5387  		v2.AuxInt = int32ToAuxInt(int32(len(str)))
  5388  		v.AddArg2(v0, v2)
  5389  		return true
  5390  	}
  5391  	// match: (ConstString {str})
  5392  	// cond: config.PtrSize == 8 && str != ""
  5393  	// result: (StringMake (Addr <typ.BytePtr> {fe.StringData(str)} (SB)) (Const64 <typ.Int> [int64(len(str))]))
  5394  	for {
  5395  		str := auxToString(v.Aux)
  5396  		if !(config.PtrSize == 8 && str != "") {
  5397  			break
  5398  		}
  5399  		v.reset(OpStringMake)
  5400  		v0 := b.NewValue0(v.Pos, OpAddr, typ.BytePtr)
  5401  		v0.Aux = symToAux(fe.StringData(str))
  5402  		v1 := b.NewValue0(v.Pos, OpSB, typ.Uintptr)
  5403  		v0.AddArg(v1)
  5404  		v2 := b.NewValue0(v.Pos, OpConst64, typ.Int)
  5405  		v2.AuxInt = int64ToAuxInt(int64(len(str)))
  5406  		v.AddArg2(v0, v2)
  5407  		return true
  5408  	}
  5409  	return false
  5410  }
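// Note: the ConstString rules above decompose a string constant into a
// (pointer, length) pair. The empty string becomes a nil pointer with length 0;
// a non-empty string becomes an Addr of the read-only data symbol returned by
// fe.StringData(str) plus its constant length, with the length width again chosen
// by config.PtrSize.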
  5411  func rewriteValuegeneric_OpConvert(v *Value) bool {
  5412  	v_1 := v.Args[1]
  5413  	v_0 := v.Args[0]
  5414  	// match: (Convert (Add64 (Convert ptr mem) off) mem)
  5415  	// result: (AddPtr ptr off)
  5416  	for {
  5417  		if v_0.Op != OpAdd64 {
  5418  			break
  5419  		}
  5420  		_ = v_0.Args[1]
  5421  		v_0_0 := v_0.Args[0]
  5422  		v_0_1 := v_0.Args[1]
  5423  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  5424  			if v_0_0.Op != OpConvert {
  5425  				continue
  5426  			}
  5427  			mem := v_0_0.Args[1]
  5428  			ptr := v_0_0.Args[0]
  5429  			off := v_0_1
  5430  			if mem != v_1 {
  5431  				continue
  5432  			}
  5433  			v.reset(OpAddPtr)
  5434  			v.AddArg2(ptr, off)
  5435  			return true
  5436  		}
  5437  		break
  5438  	}
  5439  	// match: (Convert (Add32 (Convert ptr mem) off) mem)
  5440  	// result: (AddPtr ptr off)
  5441  	for {
  5442  		if v_0.Op != OpAdd32 {
  5443  			break
  5444  		}
  5445  		_ = v_0.Args[1]
  5446  		v_0_0 := v_0.Args[0]
  5447  		v_0_1 := v_0.Args[1]
  5448  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  5449  			if v_0_0.Op != OpConvert {
  5450  				continue
  5451  			}
  5452  			mem := v_0_0.Args[1]
  5453  			ptr := v_0_0.Args[0]
  5454  			off := v_0_1
  5455  			if mem != v_1 {
  5456  				continue
  5457  			}
  5458  			v.reset(OpAddPtr)
  5459  			v.AddArg2(ptr, off)
  5460  			return true
  5461  		}
  5462  		break
  5463  	}
  5464  	// match: (Convert (Convert ptr mem) mem)
  5465  	// result: ptr
  5466  	for {
  5467  		if v_0.Op != OpConvert {
  5468  			break
  5469  		}
  5470  		mem := v_0.Args[1]
  5471  		ptr := v_0.Args[0]
  5472  		if mem != v_1 {
  5473  			break
  5474  		}
  5475  		v.copyOf(ptr)
  5476  		return true
  5477  	}
  5478  	return false
  5479  }
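// Note: Convert marks unsafe.Pointer<->uintptr round trips so the pointer stays
// visible to the garbage collector. The rules above collapse such round trips when
// both conversions observe the same memory state: converting back after adding an
// offset becomes plain pointer arithmetic (AddPtr), and a direct round trip
// becomes the original pointer.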
  5480  func rewriteValuegeneric_OpCtz16(v *Value) bool {
  5481  	v_0 := v.Args[0]
  5482  	b := v.Block
  5483  	config := b.Func.Config
  5484  	// match: (Ctz16 (Const16 [c]))
  5485  	// cond: config.PtrSize == 4
  5486  	// result: (Const32 [int32(ntz16(c))])
  5487  	for {
  5488  		if v_0.Op != OpConst16 {
  5489  			break
  5490  		}
  5491  		c := auxIntToInt16(v_0.AuxInt)
  5492  		if !(config.PtrSize == 4) {
  5493  			break
  5494  		}
  5495  		v.reset(OpConst32)
  5496  		v.AuxInt = int32ToAuxInt(int32(ntz16(c)))
  5497  		return true
  5498  	}
  5499  	// match: (Ctz16 (Const16 [c]))
  5500  	// cond: config.PtrSize == 8
  5501  	// result: (Const64 [int64(ntz16(c))])
  5502  	for {
  5503  		if v_0.Op != OpConst16 {
  5504  			break
  5505  		}
  5506  		c := auxIntToInt16(v_0.AuxInt)
  5507  		if !(config.PtrSize == 8) {
  5508  			break
  5509  		}
  5510  		v.reset(OpConst64)
  5511  		v.AuxInt = int64ToAuxInt(int64(ntz16(c)))
  5512  		return true
  5513  	}
  5514  	return false
  5515  }
  5516  func rewriteValuegeneric_OpCtz32(v *Value) bool {
  5517  	v_0 := v.Args[0]
  5518  	b := v.Block
  5519  	config := b.Func.Config
  5520  	// match: (Ctz32 (Const32 [c]))
  5521  	// cond: config.PtrSize == 4
  5522  	// result: (Const32 [int32(ntz32(c))])
  5523  	for {
  5524  		if v_0.Op != OpConst32 {
  5525  			break
  5526  		}
  5527  		c := auxIntToInt32(v_0.AuxInt)
  5528  		if !(config.PtrSize == 4) {
  5529  			break
  5530  		}
  5531  		v.reset(OpConst32)
  5532  		v.AuxInt = int32ToAuxInt(int32(ntz32(c)))
  5533  		return true
  5534  	}
  5535  	// match: (Ctz32 (Const32 [c]))
  5536  	// cond: config.PtrSize == 8
  5537  	// result: (Const64 [int64(ntz32(c))])
  5538  	for {
  5539  		if v_0.Op != OpConst32 {
  5540  			break
  5541  		}
  5542  		c := auxIntToInt32(v_0.AuxInt)
  5543  		if !(config.PtrSize == 8) {
  5544  			break
  5545  		}
  5546  		v.reset(OpConst64)
  5547  		v.AuxInt = int64ToAuxInt(int64(ntz32(c)))
  5548  		return true
  5549  	}
  5550  	return false
  5551  }
  5552  func rewriteValuegeneric_OpCtz64(v *Value) bool {
  5553  	v_0 := v.Args[0]
  5554  	b := v.Block
  5555  	config := b.Func.Config
  5556  	// match: (Ctz64 (Const64 [c]))
  5557  	// cond: config.PtrSize == 4
  5558  	// result: (Const32 [int32(ntz64(c))])
  5559  	for {
  5560  		if v_0.Op != OpConst64 {
  5561  			break
  5562  		}
  5563  		c := auxIntToInt64(v_0.AuxInt)
  5564  		if !(config.PtrSize == 4) {
  5565  			break
  5566  		}
  5567  		v.reset(OpConst32)
  5568  		v.AuxInt = int32ToAuxInt(int32(ntz64(c)))
  5569  		return true
  5570  	}
  5571  	// match: (Ctz64 (Const64 [c]))
  5572  	// cond: config.PtrSize == 8
  5573  	// result: (Const64 [int64(ntz64(c))])
  5574  	for {
  5575  		if v_0.Op != OpConst64 {
  5576  			break
  5577  		}
  5578  		c := auxIntToInt64(v_0.AuxInt)
  5579  		if !(config.PtrSize == 8) {
  5580  			break
  5581  		}
  5582  		v.reset(OpConst64)
  5583  		v.AuxInt = int64ToAuxInt(int64(ntz64(c)))
  5584  		return true
  5585  	}
  5586  	return false
  5587  }
  5588  func rewriteValuegeneric_OpCtz8(v *Value) bool {
  5589  	v_0 := v.Args[0]
  5590  	b := v.Block
  5591  	config := b.Func.Config
  5592  	// match: (Ctz8 (Const8 [c]))
  5593  	// cond: config.PtrSize == 4
  5594  	// result: (Const32 [int32(ntz8(c))])
  5595  	for {
  5596  		if v_0.Op != OpConst8 {
  5597  			break
  5598  		}
  5599  		c := auxIntToInt8(v_0.AuxInt)
  5600  		if !(config.PtrSize == 4) {
  5601  			break
  5602  		}
  5603  		v.reset(OpConst32)
  5604  		v.AuxInt = int32ToAuxInt(int32(ntz8(c)))
  5605  		return true
  5606  	}
  5607  	// match: (Ctz8 (Const8 [c]))
  5608  	// cond: config.PtrSize == 8
  5609  	// result: (Const64 [int64(ntz8(c))])
  5610  	for {
  5611  		if v_0.Op != OpConst8 {
  5612  			break
  5613  		}
  5614  		c := auxIntToInt8(v_0.AuxInt)
  5615  		if !(config.PtrSize == 8) {
  5616  			break
  5617  		}
  5618  		v.reset(OpConst64)
  5619  		v.AuxInt = int64ToAuxInt(int64(ntz8(c)))
  5620  		return true
  5621  	}
  5622  	return false
  5623  }
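// Note: the Ctz16/Ctz32/Ctz64/Ctz8 rules above constant-fold a count-trailing-zeros
// of a constant using the ntz* helpers (which return the operand width for a zero
// input). The folded result is emitted as a Const32 or Const64 so that its width
// matches the target's int, selected via config.PtrSize.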
  5624  func rewriteValuegeneric_OpCvt32Fto32(v *Value) bool {
  5625  	v_0 := v.Args[0]
  5626  	// match: (Cvt32Fto32 (Const32F [c]))
  5627  	// result: (Const32 [int32(c)])
  5628  	for {
  5629  		if v_0.Op != OpConst32F {
  5630  			break
  5631  		}
  5632  		c := auxIntToFloat32(v_0.AuxInt)
  5633  		v.reset(OpConst32)
  5634  		v.AuxInt = int32ToAuxInt(int32(c))
  5635  		return true
  5636  	}
  5637  	return false
  5638  }
  5639  func rewriteValuegeneric_OpCvt32Fto64(v *Value) bool {
  5640  	v_0 := v.Args[0]
  5641  	// match: (Cvt32Fto64 (Const32F [c]))
  5642  	// result: (Const64 [int64(c)])
  5643  	for {
  5644  		if v_0.Op != OpConst32F {
  5645  			break
  5646  		}
  5647  		c := auxIntToFloat32(v_0.AuxInt)
  5648  		v.reset(OpConst64)
  5649  		v.AuxInt = int64ToAuxInt(int64(c))
  5650  		return true
  5651  	}
  5652  	return false
  5653  }
  5654  func rewriteValuegeneric_OpCvt32Fto64F(v *Value) bool {
  5655  	v_0 := v.Args[0]
  5656  	// match: (Cvt32Fto64F (Const32F [c]))
  5657  	// result: (Const64F [float64(c)])
  5658  	for {
  5659  		if v_0.Op != OpConst32F {
  5660  			break
  5661  		}
  5662  		c := auxIntToFloat32(v_0.AuxInt)
  5663  		v.reset(OpConst64F)
  5664  		v.AuxInt = float64ToAuxInt(float64(c))
  5665  		return true
  5666  	}
  5667  	return false
  5668  }
  5669  func rewriteValuegeneric_OpCvt32to32F(v *Value) bool {
  5670  	v_0 := v.Args[0]
  5671  	// match: (Cvt32to32F (Const32 [c]))
  5672  	// result: (Const32F [float32(c)])
  5673  	for {
  5674  		if v_0.Op != OpConst32 {
  5675  			break
  5676  		}
  5677  		c := auxIntToInt32(v_0.AuxInt)
  5678  		v.reset(OpConst32F)
  5679  		v.AuxInt = float32ToAuxInt(float32(c))
  5680  		return true
  5681  	}
  5682  	return false
  5683  }
  5684  func rewriteValuegeneric_OpCvt32to64F(v *Value) bool {
  5685  	v_0 := v.Args[0]
  5686  	// match: (Cvt32to64F (Const32 [c]))
  5687  	// result: (Const64F [float64(c)])
  5688  	for {
  5689  		if v_0.Op != OpConst32 {
  5690  			break
  5691  		}
  5692  		c := auxIntToInt32(v_0.AuxInt)
  5693  		v.reset(OpConst64F)
  5694  		v.AuxInt = float64ToAuxInt(float64(c))
  5695  		return true
  5696  	}
  5697  	return false
  5698  }
  5699  func rewriteValuegeneric_OpCvt64Fto32(v *Value) bool {
  5700  	v_0 := v.Args[0]
  5701  	// match: (Cvt64Fto32 (Const64F [c]))
  5702  	// result: (Const32 [int32(c)])
  5703  	for {
  5704  		if v_0.Op != OpConst64F {
  5705  			break
  5706  		}
  5707  		c := auxIntToFloat64(v_0.AuxInt)
  5708  		v.reset(OpConst32)
  5709  		v.AuxInt = int32ToAuxInt(int32(c))
  5710  		return true
  5711  	}
  5712  	return false
  5713  }
  5714  func rewriteValuegeneric_OpCvt64Fto32F(v *Value) bool {
  5715  	v_0 := v.Args[0]
  5716  	// match: (Cvt64Fto32F (Const64F [c]))
  5717  	// result: (Const32F [float32(c)])
  5718  	for {
  5719  		if v_0.Op != OpConst64F {
  5720  			break
  5721  		}
  5722  		c := auxIntToFloat64(v_0.AuxInt)
  5723  		v.reset(OpConst32F)
  5724  		v.AuxInt = float32ToAuxInt(float32(c))
  5725  		return true
  5726  	}
  5727  	// match: (Cvt64Fto32F sqrt0:(Sqrt (Cvt32Fto64F x)))
  5728  	// cond: sqrt0.Uses==1
  5729  	// result: (Sqrt32 x)
  5730  	for {
  5731  		sqrt0 := v_0
  5732  		if sqrt0.Op != OpSqrt {
  5733  			break
  5734  		}
  5735  		sqrt0_0 := sqrt0.Args[0]
  5736  		if sqrt0_0.Op != OpCvt32Fto64F {
  5737  			break
  5738  		}
  5739  		x := sqrt0_0.Args[0]
  5740  		if !(sqrt0.Uses == 1) {
  5741  			break
  5742  		}
  5743  		v.reset(OpSqrt32)
  5744  		v.AddArg(x)
  5745  		return true
  5746  	}
  5747  	return false
  5748  }
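// Note: besides constant folding, the Cvt64Fto32F rule above recognizes a float32
// square root computed by widening to float64: Cvt64Fto32F(Sqrt(Cvt32Fto64F x)) is
// replaced by Sqrt32 x when the intermediate Sqrt has no other uses. Rounding a
// float64 sqrt of a float32 value back to float32 yields the correctly rounded
// single-precision result, so the rewrite does not change the answer.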
  5749  func rewriteValuegeneric_OpCvt64Fto64(v *Value) bool {
  5750  	v_0 := v.Args[0]
  5751  	// match: (Cvt64Fto64 (Const64F [c]))
  5752  	// result: (Const64 [int64(c)])
  5753  	for {
  5754  		if v_0.Op != OpConst64F {
  5755  			break
  5756  		}
  5757  		c := auxIntToFloat64(v_0.AuxInt)
  5758  		v.reset(OpConst64)
  5759  		v.AuxInt = int64ToAuxInt(int64(c))
  5760  		return true
  5761  	}
  5762  	return false
  5763  }
  5764  func rewriteValuegeneric_OpCvt64to32F(v *Value) bool {
  5765  	v_0 := v.Args[0]
  5766  	// match: (Cvt64to32F (Const64 [c]))
  5767  	// result: (Const32F [float32(c)])
  5768  	for {
  5769  		if v_0.Op != OpConst64 {
  5770  			break
  5771  		}
  5772  		c := auxIntToInt64(v_0.AuxInt)
  5773  		v.reset(OpConst32F)
  5774  		v.AuxInt = float32ToAuxInt(float32(c))
  5775  		return true
  5776  	}
  5777  	return false
  5778  }
  5779  func rewriteValuegeneric_OpCvt64to64F(v *Value) bool {
  5780  	v_0 := v.Args[0]
  5781  	// match: (Cvt64to64F (Const64 [c]))
  5782  	// result: (Const64F [float64(c)])
  5783  	for {
  5784  		if v_0.Op != OpConst64 {
  5785  			break
  5786  		}
  5787  		c := auxIntToInt64(v_0.AuxInt)
  5788  		v.reset(OpConst64F)
  5789  		v.AuxInt = float64ToAuxInt(float64(c))
  5790  		return true
  5791  	}
  5792  	return false
  5793  }
  5794  func rewriteValuegeneric_OpCvtBoolToUint8(v *Value) bool {
  5795  	v_0 := v.Args[0]
  5796  	// match: (CvtBoolToUint8 (ConstBool [false]))
  5797  	// result: (Const8 [0])
  5798  	for {
  5799  		if v_0.Op != OpConstBool || auxIntToBool(v_0.AuxInt) != false {
  5800  			break
  5801  		}
  5802  		v.reset(OpConst8)
  5803  		v.AuxInt = int8ToAuxInt(0)
  5804  		return true
  5805  	}
  5806  	// match: (CvtBoolToUint8 (ConstBool [true]))
  5807  	// result: (Const8 [1])
  5808  	for {
  5809  		if v_0.Op != OpConstBool || auxIntToBool(v_0.AuxInt) != true {
  5810  			break
  5811  		}
  5812  		v.reset(OpConst8)
  5813  		v.AuxInt = int8ToAuxInt(1)
  5814  		return true
  5815  	}
  5816  	return false
  5817  }
  5818  func rewriteValuegeneric_OpDiv16(v *Value) bool {
  5819  	v_1 := v.Args[1]
  5820  	v_0 := v.Args[0]
  5821  	b := v.Block
  5822  	typ := &b.Func.Config.Types
  5823  	// match: (Div16 (Const16 [c]) (Const16 [d]))
  5824  	// cond: d != 0
  5825  	// result: (Const16 [c/d])
  5826  	for {
  5827  		if v_0.Op != OpConst16 {
  5828  			break
  5829  		}
  5830  		c := auxIntToInt16(v_0.AuxInt)
  5831  		if v_1.Op != OpConst16 {
  5832  			break
  5833  		}
  5834  		d := auxIntToInt16(v_1.AuxInt)
  5835  		if !(d != 0) {
  5836  			break
  5837  		}
  5838  		v.reset(OpConst16)
  5839  		v.AuxInt = int16ToAuxInt(c / d)
  5840  		return true
  5841  	}
  5842  	// match: (Div16 n (Const16 [c]))
  5843  	// cond: isNonNegative(n) && isPowerOfTwo16(c)
  5844  	// result: (Rsh16Ux64 n (Const64 <typ.UInt64> [log16(c)]))
  5845  	for {
  5846  		n := v_0
  5847  		if v_1.Op != OpConst16 {
  5848  			break
  5849  		}
  5850  		c := auxIntToInt16(v_1.AuxInt)
  5851  		if !(isNonNegative(n) && isPowerOfTwo16(c)) {
  5852  			break
  5853  		}
  5854  		v.reset(OpRsh16Ux64)
  5855  		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  5856  		v0.AuxInt = int64ToAuxInt(log16(c))
  5857  		v.AddArg2(n, v0)
  5858  		return true
  5859  	}
  5860  	// match: (Div16 <t> n (Const16 [c]))
  5861  	// cond: c < 0 && c != -1<<15
  5862  	// result: (Neg16 (Div16 <t> n (Const16 <t> [-c])))
  5863  	for {
  5864  		t := v.Type
  5865  		n := v_0
  5866  		if v_1.Op != OpConst16 {
  5867  			break
  5868  		}
  5869  		c := auxIntToInt16(v_1.AuxInt)
  5870  		if !(c < 0 && c != -1<<15) {
  5871  			break
  5872  		}
  5873  		v.reset(OpNeg16)
  5874  		v0 := b.NewValue0(v.Pos, OpDiv16, t)
  5875  		v1 := b.NewValue0(v.Pos, OpConst16, t)
  5876  		v1.AuxInt = int16ToAuxInt(-c)
  5877  		v0.AddArg2(n, v1)
  5878  		v.AddArg(v0)
  5879  		return true
  5880  	}
  5881  	// match: (Div16 <t> x (Const16 [-1<<15]))
  5882  	// result: (Rsh16Ux64 (And16 <t> x (Neg16 <t> x)) (Const64 <typ.UInt64> [15]))
  5883  	for {
  5884  		t := v.Type
  5885  		x := v_0
  5886  		if v_1.Op != OpConst16 || auxIntToInt16(v_1.AuxInt) != -1<<15 {
  5887  			break
  5888  		}
  5889  		v.reset(OpRsh16Ux64)
  5890  		v0 := b.NewValue0(v.Pos, OpAnd16, t)
  5891  		v1 := b.NewValue0(v.Pos, OpNeg16, t)
  5892  		v1.AddArg(x)
  5893  		v0.AddArg2(x, v1)
  5894  		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  5895  		v2.AuxInt = int64ToAuxInt(15)
  5896  		v.AddArg2(v0, v2)
  5897  		return true
  5898  	}
  5899  	// match: (Div16 <t> n (Const16 [c]))
  5900  	// cond: isPowerOfTwo16(c)
  5901  	// result: (Rsh16x64 (Add16 <t> n (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [int64(16-log16(c))]))) (Const64 <typ.UInt64> [int64(log16(c))]))
  5902  	for {
  5903  		t := v.Type
  5904  		n := v_0
  5905  		if v_1.Op != OpConst16 {
  5906  			break
  5907  		}
  5908  		c := auxIntToInt16(v_1.AuxInt)
  5909  		if !(isPowerOfTwo16(c)) {
  5910  			break
  5911  		}
  5912  		v.reset(OpRsh16x64)
  5913  		v0 := b.NewValue0(v.Pos, OpAdd16, t)
  5914  		v1 := b.NewValue0(v.Pos, OpRsh16Ux64, t)
  5915  		v2 := b.NewValue0(v.Pos, OpRsh16x64, t)
  5916  		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  5917  		v3.AuxInt = int64ToAuxInt(15)
  5918  		v2.AddArg2(n, v3)
  5919  		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  5920  		v4.AuxInt = int64ToAuxInt(int64(16 - log16(c)))
  5921  		v1.AddArg2(v2, v4)
  5922  		v0.AddArg2(n, v1)
  5923  		v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  5924  		v5.AuxInt = int64ToAuxInt(int64(log16(c)))
  5925  		v.AddArg2(v0, v5)
  5926  		return true
  5927  	}
  5928  	// match: (Div16 <t> x (Const16 [c]))
  5929  	// cond: smagicOK16(c)
  5930  	// result: (Sub16 <t> (Rsh32x64 <t> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(smagic16(c).m)]) (SignExt16to32 x)) (Const64 <typ.UInt64> [16+smagic16(c).s])) (Rsh32x64 <t> (SignExt16to32 x) (Const64 <typ.UInt64> [31])))
  5931  	for {
  5932  		t := v.Type
  5933  		x := v_0
  5934  		if v_1.Op != OpConst16 {
  5935  			break
  5936  		}
  5937  		c := auxIntToInt16(v_1.AuxInt)
  5938  		if !(smagicOK16(c)) {
  5939  			break
  5940  		}
  5941  		v.reset(OpSub16)
  5942  		v.Type = t
  5943  		v0 := b.NewValue0(v.Pos, OpRsh32x64, t)
  5944  		v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
  5945  		v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  5946  		v2.AuxInt = int32ToAuxInt(int32(smagic16(c).m))
  5947  		v3 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  5948  		v3.AddArg(x)
  5949  		v1.AddArg2(v2, v3)
  5950  		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  5951  		v4.AuxInt = int64ToAuxInt(16 + smagic16(c).s)
  5952  		v0.AddArg2(v1, v4)
  5953  		v5 := b.NewValue0(v.Pos, OpRsh32x64, t)
  5954  		v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  5955  		v6.AuxInt = int64ToAuxInt(31)
  5956  		v5.AddArg2(v3, v6)
  5957  		v.AddArg2(v0, v5)
  5958  		return true
  5959  	}
  5960  	return false
  5961  }
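// Note: the Div16 rules above strength-reduce signed division by a constant.
// A negative divisor (other than -1<<15) is negated and the result wrapped in Neg16;
// dividing by -1<<15 itself tests whether x equals that value via (x & -x) >> 15.
// For a power-of-two divisor 2^k, truncating division needs a rounding adjustment
// for negative numerators: (n + (n<0 ? 2^k-1 : 0)) >> k, with the adjustment
// computed branch-free as (n>>15) >>unsigned (16-k). For example (illustrative
// values, not taken from the rules): n = -7, k = 2 gives (-7 + 3) >> 2 = -1, which
// is trunc(-7/4). Other divisors use the smagic16 multiply-high constants, where
// trunc(x/c) = (x*m >> (16+s)) + (x<0 ? 1 : 0).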
  5962  func rewriteValuegeneric_OpDiv16u(v *Value) bool {
  5963  	v_1 := v.Args[1]
  5964  	v_0 := v.Args[0]
  5965  	b := v.Block
  5966  	config := b.Func.Config
  5967  	typ := &b.Func.Config.Types
  5968  	// match: (Div16u (Const16 [c]) (Const16 [d]))
  5969  	// cond: d != 0
  5970  	// result: (Const16 [int16(uint16(c)/uint16(d))])
  5971  	for {
  5972  		if v_0.Op != OpConst16 {
  5973  			break
  5974  		}
  5975  		c := auxIntToInt16(v_0.AuxInt)
  5976  		if v_1.Op != OpConst16 {
  5977  			break
  5978  		}
  5979  		d := auxIntToInt16(v_1.AuxInt)
  5980  		if !(d != 0) {
  5981  			break
  5982  		}
  5983  		v.reset(OpConst16)
  5984  		v.AuxInt = int16ToAuxInt(int16(uint16(c) / uint16(d)))
  5985  		return true
  5986  	}
  5987  	// match: (Div16u n (Const16 [c]))
  5988  	// cond: isPowerOfTwo16(c)
  5989  	// result: (Rsh16Ux64 n (Const64 <typ.UInt64> [log16(c)]))
  5990  	for {
  5991  		n := v_0
  5992  		if v_1.Op != OpConst16 {
  5993  			break
  5994  		}
  5995  		c := auxIntToInt16(v_1.AuxInt)
  5996  		if !(isPowerOfTwo16(c)) {
  5997  			break
  5998  		}
  5999  		v.reset(OpRsh16Ux64)
  6000  		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6001  		v0.AuxInt = int64ToAuxInt(log16(c))
  6002  		v.AddArg2(n, v0)
  6003  		return true
  6004  	}
  6005  	// match: (Div16u x (Const16 [c]))
  6006  	// cond: umagicOK16(c) && config.RegSize == 8
  6007  	// result: (Trunc64to16 (Rsh64Ux64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(1<<16+umagic16(c).m)]) (ZeroExt16to64 x)) (Const64 <typ.UInt64> [16+umagic16(c).s])))
  6008  	for {
  6009  		x := v_0
  6010  		if v_1.Op != OpConst16 {
  6011  			break
  6012  		}
  6013  		c := auxIntToInt16(v_1.AuxInt)
  6014  		if !(umagicOK16(c) && config.RegSize == 8) {
  6015  			break
  6016  		}
  6017  		v.reset(OpTrunc64to16)
  6018  		v0 := b.NewValue0(v.Pos, OpRsh64Ux64, typ.UInt64)
  6019  		v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
  6020  		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6021  		v2.AuxInt = int64ToAuxInt(int64(1<<16 + umagic16(c).m))
  6022  		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  6023  		v3.AddArg(x)
  6024  		v1.AddArg2(v2, v3)
  6025  		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6026  		v4.AuxInt = int64ToAuxInt(16 + umagic16(c).s)
  6027  		v0.AddArg2(v1, v4)
  6028  		v.AddArg(v0)
  6029  		return true
  6030  	}
  6031  	// match: (Div16u x (Const16 [c]))
  6032  	// cond: umagicOK16(c) && config.RegSize == 4 && umagic16(c).m&1 == 0
  6033  	// result: (Trunc32to16 (Rsh32Ux64 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(1<<15+umagic16(c).m/2)]) (ZeroExt16to32 x)) (Const64 <typ.UInt64> [16+umagic16(c).s-1])))
  6034  	for {
  6035  		x := v_0
  6036  		if v_1.Op != OpConst16 {
  6037  			break
  6038  		}
  6039  		c := auxIntToInt16(v_1.AuxInt)
  6040  		if !(umagicOK16(c) && config.RegSize == 4 && umagic16(c).m&1 == 0) {
  6041  			break
  6042  		}
  6043  		v.reset(OpTrunc32to16)
  6044  		v0 := b.NewValue0(v.Pos, OpRsh32Ux64, typ.UInt32)
  6045  		v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
  6046  		v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  6047  		v2.AuxInt = int32ToAuxInt(int32(1<<15 + umagic16(c).m/2))
  6048  		v3 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  6049  		v3.AddArg(x)
  6050  		v1.AddArg2(v2, v3)
  6051  		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6052  		v4.AuxInt = int64ToAuxInt(16 + umagic16(c).s - 1)
  6053  		v0.AddArg2(v1, v4)
  6054  		v.AddArg(v0)
  6055  		return true
  6056  	}
  6057  	// match: (Div16u x (Const16 [c]))
  6058  	// cond: umagicOK16(c) && config.RegSize == 4 && c&1 == 0
  6059  	// result: (Trunc32to16 (Rsh32Ux64 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(1<<15+(umagic16(c).m+1)/2)]) (Rsh32Ux64 <typ.UInt32> (ZeroExt16to32 x) (Const64 <typ.UInt64> [1]))) (Const64 <typ.UInt64> [16+umagic16(c).s-2])))
  6060  	for {
  6061  		x := v_0
  6062  		if v_1.Op != OpConst16 {
  6063  			break
  6064  		}
  6065  		c := auxIntToInt16(v_1.AuxInt)
  6066  		if !(umagicOK16(c) && config.RegSize == 4 && c&1 == 0) {
  6067  			break
  6068  		}
  6069  		v.reset(OpTrunc32to16)
  6070  		v0 := b.NewValue0(v.Pos, OpRsh32Ux64, typ.UInt32)
  6071  		v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
  6072  		v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  6073  		v2.AuxInt = int32ToAuxInt(int32(1<<15 + (umagic16(c).m+1)/2))
  6074  		v3 := b.NewValue0(v.Pos, OpRsh32Ux64, typ.UInt32)
  6075  		v4 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  6076  		v4.AddArg(x)
  6077  		v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6078  		v5.AuxInt = int64ToAuxInt(1)
  6079  		v3.AddArg2(v4, v5)
  6080  		v1.AddArg2(v2, v3)
  6081  		v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6082  		v6.AuxInt = int64ToAuxInt(16 + umagic16(c).s - 2)
  6083  		v0.AddArg2(v1, v6)
  6084  		v.AddArg(v0)
  6085  		return true
  6086  	}
  6087  	// match: (Div16u x (Const16 [c]))
  6088  	// cond: umagicOK16(c) && config.RegSize == 4 && config.useAvg
  6089  	// result: (Trunc32to16 (Rsh32Ux64 <typ.UInt32> (Avg32u (Lsh32x64 <typ.UInt32> (ZeroExt16to32 x) (Const64 <typ.UInt64> [16])) (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(umagic16(c).m)]) (ZeroExt16to32 x))) (Const64 <typ.UInt64> [16+umagic16(c).s-1])))
  6090  	for {
  6091  		x := v_0
  6092  		if v_1.Op != OpConst16 {
  6093  			break
  6094  		}
  6095  		c := auxIntToInt16(v_1.AuxInt)
  6096  		if !(umagicOK16(c) && config.RegSize == 4 && config.useAvg) {
  6097  			break
  6098  		}
  6099  		v.reset(OpTrunc32to16)
  6100  		v0 := b.NewValue0(v.Pos, OpRsh32Ux64, typ.UInt32)
  6101  		v1 := b.NewValue0(v.Pos, OpAvg32u, typ.UInt32)
  6102  		v2 := b.NewValue0(v.Pos, OpLsh32x64, typ.UInt32)
  6103  		v3 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  6104  		v3.AddArg(x)
  6105  		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6106  		v4.AuxInt = int64ToAuxInt(16)
  6107  		v2.AddArg2(v3, v4)
  6108  		v5 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
  6109  		v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  6110  		v6.AuxInt = int32ToAuxInt(int32(umagic16(c).m))
  6111  		v5.AddArg2(v6, v3)
  6112  		v1.AddArg2(v2, v5)
  6113  		v7 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6114  		v7.AuxInt = int64ToAuxInt(16 + umagic16(c).s - 1)
  6115  		v0.AddArg2(v1, v7)
  6116  		v.AddArg(v0)
  6117  		return true
  6118  	}
  6119  	return false
  6120  }
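// Note: the Div16u rules above use the unsigned magic-number technique: umagic16(c)
// supplies m and s such that floor(x/c) == (x * (1<<16 + m)) >> (16+s) for all
// 0 <= x < 1<<16. On 64-bit targets the multiply is done directly in 64 bits; on
// 32-bit targets the 17-bit multiplier is first reduced, either by halving an even
// m, halving an even c, or (last rule) splitting the product with Avg32u.
// As an illustrative check (values worked out by hand, not produced by umagic16):
// dividing by 3 works with m = 21846, s = 2, since (x*87382)>>18 == x/3 for every
// 16-bit x.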
  6121  func rewriteValuegeneric_OpDiv32(v *Value) bool {
  6122  	v_1 := v.Args[1]
  6123  	v_0 := v.Args[0]
  6124  	b := v.Block
  6125  	config := b.Func.Config
  6126  	typ := &b.Func.Config.Types
  6127  	// match: (Div32 (Const32 [c]) (Const32 [d]))
  6128  	// cond: d != 0
  6129  	// result: (Const32 [c/d])
  6130  	for {
  6131  		if v_0.Op != OpConst32 {
  6132  			break
  6133  		}
  6134  		c := auxIntToInt32(v_0.AuxInt)
  6135  		if v_1.Op != OpConst32 {
  6136  			break
  6137  		}
  6138  		d := auxIntToInt32(v_1.AuxInt)
  6139  		if !(d != 0) {
  6140  			break
  6141  		}
  6142  		v.reset(OpConst32)
  6143  		v.AuxInt = int32ToAuxInt(c / d)
  6144  		return true
  6145  	}
  6146  	// match: (Div32 n (Const32 [c]))
  6147  	// cond: isNonNegative(n) && isPowerOfTwo32(c)
  6148  	// result: (Rsh32Ux64 n (Const64 <typ.UInt64> [log32(c)]))
  6149  	for {
  6150  		n := v_0
  6151  		if v_1.Op != OpConst32 {
  6152  			break
  6153  		}
  6154  		c := auxIntToInt32(v_1.AuxInt)
  6155  		if !(isNonNegative(n) && isPowerOfTwo32(c)) {
  6156  			break
  6157  		}
  6158  		v.reset(OpRsh32Ux64)
  6159  		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6160  		v0.AuxInt = int64ToAuxInt(log32(c))
  6161  		v.AddArg2(n, v0)
  6162  		return true
  6163  	}
  6164  	// match: (Div32 <t> n (Const32 [c]))
  6165  	// cond: c < 0 && c != -1<<31
  6166  	// result: (Neg32 (Div32 <t> n (Const32 <t> [-c])))
  6167  	for {
  6168  		t := v.Type
  6169  		n := v_0
  6170  		if v_1.Op != OpConst32 {
  6171  			break
  6172  		}
  6173  		c := auxIntToInt32(v_1.AuxInt)
  6174  		if !(c < 0 && c != -1<<31) {
  6175  			break
  6176  		}
  6177  		v.reset(OpNeg32)
  6178  		v0 := b.NewValue0(v.Pos, OpDiv32, t)
  6179  		v1 := b.NewValue0(v.Pos, OpConst32, t)
  6180  		v1.AuxInt = int32ToAuxInt(-c)
  6181  		v0.AddArg2(n, v1)
  6182  		v.AddArg(v0)
  6183  		return true
  6184  	}
  6185  	// match: (Div32 <t> x (Const32 [-1<<31]))
  6186  	// result: (Rsh32Ux64 (And32 <t> x (Neg32 <t> x)) (Const64 <typ.UInt64> [31]))
  6187  	for {
  6188  		t := v.Type
  6189  		x := v_0
  6190  		if v_1.Op != OpConst32 || auxIntToInt32(v_1.AuxInt) != -1<<31 {
  6191  			break
  6192  		}
  6193  		v.reset(OpRsh32Ux64)
  6194  		v0 := b.NewValue0(v.Pos, OpAnd32, t)
  6195  		v1 := b.NewValue0(v.Pos, OpNeg32, t)
  6196  		v1.AddArg(x)
  6197  		v0.AddArg2(x, v1)
  6198  		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6199  		v2.AuxInt = int64ToAuxInt(31)
  6200  		v.AddArg2(v0, v2)
  6201  		return true
  6202  	}
  6203  	// match: (Div32 <t> n (Const32 [c]))
  6204  	// cond: isPowerOfTwo32(c)
  6205  	// result: (Rsh32x64 (Add32 <t> n (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [int64(32-log32(c))]))) (Const64 <typ.UInt64> [int64(log32(c))]))
  6206  	for {
  6207  		t := v.Type
  6208  		n := v_0
  6209  		if v_1.Op != OpConst32 {
  6210  			break
  6211  		}
  6212  		c := auxIntToInt32(v_1.AuxInt)
  6213  		if !(isPowerOfTwo32(c)) {
  6214  			break
  6215  		}
  6216  		v.reset(OpRsh32x64)
  6217  		v0 := b.NewValue0(v.Pos, OpAdd32, t)
  6218  		v1 := b.NewValue0(v.Pos, OpRsh32Ux64, t)
  6219  		v2 := b.NewValue0(v.Pos, OpRsh32x64, t)
  6220  		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6221  		v3.AuxInt = int64ToAuxInt(31)
  6222  		v2.AddArg2(n, v3)
  6223  		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6224  		v4.AuxInt = int64ToAuxInt(int64(32 - log32(c)))
  6225  		v1.AddArg2(v2, v4)
  6226  		v0.AddArg2(n, v1)
  6227  		v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6228  		v5.AuxInt = int64ToAuxInt(int64(log32(c)))
  6229  		v.AddArg2(v0, v5)
  6230  		return true
  6231  	}
  6232  	// match: (Div32 <t> x (Const32 [c]))
  6233  	// cond: smagicOK32(c) && config.RegSize == 8
  6234  	// result: (Sub32 <t> (Rsh64x64 <t> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(smagic32(c).m)]) (SignExt32to64 x)) (Const64 <typ.UInt64> [32+smagic32(c).s])) (Rsh64x64 <t> (SignExt32to64 x) (Const64 <typ.UInt64> [63])))
  6235  	for {
  6236  		t := v.Type
  6237  		x := v_0
  6238  		if v_1.Op != OpConst32 {
  6239  			break
  6240  		}
  6241  		c := auxIntToInt32(v_1.AuxInt)
  6242  		if !(smagicOK32(c) && config.RegSize == 8) {
  6243  			break
  6244  		}
  6245  		v.reset(OpSub32)
  6246  		v.Type = t
  6247  		v0 := b.NewValue0(v.Pos, OpRsh64x64, t)
  6248  		v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
  6249  		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6250  		v2.AuxInt = int64ToAuxInt(int64(smagic32(c).m))
  6251  		v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
  6252  		v3.AddArg(x)
  6253  		v1.AddArg2(v2, v3)
  6254  		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6255  		v4.AuxInt = int64ToAuxInt(32 + smagic32(c).s)
  6256  		v0.AddArg2(v1, v4)
  6257  		v5 := b.NewValue0(v.Pos, OpRsh64x64, t)
  6258  		v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6259  		v6.AuxInt = int64ToAuxInt(63)
  6260  		v5.AddArg2(v3, v6)
  6261  		v.AddArg2(v0, v5)
  6262  		return true
  6263  	}
  6264  	// match: (Div32 <t> x (Const32 [c]))
  6265  	// cond: smagicOK32(c) && config.RegSize == 4 && smagic32(c).m&1 == 0 && config.useHmul
  6266  	// result: (Sub32 <t> (Rsh32x64 <t> (Hmul32 <t> (Const32 <typ.UInt32> [int32(smagic32(c).m/2)]) x) (Const64 <typ.UInt64> [smagic32(c).s-1])) (Rsh32x64 <t> x (Const64 <typ.UInt64> [31])))
  6267  	for {
  6268  		t := v.Type
  6269  		x := v_0
  6270  		if v_1.Op != OpConst32 {
  6271  			break
  6272  		}
  6273  		c := auxIntToInt32(v_1.AuxInt)
  6274  		if !(smagicOK32(c) && config.RegSize == 4 && smagic32(c).m&1 == 0 && config.useHmul) {
  6275  			break
  6276  		}
  6277  		v.reset(OpSub32)
  6278  		v.Type = t
  6279  		v0 := b.NewValue0(v.Pos, OpRsh32x64, t)
  6280  		v1 := b.NewValue0(v.Pos, OpHmul32, t)
  6281  		v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  6282  		v2.AuxInt = int32ToAuxInt(int32(smagic32(c).m / 2))
  6283  		v1.AddArg2(v2, x)
  6284  		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6285  		v3.AuxInt = int64ToAuxInt(smagic32(c).s - 1)
  6286  		v0.AddArg2(v1, v3)
  6287  		v4 := b.NewValue0(v.Pos, OpRsh32x64, t)
  6288  		v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6289  		v5.AuxInt = int64ToAuxInt(31)
  6290  		v4.AddArg2(x, v5)
  6291  		v.AddArg2(v0, v4)
  6292  		return true
  6293  	}
  6294  	// match: (Div32 <t> x (Const32 [c]))
  6295  	// cond: smagicOK32(c) && config.RegSize == 4 && smagic32(c).m&1 != 0 && config.useHmul
  6296  	// result: (Sub32 <t> (Rsh32x64 <t> (Add32 <t> (Hmul32 <t> (Const32 <typ.UInt32> [int32(smagic32(c).m)]) x) x) (Const64 <typ.UInt64> [smagic32(c).s])) (Rsh32x64 <t> x (Const64 <typ.UInt64> [31])))
  6297  	for {
  6298  		t := v.Type
  6299  		x := v_0
  6300  		if v_1.Op != OpConst32 {
  6301  			break
  6302  		}
  6303  		c := auxIntToInt32(v_1.AuxInt)
  6304  		if !(smagicOK32(c) && config.RegSize == 4 && smagic32(c).m&1 != 0 && config.useHmul) {
  6305  			break
  6306  		}
  6307  		v.reset(OpSub32)
  6308  		v.Type = t
  6309  		v0 := b.NewValue0(v.Pos, OpRsh32x64, t)
  6310  		v1 := b.NewValue0(v.Pos, OpAdd32, t)
  6311  		v2 := b.NewValue0(v.Pos, OpHmul32, t)
  6312  		v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  6313  		v3.AuxInt = int32ToAuxInt(int32(smagic32(c).m))
  6314  		v2.AddArg2(v3, x)
  6315  		v1.AddArg2(v2, x)
  6316  		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6317  		v4.AuxInt = int64ToAuxInt(smagic32(c).s)
  6318  		v0.AddArg2(v1, v4)
  6319  		v5 := b.NewValue0(v.Pos, OpRsh32x64, t)
  6320  		v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6321  		v6.AuxInt = int64ToAuxInt(31)
  6322  		v5.AddArg2(x, v6)
  6323  		v.AddArg2(v0, v5)
  6324  		return true
  6325  	}
  6326  	return false
  6327  }
  6328  func rewriteValuegeneric_OpDiv32F(v *Value) bool {
  6329  	v_1 := v.Args[1]
  6330  	v_0 := v.Args[0]
  6331  	b := v.Block
  6332  	// match: (Div32F (Const32F [c]) (Const32F [d]))
  6333  	// cond: c/d == c/d
  6334  	// result: (Const32F [c/d])
  6335  	for {
  6336  		if v_0.Op != OpConst32F {
  6337  			break
  6338  		}
  6339  		c := auxIntToFloat32(v_0.AuxInt)
  6340  		if v_1.Op != OpConst32F {
  6341  			break
  6342  		}
  6343  		d := auxIntToFloat32(v_1.AuxInt)
  6344  		if !(c/d == c/d) {
  6345  			break
  6346  		}
  6347  		v.reset(OpConst32F)
  6348  		v.AuxInt = float32ToAuxInt(c / d)
  6349  		return true
  6350  	}
  6351  	// match: (Div32F x (Const32F <t> [c]))
  6352  	// cond: reciprocalExact32(c)
  6353  	// result: (Mul32F x (Const32F <t> [1/c]))
  6354  	for {
  6355  		x := v_0
  6356  		if v_1.Op != OpConst32F {
  6357  			break
  6358  		}
  6359  		t := v_1.Type
  6360  		c := auxIntToFloat32(v_1.AuxInt)
  6361  		if !(reciprocalExact32(c)) {
  6362  			break
  6363  		}
  6364  		v.reset(OpMul32F)
  6365  		v0 := b.NewValue0(v.Pos, OpConst32F, t)
  6366  		v0.AuxInt = float32ToAuxInt(1 / c)
  6367  		v.AddArg2(x, v0)
  6368  		return true
  6369  	}
  6370  	return false
  6371  }
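// Note: the Div32F rules above fold a constant quotient only when the result is not
// NaN (the c/d == c/d guard), and replace division by a constant with multiplication
// by its reciprocal only when reciprocalExact32 reports that 1/c is exactly
// representable, so the rewrite cannot change the rounded result.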
  6372  func rewriteValuegeneric_OpDiv32u(v *Value) bool {
  6373  	v_1 := v.Args[1]
  6374  	v_0 := v.Args[0]
  6375  	b := v.Block
  6376  	config := b.Func.Config
  6377  	typ := &b.Func.Config.Types
  6378  	// match: (Div32u (Const32 [c]) (Const32 [d]))
  6379  	// cond: d != 0
  6380  	// result: (Const32 [int32(uint32(c)/uint32(d))])
  6381  	for {
  6382  		if v_0.Op != OpConst32 {
  6383  			break
  6384  		}
  6385  		c := auxIntToInt32(v_0.AuxInt)
  6386  		if v_1.Op != OpConst32 {
  6387  			break
  6388  		}
  6389  		d := auxIntToInt32(v_1.AuxInt)
  6390  		if !(d != 0) {
  6391  			break
  6392  		}
  6393  		v.reset(OpConst32)
  6394  		v.AuxInt = int32ToAuxInt(int32(uint32(c) / uint32(d)))
  6395  		return true
  6396  	}
  6397  	// match: (Div32u n (Const32 [c]))
  6398  	// cond: isPowerOfTwo32(c)
  6399  	// result: (Rsh32Ux64 n (Const64 <typ.UInt64> [log32(c)]))
  6400  	for {
  6401  		n := v_0
  6402  		if v_1.Op != OpConst32 {
  6403  			break
  6404  		}
  6405  		c := auxIntToInt32(v_1.AuxInt)
  6406  		if !(isPowerOfTwo32(c)) {
  6407  			break
  6408  		}
  6409  		v.reset(OpRsh32Ux64)
  6410  		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6411  		v0.AuxInt = int64ToAuxInt(log32(c))
  6412  		v.AddArg2(n, v0)
  6413  		return true
  6414  	}
  6415  	// match: (Div32u x (Const32 [c]))
  6416  	// cond: umagicOK32(c) && config.RegSize == 4 && umagic32(c).m&1 == 0 && config.useHmul
  6417  	// result: (Rsh32Ux64 <typ.UInt32> (Hmul32u <typ.UInt32> (Const32 <typ.UInt32> [int32(1<<31+umagic32(c).m/2)]) x) (Const64 <typ.UInt64> [umagic32(c).s-1]))
  6418  	for {
  6419  		x := v_0
  6420  		if v_1.Op != OpConst32 {
  6421  			break
  6422  		}
  6423  		c := auxIntToInt32(v_1.AuxInt)
  6424  		if !(umagicOK32(c) && config.RegSize == 4 && umagic32(c).m&1 == 0 && config.useHmul) {
  6425  			break
  6426  		}
  6427  		v.reset(OpRsh32Ux64)
  6428  		v.Type = typ.UInt32
  6429  		v0 := b.NewValue0(v.Pos, OpHmul32u, typ.UInt32)
  6430  		v1 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  6431  		v1.AuxInt = int32ToAuxInt(int32(1<<31 + umagic32(c).m/2))
  6432  		v0.AddArg2(v1, x)
  6433  		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6434  		v2.AuxInt = int64ToAuxInt(umagic32(c).s - 1)
  6435  		v.AddArg2(v0, v2)
  6436  		return true
  6437  	}
  6438  	// match: (Div32u x (Const32 [c]))
  6439  	// cond: umagicOK32(c) && config.RegSize == 4 && c&1 == 0 && config.useHmul
  6440  	// result: (Rsh32Ux64 <typ.UInt32> (Hmul32u <typ.UInt32> (Const32 <typ.UInt32> [int32(1<<31+(umagic32(c).m+1)/2)]) (Rsh32Ux64 <typ.UInt32> x (Const64 <typ.UInt64> [1]))) (Const64 <typ.UInt64> [umagic32(c).s-2]))
  6441  	for {
  6442  		x := v_0
  6443  		if v_1.Op != OpConst32 {
  6444  			break
  6445  		}
  6446  		c := auxIntToInt32(v_1.AuxInt)
  6447  		if !(umagicOK32(c) && config.RegSize == 4 && c&1 == 0 && config.useHmul) {
  6448  			break
  6449  		}
  6450  		v.reset(OpRsh32Ux64)
  6451  		v.Type = typ.UInt32
  6452  		v0 := b.NewValue0(v.Pos, OpHmul32u, typ.UInt32)
  6453  		v1 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  6454  		v1.AuxInt = int32ToAuxInt(int32(1<<31 + (umagic32(c).m+1)/2))
  6455  		v2 := b.NewValue0(v.Pos, OpRsh32Ux64, typ.UInt32)
  6456  		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6457  		v3.AuxInt = int64ToAuxInt(1)
  6458  		v2.AddArg2(x, v3)
  6459  		v0.AddArg2(v1, v2)
  6460  		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6461  		v4.AuxInt = int64ToAuxInt(umagic32(c).s - 2)
  6462  		v.AddArg2(v0, v4)
  6463  		return true
  6464  	}
  6465  	// match: (Div32u x (Const32 [c]))
  6466  	// cond: umagicOK32(c) && config.RegSize == 4 && config.useAvg && config.useHmul
  6467  	// result: (Rsh32Ux64 <typ.UInt32> (Avg32u x (Hmul32u <typ.UInt32> (Const32 <typ.UInt32> [int32(umagic32(c).m)]) x)) (Const64 <typ.UInt64> [umagic32(c).s-1]))
  6468  	for {
  6469  		x := v_0
  6470  		if v_1.Op != OpConst32 {
  6471  			break
  6472  		}
  6473  		c := auxIntToInt32(v_1.AuxInt)
  6474  		if !(umagicOK32(c) && config.RegSize == 4 && config.useAvg && config.useHmul) {
  6475  			break
  6476  		}
  6477  		v.reset(OpRsh32Ux64)
  6478  		v.Type = typ.UInt32
  6479  		v0 := b.NewValue0(v.Pos, OpAvg32u, typ.UInt32)
  6480  		v1 := b.NewValue0(v.Pos, OpHmul32u, typ.UInt32)
  6481  		v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  6482  		v2.AuxInt = int32ToAuxInt(int32(umagic32(c).m))
  6483  		v1.AddArg2(v2, x)
  6484  		v0.AddArg2(x, v1)
  6485  		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6486  		v3.AuxInt = int64ToAuxInt(umagic32(c).s - 1)
  6487  		v.AddArg2(v0, v3)
  6488  		return true
  6489  	}
  6490  	// match: (Div32u x (Const32 [c]))
  6491  	// cond: umagicOK32(c) && config.RegSize == 8 && umagic32(c).m&1 == 0
  6492  	// result: (Trunc64to32 (Rsh64Ux64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(1<<31+umagic32(c).m/2)]) (ZeroExt32to64 x)) (Const64 <typ.UInt64> [32+umagic32(c).s-1])))
  6493  	for {
  6494  		x := v_0
  6495  		if v_1.Op != OpConst32 {
  6496  			break
  6497  		}
  6498  		c := auxIntToInt32(v_1.AuxInt)
  6499  		if !(umagicOK32(c) && config.RegSize == 8 && umagic32(c).m&1 == 0) {
  6500  			break
  6501  		}
  6502  		v.reset(OpTrunc64to32)
  6503  		v0 := b.NewValue0(v.Pos, OpRsh64Ux64, typ.UInt64)
  6504  		v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
  6505  		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6506  		v2.AuxInt = int64ToAuxInt(int64(1<<31 + umagic32(c).m/2))
  6507  		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  6508  		v3.AddArg(x)
  6509  		v1.AddArg2(v2, v3)
  6510  		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6511  		v4.AuxInt = int64ToAuxInt(32 + umagic32(c).s - 1)
  6512  		v0.AddArg2(v1, v4)
  6513  		v.AddArg(v0)
  6514  		return true
  6515  	}
  6516  	// match: (Div32u x (Const32 [c]))
  6517  	// cond: umagicOK32(c) && config.RegSize == 8 && c&1 == 0
  6518  	// result: (Trunc64to32 (Rsh64Ux64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(1<<31+(umagic32(c).m+1)/2)]) (Rsh64Ux64 <typ.UInt64> (ZeroExt32to64 x) (Const64 <typ.UInt64> [1]))) (Const64 <typ.UInt64> [32+umagic32(c).s-2])))
  6519  	for {
  6520  		x := v_0
  6521  		if v_1.Op != OpConst32 {
  6522  			break
  6523  		}
  6524  		c := auxIntToInt32(v_1.AuxInt)
  6525  		if !(umagicOK32(c) && config.RegSize == 8 && c&1 == 0) {
  6526  			break
  6527  		}
  6528  		v.reset(OpTrunc64to32)
  6529  		v0 := b.NewValue0(v.Pos, OpRsh64Ux64, typ.UInt64)
  6530  		v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
  6531  		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6532  		v2.AuxInt = int64ToAuxInt(int64(1<<31 + (umagic32(c).m+1)/2))
  6533  		v3 := b.NewValue0(v.Pos, OpRsh64Ux64, typ.UInt64)
  6534  		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  6535  		v4.AddArg(x)
  6536  		v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6537  		v5.AuxInt = int64ToAuxInt(1)
  6538  		v3.AddArg2(v4, v5)
  6539  		v1.AddArg2(v2, v3)
  6540  		v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6541  		v6.AuxInt = int64ToAuxInt(32 + umagic32(c).s - 2)
  6542  		v0.AddArg2(v1, v6)
  6543  		v.AddArg(v0)
  6544  		return true
  6545  	}
  6546  	// match: (Div32u x (Const32 [c]))
  6547  	// cond: umagicOK32(c) && config.RegSize == 8 && config.useAvg
  6548  	// result: (Trunc64to32 (Rsh64Ux64 <typ.UInt64> (Avg64u (Lsh64x64 <typ.UInt64> (ZeroExt32to64 x) (Const64 <typ.UInt64> [32])) (Mul64 <typ.UInt64> (Const64 <typ.UInt32> [int64(umagic32(c).m)]) (ZeroExt32to64 x))) (Const64 <typ.UInt64> [32+umagic32(c).s-1])))
  6549  	for {
  6550  		x := v_0
  6551  		if v_1.Op != OpConst32 {
  6552  			break
  6553  		}
  6554  		c := auxIntToInt32(v_1.AuxInt)
  6555  		if !(umagicOK32(c) && config.RegSize == 8 && config.useAvg) {
  6556  			break
  6557  		}
  6558  		v.reset(OpTrunc64to32)
  6559  		v0 := b.NewValue0(v.Pos, OpRsh64Ux64, typ.UInt64)
  6560  		v1 := b.NewValue0(v.Pos, OpAvg64u, typ.UInt64)
  6561  		v2 := b.NewValue0(v.Pos, OpLsh64x64, typ.UInt64)
  6562  		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  6563  		v3.AddArg(x)
  6564  		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6565  		v4.AuxInt = int64ToAuxInt(32)
  6566  		v2.AddArg2(v3, v4)
  6567  		v5 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
  6568  		v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt32)
  6569  		v6.AuxInt = int64ToAuxInt(int64(umagic32(c).m))
  6570  		v5.AddArg2(v6, v3)
  6571  		v1.AddArg2(v2, v5)
  6572  		v7 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6573  		v7.AuxInt = int64ToAuxInt(32 + umagic32(c).s - 1)
  6574  		v0.AddArg2(v1, v7)
  6575  		v.AddArg(v0)
  6576  		return true
  6577  	}
  6578  	return false
  6579  }
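// Note: the Div32u rules above mirror the Div16u strategy at 32 bits. The magic
// multiplier 1<<32 + umagic32(c).m needs 33 bits, so when neither the even-m nor
// the even-c reduction applies, the fallback uses an averaging op: Avg32u(a, b)
// (Avg64u on 64-bit targets) computes (a+b)>>1 without dropping the carry, which in
// effect restores the extra bit of the product x*(1<<32+m) before the remaining
// shift by s-1.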
  6580  func rewriteValuegeneric_OpDiv64(v *Value) bool {
  6581  	v_1 := v.Args[1]
  6582  	v_0 := v.Args[0]
  6583  	b := v.Block
  6584  	config := b.Func.Config
  6585  	typ := &b.Func.Config.Types
  6586  	// match: (Div64 (Const64 [c]) (Const64 [d]))
  6587  	// cond: d != 0
  6588  	// result: (Const64 [c/d])
  6589  	for {
  6590  		if v_0.Op != OpConst64 {
  6591  			break
  6592  		}
  6593  		c := auxIntToInt64(v_0.AuxInt)
  6594  		if v_1.Op != OpConst64 {
  6595  			break
  6596  		}
  6597  		d := auxIntToInt64(v_1.AuxInt)
  6598  		if !(d != 0) {
  6599  			break
  6600  		}
  6601  		v.reset(OpConst64)
  6602  		v.AuxInt = int64ToAuxInt(c / d)
  6603  		return true
  6604  	}
  6605  	// match: (Div64 n (Const64 [c]))
  6606  	// cond: isNonNegative(n) && isPowerOfTwo64(c)
  6607  	// result: (Rsh64Ux64 n (Const64 <typ.UInt64> [log64(c)]))
  6608  	for {
  6609  		n := v_0
  6610  		if v_1.Op != OpConst64 {
  6611  			break
  6612  		}
  6613  		c := auxIntToInt64(v_1.AuxInt)
  6614  		if !(isNonNegative(n) && isPowerOfTwo64(c)) {
  6615  			break
  6616  		}
  6617  		v.reset(OpRsh64Ux64)
  6618  		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6619  		v0.AuxInt = int64ToAuxInt(log64(c))
  6620  		v.AddArg2(n, v0)
  6621  		return true
  6622  	}
  6623  	// match: (Div64 n (Const64 [-1<<63]))
  6624  	// cond: isNonNegative(n)
  6625  	// result: (Const64 [0])
  6626  	for {
  6627  		n := v_0
  6628  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != -1<<63 || !(isNonNegative(n)) {
  6629  			break
  6630  		}
  6631  		v.reset(OpConst64)
  6632  		v.AuxInt = int64ToAuxInt(0)
  6633  		return true
  6634  	}
  6635  	// match: (Div64 <t> n (Const64 [c]))
  6636  	// cond: c < 0 && c != -1<<63
  6637  	// result: (Neg64 (Div64 <t> n (Const64 <t> [-c])))
  6638  	for {
  6639  		t := v.Type
  6640  		n := v_0
  6641  		if v_1.Op != OpConst64 {
  6642  			break
  6643  		}
  6644  		c := auxIntToInt64(v_1.AuxInt)
  6645  		if !(c < 0 && c != -1<<63) {
  6646  			break
  6647  		}
  6648  		v.reset(OpNeg64)
  6649  		v0 := b.NewValue0(v.Pos, OpDiv64, t)
  6650  		v1 := b.NewValue0(v.Pos, OpConst64, t)
  6651  		v1.AuxInt = int64ToAuxInt(-c)
  6652  		v0.AddArg2(n, v1)
  6653  		v.AddArg(v0)
  6654  		return true
  6655  	}
  6656  	// match: (Div64 <t> x (Const64 [-1<<63]))
  6657  	// result: (Rsh64Ux64 (And64 <t> x (Neg64 <t> x)) (Const64 <typ.UInt64> [63]))
  6658  	for {
  6659  		t := v.Type
  6660  		x := v_0
  6661  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != -1<<63 {
  6662  			break
  6663  		}
  6664  		v.reset(OpRsh64Ux64)
  6665  		v0 := b.NewValue0(v.Pos, OpAnd64, t)
  6666  		v1 := b.NewValue0(v.Pos, OpNeg64, t)
  6667  		v1.AddArg(x)
  6668  		v0.AddArg2(x, v1)
  6669  		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6670  		v2.AuxInt = int64ToAuxInt(63)
  6671  		v.AddArg2(v0, v2)
  6672  		return true
  6673  	}
  6674  	// match: (Div64 <t> n (Const64 [c]))
  6675  	// cond: isPowerOfTwo64(c)
  6676  	// result: (Rsh64x64 (Add64 <t> n (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [int64(64-log64(c))]))) (Const64 <typ.UInt64> [int64(log64(c))]))
  6677  	for {
  6678  		t := v.Type
  6679  		n := v_0
  6680  		if v_1.Op != OpConst64 {
  6681  			break
  6682  		}
  6683  		c := auxIntToInt64(v_1.AuxInt)
  6684  		if !(isPowerOfTwo64(c)) {
  6685  			break
  6686  		}
  6687  		v.reset(OpRsh64x64)
  6688  		v0 := b.NewValue0(v.Pos, OpAdd64, t)
  6689  		v1 := b.NewValue0(v.Pos, OpRsh64Ux64, t)
  6690  		v2 := b.NewValue0(v.Pos, OpRsh64x64, t)
  6691  		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6692  		v3.AuxInt = int64ToAuxInt(63)
  6693  		v2.AddArg2(n, v3)
  6694  		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6695  		v4.AuxInt = int64ToAuxInt(int64(64 - log64(c)))
  6696  		v1.AddArg2(v2, v4)
  6697  		v0.AddArg2(n, v1)
  6698  		v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6699  		v5.AuxInt = int64ToAuxInt(int64(log64(c)))
  6700  		v.AddArg2(v0, v5)
  6701  		return true
  6702  	}
  6703  	// match: (Div64 <t> x (Const64 [c]))
  6704  	// cond: smagicOK64(c) && smagic64(c).m&1 == 0 && config.useHmul
  6705  	// result: (Sub64 <t> (Rsh64x64 <t> (Hmul64 <t> (Const64 <typ.UInt64> [int64(smagic64(c).m/2)]) x) (Const64 <typ.UInt64> [smagic64(c).s-1])) (Rsh64x64 <t> x (Const64 <typ.UInt64> [63])))
  6706  	for {
  6707  		t := v.Type
  6708  		x := v_0
  6709  		if v_1.Op != OpConst64 {
  6710  			break
  6711  		}
  6712  		c := auxIntToInt64(v_1.AuxInt)
  6713  		if !(smagicOK64(c) && smagic64(c).m&1 == 0 && config.useHmul) {
  6714  			break
  6715  		}
  6716  		v.reset(OpSub64)
  6717  		v.Type = t
  6718  		v0 := b.NewValue0(v.Pos, OpRsh64x64, t)
  6719  		v1 := b.NewValue0(v.Pos, OpHmul64, t)
  6720  		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6721  		v2.AuxInt = int64ToAuxInt(int64(smagic64(c).m / 2))
  6722  		v1.AddArg2(v2, x)
  6723  		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6724  		v3.AuxInt = int64ToAuxInt(smagic64(c).s - 1)
  6725  		v0.AddArg2(v1, v3)
  6726  		v4 := b.NewValue0(v.Pos, OpRsh64x64, t)
  6727  		v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6728  		v5.AuxInt = int64ToAuxInt(63)
  6729  		v4.AddArg2(x, v5)
  6730  		v.AddArg2(v0, v4)
  6731  		return true
  6732  	}
  6733  	// match: (Div64 <t> x (Const64 [c]))
  6734  	// cond: smagicOK64(c) && smagic64(c).m&1 != 0 && config.useHmul
  6735  	// result: (Sub64 <t> (Rsh64x64 <t> (Add64 <t> (Hmul64 <t> (Const64 <typ.UInt64> [int64(smagic64(c).m)]) x) x) (Const64 <typ.UInt64> [smagic64(c).s])) (Rsh64x64 <t> x (Const64 <typ.UInt64> [63])))
  6736  	for {
  6737  		t := v.Type
  6738  		x := v_0
  6739  		if v_1.Op != OpConst64 {
  6740  			break
  6741  		}
  6742  		c := auxIntToInt64(v_1.AuxInt)
  6743  		if !(smagicOK64(c) && smagic64(c).m&1 != 0 && config.useHmul) {
  6744  			break
  6745  		}
  6746  		v.reset(OpSub64)
  6747  		v.Type = t
  6748  		v0 := b.NewValue0(v.Pos, OpRsh64x64, t)
  6749  		v1 := b.NewValue0(v.Pos, OpAdd64, t)
  6750  		v2 := b.NewValue0(v.Pos, OpHmul64, t)
  6751  		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6752  		v3.AuxInt = int64ToAuxInt(int64(smagic64(c).m))
  6753  		v2.AddArg2(v3, x)
  6754  		v1.AddArg2(v2, x)
  6755  		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6756  		v4.AuxInt = int64ToAuxInt(smagic64(c).s)
  6757  		v0.AddArg2(v1, v4)
  6758  		v5 := b.NewValue0(v.Pos, OpRsh64x64, t)
  6759  		v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6760  		v6.AuxInt = int64ToAuxInt(63)
  6761  		v5.AddArg2(x, v6)
  6762  		v.AddArg2(v0, v5)
  6763  		return true
  6764  	}
  6765  	return false
  6766  }
  6767  func rewriteValuegeneric_OpDiv64F(v *Value) bool {
  6768  	v_1 := v.Args[1]
  6769  	v_0 := v.Args[0]
  6770  	b := v.Block
  6771  	// match: (Div64F (Const64F [c]) (Const64F [d]))
  6772  	// cond: c/d == c/d
  6773  	// result: (Const64F [c/d])
  6774  	for {
  6775  		if v_0.Op != OpConst64F {
  6776  			break
  6777  		}
  6778  		c := auxIntToFloat64(v_0.AuxInt)
  6779  		if v_1.Op != OpConst64F {
  6780  			break
  6781  		}
  6782  		d := auxIntToFloat64(v_1.AuxInt)
  6783  		if !(c/d == c/d) {
  6784  			break
  6785  		}
  6786  		v.reset(OpConst64F)
  6787  		v.AuxInt = float64ToAuxInt(c / d)
  6788  		return true
  6789  	}
  6790  	// match: (Div64F x (Const64F <t> [c]))
  6791  	// cond: reciprocalExact64(c)
  6792  	// result: (Mul64F x (Const64F <t> [1/c]))
  6793  	for {
  6794  		x := v_0
  6795  		if v_1.Op != OpConst64F {
  6796  			break
  6797  		}
  6798  		t := v_1.Type
  6799  		c := auxIntToFloat64(v_1.AuxInt)
  6800  		if !(reciprocalExact64(c)) {
  6801  			break
  6802  		}
  6803  		v.reset(OpMul64F)
  6804  		v0 := b.NewValue0(v.Pos, OpConst64F, t)
  6805  		v0.AuxInt = float64ToAuxInt(1 / c)
  6806  		v.AddArg2(x, v0)
  6807  		return true
  6808  	}
  6809  	return false
  6810  }
  6811  func rewriteValuegeneric_OpDiv64u(v *Value) bool {
  6812  	v_1 := v.Args[1]
  6813  	v_0 := v.Args[0]
  6814  	b := v.Block
  6815  	config := b.Func.Config
  6816  	typ := &b.Func.Config.Types
  6817  	// match: (Div64u (Const64 [c]) (Const64 [d]))
  6818  	// cond: d != 0
  6819  	// result: (Const64 [int64(uint64(c)/uint64(d))])
  6820  	for {
  6821  		if v_0.Op != OpConst64 {
  6822  			break
  6823  		}
  6824  		c := auxIntToInt64(v_0.AuxInt)
  6825  		if v_1.Op != OpConst64 {
  6826  			break
  6827  		}
  6828  		d := auxIntToInt64(v_1.AuxInt)
  6829  		if !(d != 0) {
  6830  			break
  6831  		}
  6832  		v.reset(OpConst64)
  6833  		v.AuxInt = int64ToAuxInt(int64(uint64(c) / uint64(d)))
  6834  		return true
  6835  	}
  6836  	// match: (Div64u n (Const64 [c]))
  6837  	// cond: isPowerOfTwo64(c)
  6838  	// result: (Rsh64Ux64 n (Const64 <typ.UInt64> [log64(c)]))
  6839  	for {
  6840  		n := v_0
  6841  		if v_1.Op != OpConst64 {
  6842  			break
  6843  		}
  6844  		c := auxIntToInt64(v_1.AuxInt)
  6845  		if !(isPowerOfTwo64(c)) {
  6846  			break
  6847  		}
  6848  		v.reset(OpRsh64Ux64)
  6849  		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6850  		v0.AuxInt = int64ToAuxInt(log64(c))
  6851  		v.AddArg2(n, v0)
  6852  		return true
  6853  	}
  6854  	// match: (Div64u n (Const64 [-1<<63]))
  6855  	// result: (Rsh64Ux64 n (Const64 <typ.UInt64> [63]))
  6856  	for {
  6857  		n := v_0
  6858  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != -1<<63 {
  6859  			break
  6860  		}
  6861  		v.reset(OpRsh64Ux64)
  6862  		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6863  		v0.AuxInt = int64ToAuxInt(63)
  6864  		v.AddArg2(n, v0)
  6865  		return true
  6866  	}
  6867  	// match: (Div64u x (Const64 [c]))
  6868  	// cond: c > 0 && c <= 0xFFFF && umagicOK32(int32(c)) && config.RegSize == 4 && config.useHmul
  6869  	// result: (Add64 (Add64 <typ.UInt64> (Add64 <typ.UInt64> (Lsh64x64 <typ.UInt64> (ZeroExt32to64 (Div32u <typ.UInt32> (Trunc64to32 <typ.UInt32> (Rsh64Ux64 <typ.UInt64> x (Const64 <typ.UInt64> [32]))) (Const32 <typ.UInt32> [int32(c)]))) (Const64 <typ.UInt64> [32])) (ZeroExt32to64 (Div32u <typ.UInt32> (Trunc64to32 <typ.UInt32> x) (Const32 <typ.UInt32> [int32(c)])))) (Mul64 <typ.UInt64> (ZeroExt32to64 <typ.UInt64> (Mod32u <typ.UInt32> (Trunc64to32 <typ.UInt32> (Rsh64Ux64 <typ.UInt64> x (Const64 <typ.UInt64> [32]))) (Const32 <typ.UInt32> [int32(c)]))) (Const64 <typ.UInt64> [int64((1<<32)/c)]))) (ZeroExt32to64 (Div32u <typ.UInt32> (Add32 <typ.UInt32> (Mod32u <typ.UInt32> (Trunc64to32 <typ.UInt32> x) (Const32 <typ.UInt32> [int32(c)])) (Mul32 <typ.UInt32> (Mod32u <typ.UInt32> (Trunc64to32 <typ.UInt32> (Rsh64Ux64 <typ.UInt64> x (Const64 <typ.UInt64> [32]))) (Const32 <typ.UInt32> [int32(c)])) (Const32 <typ.UInt32> [int32((1<<32)%c)]))) (Const32 <typ.UInt32> [int32(c)]))))
  6870  	for {
  6871  		x := v_0
  6872  		if v_1.Op != OpConst64 {
  6873  			break
  6874  		}
  6875  		c := auxIntToInt64(v_1.AuxInt)
  6876  		if !(c > 0 && c <= 0xFFFF && umagicOK32(int32(c)) && config.RegSize == 4 && config.useHmul) {
  6877  			break
  6878  		}
  6879  		v.reset(OpAdd64)
  6880  		v0 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
  6881  		v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
  6882  		v2 := b.NewValue0(v.Pos, OpLsh64x64, typ.UInt64)
  6883  		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  6884  		v4 := b.NewValue0(v.Pos, OpDiv32u, typ.UInt32)
  6885  		v5 := b.NewValue0(v.Pos, OpTrunc64to32, typ.UInt32)
  6886  		v6 := b.NewValue0(v.Pos, OpRsh64Ux64, typ.UInt64)
  6887  		v7 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6888  		v7.AuxInt = int64ToAuxInt(32)
  6889  		v6.AddArg2(x, v7)
  6890  		v5.AddArg(v6)
  6891  		v8 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  6892  		v8.AuxInt = int32ToAuxInt(int32(c))
  6893  		v4.AddArg2(v5, v8)
  6894  		v3.AddArg(v4)
  6895  		v2.AddArg2(v3, v7)
  6896  		v9 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  6897  		v10 := b.NewValue0(v.Pos, OpDiv32u, typ.UInt32)
  6898  		v11 := b.NewValue0(v.Pos, OpTrunc64to32, typ.UInt32)
  6899  		v11.AddArg(x)
  6900  		v10.AddArg2(v11, v8)
  6901  		v9.AddArg(v10)
  6902  		v1.AddArg2(v2, v9)
  6903  		v12 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
  6904  		v13 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  6905  		v14 := b.NewValue0(v.Pos, OpMod32u, typ.UInt32)
  6906  		v14.AddArg2(v5, v8)
  6907  		v13.AddArg(v14)
  6908  		v15 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6909  		v15.AuxInt = int64ToAuxInt(int64((1 << 32) / c))
  6910  		v12.AddArg2(v13, v15)
  6911  		v0.AddArg2(v1, v12)
  6912  		v16 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  6913  		v17 := b.NewValue0(v.Pos, OpDiv32u, typ.UInt32)
  6914  		v18 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
  6915  		v19 := b.NewValue0(v.Pos, OpMod32u, typ.UInt32)
  6916  		v19.AddArg2(v11, v8)
  6917  		v20 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
  6918  		v21 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  6919  		v21.AuxInt = int32ToAuxInt(int32((1 << 32) % c))
  6920  		v20.AddArg2(v14, v21)
  6921  		v18.AddArg2(v19, v20)
  6922  		v17.AddArg2(v18, v8)
  6923  		v16.AddArg(v17)
  6924  		v.AddArg2(v0, v16)
  6925  		return true
  6926  	}
  6927  	// match: (Div64u x (Const64 [c]))
  6928  	// cond: umagicOK64(c) && config.RegSize == 8 && umagic64(c).m&1 == 0 && config.useHmul
  6929  	// result: (Rsh64Ux64 <typ.UInt64> (Hmul64u <typ.UInt64> (Const64 <typ.UInt64> [int64(1<<63+umagic64(c).m/2)]) x) (Const64 <typ.UInt64> [umagic64(c).s-1]))
  6930  	for {
  6931  		x := v_0
  6932  		if v_1.Op != OpConst64 {
  6933  			break
  6934  		}
  6935  		c := auxIntToInt64(v_1.AuxInt)
  6936  		if !(umagicOK64(c) && config.RegSize == 8 && umagic64(c).m&1 == 0 && config.useHmul) {
  6937  			break
  6938  		}
  6939  		v.reset(OpRsh64Ux64)
  6940  		v.Type = typ.UInt64
  6941  		v0 := b.NewValue0(v.Pos, OpHmul64u, typ.UInt64)
  6942  		v1 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6943  		v1.AuxInt = int64ToAuxInt(int64(1<<63 + umagic64(c).m/2))
  6944  		v0.AddArg2(v1, x)
  6945  		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6946  		v2.AuxInt = int64ToAuxInt(umagic64(c).s - 1)
  6947  		v.AddArg2(v0, v2)
  6948  		return true
  6949  	}
  6950  	// match: (Div64u x (Const64 [c]))
  6951  	// cond: umagicOK64(c) && config.RegSize == 8 && c&1 == 0 && config.useHmul
  6952  	// result: (Rsh64Ux64 <typ.UInt64> (Hmul64u <typ.UInt64> (Const64 <typ.UInt64> [int64(1<<63+(umagic64(c).m+1)/2)]) (Rsh64Ux64 <typ.UInt64> x (Const64 <typ.UInt64> [1]))) (Const64 <typ.UInt64> [umagic64(c).s-2]))
  6953  	for {
  6954  		x := v_0
  6955  		if v_1.Op != OpConst64 {
  6956  			break
  6957  		}
  6958  		c := auxIntToInt64(v_1.AuxInt)
  6959  		if !(umagicOK64(c) && config.RegSize == 8 && c&1 == 0 && config.useHmul) {
  6960  			break
  6961  		}
  6962  		v.reset(OpRsh64Ux64)
  6963  		v.Type = typ.UInt64
  6964  		v0 := b.NewValue0(v.Pos, OpHmul64u, typ.UInt64)
  6965  		v1 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6966  		v1.AuxInt = int64ToAuxInt(int64(1<<63 + (umagic64(c).m+1)/2))
  6967  		v2 := b.NewValue0(v.Pos, OpRsh64Ux64, typ.UInt64)
  6968  		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6969  		v3.AuxInt = int64ToAuxInt(1)
  6970  		v2.AddArg2(x, v3)
  6971  		v0.AddArg2(v1, v2)
  6972  		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6973  		v4.AuxInt = int64ToAuxInt(umagic64(c).s - 2)
  6974  		v.AddArg2(v0, v4)
  6975  		return true
  6976  	}
  6977  	// match: (Div64u x (Const64 [c]))
  6978  	// cond: umagicOK64(c) && config.RegSize == 8 && config.useAvg && config.useHmul
  6979  	// result: (Rsh64Ux64 <typ.UInt64> (Avg64u x (Hmul64u <typ.UInt64> (Const64 <typ.UInt64> [int64(umagic64(c).m)]) x)) (Const64 <typ.UInt64> [umagic64(c).s-1]))
  6980  	for {
  6981  		x := v_0
  6982  		if v_1.Op != OpConst64 {
  6983  			break
  6984  		}
  6985  		c := auxIntToInt64(v_1.AuxInt)
  6986  		if !(umagicOK64(c) && config.RegSize == 8 && config.useAvg && config.useHmul) {
  6987  			break
  6988  		}
  6989  		v.reset(OpRsh64Ux64)
  6990  		v.Type = typ.UInt64
  6991  		v0 := b.NewValue0(v.Pos, OpAvg64u, typ.UInt64)
  6992  		v1 := b.NewValue0(v.Pos, OpHmul64u, typ.UInt64)
  6993  		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6994  		v2.AuxInt = int64ToAuxInt(int64(umagic64(c).m))
  6995  		v1.AddArg2(v2, x)
  6996  		v0.AddArg2(x, v1)
  6997  		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6998  		v3.AuxInt = int64ToAuxInt(umagic64(c).s - 1)
  6999  		v.AddArg2(v0, v3)
  7000  		return true
  7001  	}
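	// Illustrative note (not from _gen/generic.rules): the three rules above
	// strength-reduce unsigned 64-bit division by a constant c using the
	// magic-number identity x/c == (x*M) >> (64+s), where M = ceil(2^(64+s)/c)
	// and umagic64(c).m holds M with its implicit 2^64 bit dropped. The
	// variants differ only in how they avoid the 65-bit multiplier: halve M
	// when it is even, pre-shift x when c is even, or use Avg64u to fold the
	// extra bit back in when neither applies.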
  7002  	return false
  7003  }
  7004  func rewriteValuegeneric_OpDiv8(v *Value) bool {
  7005  	v_1 := v.Args[1]
  7006  	v_0 := v.Args[0]
  7007  	b := v.Block
  7008  	typ := &b.Func.Config.Types
  7009  	// match: (Div8 (Const8 [c]) (Const8 [d]))
  7010  	// cond: d != 0
  7011  	// result: (Const8 [c/d])
  7012  	for {
  7013  		if v_0.Op != OpConst8 {
  7014  			break
  7015  		}
  7016  		c := auxIntToInt8(v_0.AuxInt)
  7017  		if v_1.Op != OpConst8 {
  7018  			break
  7019  		}
  7020  		d := auxIntToInt8(v_1.AuxInt)
  7021  		if !(d != 0) {
  7022  			break
  7023  		}
  7024  		v.reset(OpConst8)
  7025  		v.AuxInt = int8ToAuxInt(c / d)
  7026  		return true
  7027  	}
  7028  	// match: (Div8 n (Const8 [c]))
  7029  	// cond: isNonNegative(n) && isPowerOfTwo8(c)
  7030  	// result: (Rsh8Ux64 n (Const64 <typ.UInt64> [log8(c)]))
  7031  	for {
  7032  		n := v_0
  7033  		if v_1.Op != OpConst8 {
  7034  			break
  7035  		}
  7036  		c := auxIntToInt8(v_1.AuxInt)
  7037  		if !(isNonNegative(n) && isPowerOfTwo8(c)) {
  7038  			break
  7039  		}
  7040  		v.reset(OpRsh8Ux64)
  7041  		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7042  		v0.AuxInt = int64ToAuxInt(log8(c))
  7043  		v.AddArg2(n, v0)
  7044  		return true
  7045  	}
  7046  	// match: (Div8 <t> n (Const8 [c]))
  7047  	// cond: c < 0 && c != -1<<7
  7048  	// result: (Neg8 (Div8 <t> n (Const8 <t> [-c])))
  7049  	for {
  7050  		t := v.Type
  7051  		n := v_0
  7052  		if v_1.Op != OpConst8 {
  7053  			break
  7054  		}
  7055  		c := auxIntToInt8(v_1.AuxInt)
  7056  		if !(c < 0 && c != -1<<7) {
  7057  			break
  7058  		}
  7059  		v.reset(OpNeg8)
  7060  		v0 := b.NewValue0(v.Pos, OpDiv8, t)
  7061  		v1 := b.NewValue0(v.Pos, OpConst8, t)
  7062  		v1.AuxInt = int8ToAuxInt(-c)
  7063  		v0.AddArg2(n, v1)
  7064  		v.AddArg(v0)
  7065  		return true
  7066  	}
  7067  	// match: (Div8 <t> x (Const8 [-1<<7 ]))
  7068  	// result: (Rsh8Ux64 (And8 <t> x (Neg8 <t> x)) (Const64 <typ.UInt64> [7 ]))
  7069  	for {
  7070  		t := v.Type
  7071  		x := v_0
  7072  		if v_1.Op != OpConst8 || auxIntToInt8(v_1.AuxInt) != -1<<7 {
  7073  			break
  7074  		}
  7075  		v.reset(OpRsh8Ux64)
  7076  		v0 := b.NewValue0(v.Pos, OpAnd8, t)
  7077  		v1 := b.NewValue0(v.Pos, OpNeg8, t)
  7078  		v1.AddArg(x)
  7079  		v0.AddArg2(x, v1)
  7080  		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7081  		v2.AuxInt = int64ToAuxInt(7)
  7082  		v.AddArg2(v0, v2)
  7083  		return true
  7084  	}
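	// Illustrative note (not from _gen/generic.rules): int8 division by -128
	// can only produce 1 (when x == -128) or 0 (any other x). The rule above
	// computes exactly that: x & -x isolates the lowest set bit of x, which
	// equals 0x80 only when x == -128, and the unsigned shift by 7 turns that
	// into 1 or 0.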
  7085  	// match: (Div8 <t> n (Const8 [c]))
  7086  	// cond: isPowerOfTwo8(c)
  7087  	// result: (Rsh8x64 (Add8 <t> n (Rsh8Ux64 <t> (Rsh8x64 <t> n (Const64 <typ.UInt64> [ 7])) (Const64 <typ.UInt64> [int64( 8-log8(c))]))) (Const64 <typ.UInt64> [int64(log8(c))]))
  7088  	for {
  7089  		t := v.Type
  7090  		n := v_0
  7091  		if v_1.Op != OpConst8 {
  7092  			break
  7093  		}
  7094  		c := auxIntToInt8(v_1.AuxInt)
  7095  		if !(isPowerOfTwo8(c)) {
  7096  			break
  7097  		}
  7098  		v.reset(OpRsh8x64)
  7099  		v0 := b.NewValue0(v.Pos, OpAdd8, t)
  7100  		v1 := b.NewValue0(v.Pos, OpRsh8Ux64, t)
  7101  		v2 := b.NewValue0(v.Pos, OpRsh8x64, t)
  7102  		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7103  		v3.AuxInt = int64ToAuxInt(7)
  7104  		v2.AddArg2(n, v3)
  7105  		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7106  		v4.AuxInt = int64ToAuxInt(int64(8 - log8(c)))
  7107  		v1.AddArg2(v2, v4)
  7108  		v0.AddArg2(n, v1)
  7109  		v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7110  		v5.AuxInt = int64ToAuxInt(int64(log8(c)))
  7111  		v.AddArg2(v0, v5)
  7112  		return true
  7113  	}
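	// Illustrative note (not from _gen/generic.rules): signed division must
	// truncate toward zero, while an arithmetic shift rounds toward negative
	// infinity, so the rule above first biases negative n by c-1: (n >> 7) is
	// 0 or -1, and shifting that unsigned by 8-log2(c) yields 0 or c-1. For
	// example, with c = 4: -5 >> 2 alone would give -2, but (-5 + 3) >> 2 = -1,
	// matching Go's -5/4.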
  7114  	// match: (Div8 <t> x (Const8 [c]))
  7115  	// cond: smagicOK8(c)
  7116  	// result: (Sub8 <t> (Rsh32x64 <t> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(smagic8(c).m)]) (SignExt8to32 x)) (Const64 <typ.UInt64> [8+smagic8(c).s])) (Rsh32x64 <t> (SignExt8to32 x) (Const64 <typ.UInt64> [31])))
  7117  	for {
  7118  		t := v.Type
  7119  		x := v_0
  7120  		if v_1.Op != OpConst8 {
  7121  			break
  7122  		}
  7123  		c := auxIntToInt8(v_1.AuxInt)
  7124  		if !(smagicOK8(c)) {
  7125  			break
  7126  		}
  7127  		v.reset(OpSub8)
  7128  		v.Type = t
  7129  		v0 := b.NewValue0(v.Pos, OpRsh32x64, t)
  7130  		v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
  7131  		v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  7132  		v2.AuxInt = int32ToAuxInt(int32(smagic8(c).m))
  7133  		v3 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  7134  		v3.AddArg(x)
  7135  		v1.AddArg2(v2, v3)
  7136  		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7137  		v4.AuxInt = int64ToAuxInt(8 + smagic8(c).s)
  7138  		v0.AddArg2(v1, v4)
  7139  		v5 := b.NewValue0(v.Pos, OpRsh32x64, t)
  7140  		v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7141  		v6.AuxInt = int64ToAuxInt(31)
  7142  		v5.AddArg2(v3, v6)
  7143  		v.AddArg2(v0, v5)
  7144  		return true
  7145  	}
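	// Illustrative note (not from _gen/generic.rules): for remaining constants
	// the rule above applies signed magic-number division in a widened 32-bit
	// computation: roughly x/c == (x*smagic8(c).m) >> (8+s), plus 1 when x is
	// negative. The Sub8 supplies that correction by subtracting the sign word
	// (SignExt8to32 x) >> 31, which is 0 or -1.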
  7146  	return false
  7147  }
  7148  func rewriteValuegeneric_OpDiv8u(v *Value) bool {
  7149  	v_1 := v.Args[1]
  7150  	v_0 := v.Args[0]
  7151  	b := v.Block
  7152  	typ := &b.Func.Config.Types
  7153  	// match: (Div8u (Const8 [c]) (Const8 [d]))
  7154  	// cond: d != 0
  7155  	// result: (Const8 [int8(uint8(c)/uint8(d))])
  7156  	for {
  7157  		if v_0.Op != OpConst8 {
  7158  			break
  7159  		}
  7160  		c := auxIntToInt8(v_0.AuxInt)
  7161  		if v_1.Op != OpConst8 {
  7162  			break
  7163  		}
  7164  		d := auxIntToInt8(v_1.AuxInt)
  7165  		if !(d != 0) {
  7166  			break
  7167  		}
  7168  		v.reset(OpConst8)
  7169  		v.AuxInt = int8ToAuxInt(int8(uint8(c) / uint8(d)))
  7170  		return true
  7171  	}
  7172  	// match: (Div8u n (Const8 [c]))
  7173  	// cond: isPowerOfTwo8(c)
  7174  	// result: (Rsh8Ux64 n (Const64 <typ.UInt64> [log8(c)]))
  7175  	for {
  7176  		n := v_0
  7177  		if v_1.Op != OpConst8 {
  7178  			break
  7179  		}
  7180  		c := auxIntToInt8(v_1.AuxInt)
  7181  		if !(isPowerOfTwo8(c)) {
  7182  			break
  7183  		}
  7184  		v.reset(OpRsh8Ux64)
  7185  		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7186  		v0.AuxInt = int64ToAuxInt(log8(c))
  7187  		v.AddArg2(n, v0)
  7188  		return true
  7189  	}
  7190  	// match: (Div8u x (Const8 [c]))
  7191  	// cond: umagicOK8(c)
  7192  	// result: (Trunc32to8 (Rsh32Ux64 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(1<<8+umagic8(c).m)]) (ZeroExt8to32 x)) (Const64 <typ.UInt64> [8+umagic8(c).s])))
  7193  	for {
  7194  		x := v_0
  7195  		if v_1.Op != OpConst8 {
  7196  			break
  7197  		}
  7198  		c := auxIntToInt8(v_1.AuxInt)
  7199  		if !(umagicOK8(c)) {
  7200  			break
  7201  		}
  7202  		v.reset(OpTrunc32to8)
  7203  		v0 := b.NewValue0(v.Pos, OpRsh32Ux64, typ.UInt32)
  7204  		v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
  7205  		v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  7206  		v2.AuxInt = int32ToAuxInt(int32(1<<8 + umagic8(c).m))
  7207  		v3 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7208  		v3.AddArg(x)
  7209  		v1.AddArg2(v2, v3)
  7210  		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7211  		v4.AuxInt = int64ToAuxInt(8 + umagic8(c).s)
  7212  		v0.AddArg2(v1, v4)
  7213  		v.AddArg(v0)
  7214  		return true
  7215  	}
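	// Illustrative note (not from _gen/generic.rules): the rule above is 8-bit
	// unsigned magic division carried out in 32 bits, where the product always
	// fits: x/c == (x * (256 + umagic8(c).m)) >> (8 + umagic8(c).s). As a
	// hand-worked example (constants computed here, not quoted from magic.go),
	// c = 5 should give multiplier 256+154 = 410 and shift 11, and x*410 >> 11
	// does equal x/5 for every x in [0, 255].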
  7216  	return false
  7217  }
  7218  func rewriteValuegeneric_OpEq16(v *Value) bool {
  7219  	v_1 := v.Args[1]
  7220  	v_0 := v.Args[0]
  7221  	b := v.Block
  7222  	config := b.Func.Config
  7223  	typ := &b.Func.Config.Types
  7224  	// match: (Eq16 x x)
  7225  	// result: (ConstBool [true])
  7226  	for {
  7227  		x := v_0
  7228  		if x != v_1 {
  7229  			break
  7230  		}
  7231  		v.reset(OpConstBool)
  7232  		v.AuxInt = boolToAuxInt(true)
  7233  		return true
  7234  	}
  7235  	// match: (Eq16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x))
  7236  	// result: (Eq16 (Const16 <t> [c-d]) x)
  7237  	for {
  7238  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7239  			if v_0.Op != OpConst16 {
  7240  				continue
  7241  			}
  7242  			t := v_0.Type
  7243  			c := auxIntToInt16(v_0.AuxInt)
  7244  			if v_1.Op != OpAdd16 {
  7245  				continue
  7246  			}
  7247  			_ = v_1.Args[1]
  7248  			v_1_0 := v_1.Args[0]
  7249  			v_1_1 := v_1.Args[1]
  7250  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  7251  				if v_1_0.Op != OpConst16 || v_1_0.Type != t {
  7252  					continue
  7253  				}
  7254  				d := auxIntToInt16(v_1_0.AuxInt)
  7255  				x := v_1_1
  7256  				v.reset(OpEq16)
  7257  				v0 := b.NewValue0(v.Pos, OpConst16, t)
  7258  				v0.AuxInt = int16ToAuxInt(c - d)
  7259  				v.AddArg2(v0, x)
  7260  				return true
  7261  			}
  7262  		}
  7263  		break
  7264  	}
  7265  	// match: (Eq16 (Const16 [c]) (Const16 [d]))
  7266  	// result: (ConstBool [c == d])
  7267  	for {
  7268  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7269  			if v_0.Op != OpConst16 {
  7270  				continue
  7271  			}
  7272  			c := auxIntToInt16(v_0.AuxInt)
  7273  			if v_1.Op != OpConst16 {
  7274  				continue
  7275  			}
  7276  			d := auxIntToInt16(v_1.AuxInt)
  7277  			v.reset(OpConstBool)
  7278  			v.AuxInt = boolToAuxInt(c == d)
  7279  			return true
  7280  		}
  7281  		break
  7282  	}
  7283  	// match: (Eq16 (Mod16u x (Const16 [c])) (Const16 [0]))
  7284  	// cond: x.Op != OpConst16 && udivisibleOK16(c) && !hasSmallRotate(config)
  7285  	// result: (Eq32 (Mod32u <typ.UInt32> (ZeroExt16to32 <typ.UInt32> x) (Const32 <typ.UInt32> [int32(uint16(c))])) (Const32 <typ.UInt32> [0]))
  7286  	for {
  7287  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7288  			if v_0.Op != OpMod16u {
  7289  				continue
  7290  			}
  7291  			_ = v_0.Args[1]
  7292  			x := v_0.Args[0]
  7293  			v_0_1 := v_0.Args[1]
  7294  			if v_0_1.Op != OpConst16 {
  7295  				continue
  7296  			}
  7297  			c := auxIntToInt16(v_0_1.AuxInt)
  7298  			if v_1.Op != OpConst16 || auxIntToInt16(v_1.AuxInt) != 0 || !(x.Op != OpConst16 && udivisibleOK16(c) && !hasSmallRotate(config)) {
  7299  				continue
  7300  			}
  7301  			v.reset(OpEq32)
  7302  			v0 := b.NewValue0(v.Pos, OpMod32u, typ.UInt32)
  7303  			v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7304  			v1.AddArg(x)
  7305  			v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  7306  			v2.AuxInt = int32ToAuxInt(int32(uint16(c)))
  7307  			v0.AddArg2(v1, v2)
  7308  			v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  7309  			v3.AuxInt = int32ToAuxInt(0)
  7310  			v.AddArg2(v0, v3)
  7311  			return true
  7312  		}
  7313  		break
  7314  	}
  7315  	// match: (Eq16 (Mod16 x (Const16 [c])) (Const16 [0]))
  7316  	// cond: x.Op != OpConst16 && sdivisibleOK16(c) && !hasSmallRotate(config)
  7317  	// result: (Eq32 (Mod32 <typ.Int32> (SignExt16to32 <typ.Int32> x) (Const32 <typ.Int32> [int32(c)])) (Const32 <typ.Int32> [0]))
  7318  	for {
  7319  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7320  			if v_0.Op != OpMod16 {
  7321  				continue
  7322  			}
  7323  			_ = v_0.Args[1]
  7324  			x := v_0.Args[0]
  7325  			v_0_1 := v_0.Args[1]
  7326  			if v_0_1.Op != OpConst16 {
  7327  				continue
  7328  			}
  7329  			c := auxIntToInt16(v_0_1.AuxInt)
  7330  			if v_1.Op != OpConst16 || auxIntToInt16(v_1.AuxInt) != 0 || !(x.Op != OpConst16 && sdivisibleOK16(c) && !hasSmallRotate(config)) {
  7331  				continue
  7332  			}
  7333  			v.reset(OpEq32)
  7334  			v0 := b.NewValue0(v.Pos, OpMod32, typ.Int32)
  7335  			v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  7336  			v1.AddArg(x)
  7337  			v2 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
  7338  			v2.AuxInt = int32ToAuxInt(int32(c))
  7339  			v0.AddArg2(v1, v2)
  7340  			v3 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
  7341  			v3.AuxInt = int32ToAuxInt(0)
  7342  			v.AddArg2(v0, v3)
  7343  			return true
  7344  		}
  7345  		break
  7346  	}
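	// Illustrative note (not from _gen/generic.rules): the two rules above
	// cover targets where hasSmallRotate reports no cheap 16-bit rotate.
	// Instead of the 16-bit divisibility-by-rotate trick used below, they
	// widen x%c == 0 to a 32-bit Mod so the corresponding Eq32 rules can
	// handle it.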
  7347  	// match: (Eq16 x (Mul16 (Const16 [c]) (Trunc64to16 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (ZeroExt16to64 x)) (Const64 [s]))) ) )
  7348  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic16(c).m) && s == 16+umagic16(c).s && x.Op != OpConst16 && udivisibleOK16(c)
  7349  	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int16(udivisible16(c).m)]) x) (Const16 <typ.UInt16> [int16(16-udivisible16(c).k)]) ) (Const16 <typ.UInt16> [int16(udivisible16(c).max)]) )
  7350  	for {
  7351  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7352  			x := v_0
  7353  			if v_1.Op != OpMul16 {
  7354  				continue
  7355  			}
  7356  			_ = v_1.Args[1]
  7357  			v_1_0 := v_1.Args[0]
  7358  			v_1_1 := v_1.Args[1]
  7359  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  7360  				if v_1_0.Op != OpConst16 {
  7361  					continue
  7362  				}
  7363  				c := auxIntToInt16(v_1_0.AuxInt)
  7364  				if v_1_1.Op != OpTrunc64to16 {
  7365  					continue
  7366  				}
  7367  				v_1_1_0 := v_1_1.Args[0]
  7368  				if v_1_1_0.Op != OpRsh64Ux64 {
  7369  					continue
  7370  				}
  7371  				_ = v_1_1_0.Args[1]
  7372  				mul := v_1_1_0.Args[0]
  7373  				if mul.Op != OpMul64 {
  7374  					continue
  7375  				}
  7376  				_ = mul.Args[1]
  7377  				mul_0 := mul.Args[0]
  7378  				mul_1 := mul.Args[1]
  7379  				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
  7380  					if mul_0.Op != OpConst64 {
  7381  						continue
  7382  					}
  7383  					m := auxIntToInt64(mul_0.AuxInt)
  7384  					if mul_1.Op != OpZeroExt16to64 || x != mul_1.Args[0] {
  7385  						continue
  7386  					}
  7387  					v_1_1_0_1 := v_1_1_0.Args[1]
  7388  					if v_1_1_0_1.Op != OpConst64 {
  7389  						continue
  7390  					}
  7391  					s := auxIntToInt64(v_1_1_0_1.AuxInt)
  7392  					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic16(c).m) && s == 16+umagic16(c).s && x.Op != OpConst16 && udivisibleOK16(c)) {
  7393  						continue
  7394  					}
  7395  					v.reset(OpLeq16U)
  7396  					v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
  7397  					v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
  7398  					v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  7399  					v2.AuxInt = int16ToAuxInt(int16(udivisible16(c).m))
  7400  					v1.AddArg2(v2, x)
  7401  					v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  7402  					v3.AuxInt = int16ToAuxInt(int16(16 - udivisible16(c).k))
  7403  					v0.AddArg2(v1, v3)
  7404  					v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  7405  					v4.AuxInt = int16ToAuxInt(int16(udivisible16(c).max))
  7406  					v.AddArg2(v0, v4)
  7407  					return true
  7408  				}
  7409  			}
  7410  		}
  7411  		break
  7412  	}
  7413  	// match: (Eq16 x (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (ZeroExt16to32 x)) (Const64 [s]))) ) )
  7414  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(1<<15+umagic16(c).m/2) && s == 16+umagic16(c).s-1 && x.Op != OpConst16 && udivisibleOK16(c)
  7415  	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int16(udivisible16(c).m)]) x) (Const16 <typ.UInt16> [int16(16-udivisible16(c).k)]) ) (Const16 <typ.UInt16> [int16(udivisible16(c).max)]) )
  7416  	for {
  7417  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7418  			x := v_0
  7419  			if v_1.Op != OpMul16 {
  7420  				continue
  7421  			}
  7422  			_ = v_1.Args[1]
  7423  			v_1_0 := v_1.Args[0]
  7424  			v_1_1 := v_1.Args[1]
  7425  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  7426  				if v_1_0.Op != OpConst16 {
  7427  					continue
  7428  				}
  7429  				c := auxIntToInt16(v_1_0.AuxInt)
  7430  				if v_1_1.Op != OpTrunc32to16 {
  7431  					continue
  7432  				}
  7433  				v_1_1_0 := v_1_1.Args[0]
  7434  				if v_1_1_0.Op != OpRsh32Ux64 {
  7435  					continue
  7436  				}
  7437  				_ = v_1_1_0.Args[1]
  7438  				mul := v_1_1_0.Args[0]
  7439  				if mul.Op != OpMul32 {
  7440  					continue
  7441  				}
  7442  				_ = mul.Args[1]
  7443  				mul_0 := mul.Args[0]
  7444  				mul_1 := mul.Args[1]
  7445  				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
  7446  					if mul_0.Op != OpConst32 {
  7447  						continue
  7448  					}
  7449  					m := auxIntToInt32(mul_0.AuxInt)
  7450  					if mul_1.Op != OpZeroExt16to32 || x != mul_1.Args[0] {
  7451  						continue
  7452  					}
  7453  					v_1_1_0_1 := v_1_1_0.Args[1]
  7454  					if v_1_1_0_1.Op != OpConst64 {
  7455  						continue
  7456  					}
  7457  					s := auxIntToInt64(v_1_1_0_1.AuxInt)
  7458  					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(1<<15+umagic16(c).m/2) && s == 16+umagic16(c).s-1 && x.Op != OpConst16 && udivisibleOK16(c)) {
  7459  						continue
  7460  					}
  7461  					v.reset(OpLeq16U)
  7462  					v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
  7463  					v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
  7464  					v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  7465  					v2.AuxInt = int16ToAuxInt(int16(udivisible16(c).m))
  7466  					v1.AddArg2(v2, x)
  7467  					v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  7468  					v3.AuxInt = int16ToAuxInt(int16(16 - udivisible16(c).k))
  7469  					v0.AddArg2(v1, v3)
  7470  					v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  7471  					v4.AuxInt = int16ToAuxInt(int16(udivisible16(c).max))
  7472  					v.AddArg2(v0, v4)
  7473  					return true
  7474  				}
  7475  			}
  7476  		}
  7477  		break
  7478  	}
  7479  	// match: (Eq16 x (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (Rsh32Ux64 (ZeroExt16to32 x) (Const64 [1]))) (Const64 [s]))) ) )
  7480  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(1<<15+(umagic16(c).m+1)/2) && s == 16+umagic16(c).s-2 && x.Op != OpConst16 && udivisibleOK16(c)
  7481  	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int16(udivisible16(c).m)]) x) (Const16 <typ.UInt16> [int16(16-udivisible16(c).k)]) ) (Const16 <typ.UInt16> [int16(udivisible16(c).max)]) )
  7482  	for {
  7483  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7484  			x := v_0
  7485  			if v_1.Op != OpMul16 {
  7486  				continue
  7487  			}
  7488  			_ = v_1.Args[1]
  7489  			v_1_0 := v_1.Args[0]
  7490  			v_1_1 := v_1.Args[1]
  7491  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  7492  				if v_1_0.Op != OpConst16 {
  7493  					continue
  7494  				}
  7495  				c := auxIntToInt16(v_1_0.AuxInt)
  7496  				if v_1_1.Op != OpTrunc32to16 {
  7497  					continue
  7498  				}
  7499  				v_1_1_0 := v_1_1.Args[0]
  7500  				if v_1_1_0.Op != OpRsh32Ux64 {
  7501  					continue
  7502  				}
  7503  				_ = v_1_1_0.Args[1]
  7504  				mul := v_1_1_0.Args[0]
  7505  				if mul.Op != OpMul32 {
  7506  					continue
  7507  				}
  7508  				_ = mul.Args[1]
  7509  				mul_0 := mul.Args[0]
  7510  				mul_1 := mul.Args[1]
  7511  				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
  7512  					if mul_0.Op != OpConst32 {
  7513  						continue
  7514  					}
  7515  					m := auxIntToInt32(mul_0.AuxInt)
  7516  					if mul_1.Op != OpRsh32Ux64 {
  7517  						continue
  7518  					}
  7519  					_ = mul_1.Args[1]
  7520  					mul_1_0 := mul_1.Args[0]
  7521  					if mul_1_0.Op != OpZeroExt16to32 || x != mul_1_0.Args[0] {
  7522  						continue
  7523  					}
  7524  					mul_1_1 := mul_1.Args[1]
  7525  					if mul_1_1.Op != OpConst64 || auxIntToInt64(mul_1_1.AuxInt) != 1 {
  7526  						continue
  7527  					}
  7528  					v_1_1_0_1 := v_1_1_0.Args[1]
  7529  					if v_1_1_0_1.Op != OpConst64 {
  7530  						continue
  7531  					}
  7532  					s := auxIntToInt64(v_1_1_0_1.AuxInt)
  7533  					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(1<<15+(umagic16(c).m+1)/2) && s == 16+umagic16(c).s-2 && x.Op != OpConst16 && udivisibleOK16(c)) {
  7534  						continue
  7535  					}
  7536  					v.reset(OpLeq16U)
  7537  					v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
  7538  					v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
  7539  					v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  7540  					v2.AuxInt = int16ToAuxInt(int16(udivisible16(c).m))
  7541  					v1.AddArg2(v2, x)
  7542  					v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  7543  					v3.AuxInt = int16ToAuxInt(int16(16 - udivisible16(c).k))
  7544  					v0.AddArg2(v1, v3)
  7545  					v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  7546  					v4.AuxInt = int16ToAuxInt(int16(udivisible16(c).max))
  7547  					v.AddArg2(v0, v4)
  7548  					return true
  7549  				}
  7550  			}
  7551  		}
  7552  		break
  7553  	}
  7554  	// match: (Eq16 x (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 (Avg32u (Lsh32x64 (ZeroExt16to32 x) (Const64 [16])) mul:(Mul32 (Const32 [m]) (ZeroExt16to32 x))) (Const64 [s]))) ) )
  7555  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(umagic16(c).m) && s == 16+umagic16(c).s-1 && x.Op != OpConst16 && udivisibleOK16(c)
  7556  	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int16(udivisible16(c).m)]) x) (Const16 <typ.UInt16> [int16(16-udivisible16(c).k)]) ) (Const16 <typ.UInt16> [int16(udivisible16(c).max)]) )
  7557  	for {
  7558  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7559  			x := v_0
  7560  			if v_1.Op != OpMul16 {
  7561  				continue
  7562  			}
  7563  			_ = v_1.Args[1]
  7564  			v_1_0 := v_1.Args[0]
  7565  			v_1_1 := v_1.Args[1]
  7566  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  7567  				if v_1_0.Op != OpConst16 {
  7568  					continue
  7569  				}
  7570  				c := auxIntToInt16(v_1_0.AuxInt)
  7571  				if v_1_1.Op != OpTrunc32to16 {
  7572  					continue
  7573  				}
  7574  				v_1_1_0 := v_1_1.Args[0]
  7575  				if v_1_1_0.Op != OpRsh32Ux64 {
  7576  					continue
  7577  				}
  7578  				_ = v_1_1_0.Args[1]
  7579  				v_1_1_0_0 := v_1_1_0.Args[0]
  7580  				if v_1_1_0_0.Op != OpAvg32u {
  7581  					continue
  7582  				}
  7583  				_ = v_1_1_0_0.Args[1]
  7584  				v_1_1_0_0_0 := v_1_1_0_0.Args[0]
  7585  				if v_1_1_0_0_0.Op != OpLsh32x64 {
  7586  					continue
  7587  				}
  7588  				_ = v_1_1_0_0_0.Args[1]
  7589  				v_1_1_0_0_0_0 := v_1_1_0_0_0.Args[0]
  7590  				if v_1_1_0_0_0_0.Op != OpZeroExt16to32 || x != v_1_1_0_0_0_0.Args[0] {
  7591  					continue
  7592  				}
  7593  				v_1_1_0_0_0_1 := v_1_1_0_0_0.Args[1]
  7594  				if v_1_1_0_0_0_1.Op != OpConst64 || auxIntToInt64(v_1_1_0_0_0_1.AuxInt) != 16 {
  7595  					continue
  7596  				}
  7597  				mul := v_1_1_0_0.Args[1]
  7598  				if mul.Op != OpMul32 {
  7599  					continue
  7600  				}
  7601  				_ = mul.Args[1]
  7602  				mul_0 := mul.Args[0]
  7603  				mul_1 := mul.Args[1]
  7604  				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
  7605  					if mul_0.Op != OpConst32 {
  7606  						continue
  7607  					}
  7608  					m := auxIntToInt32(mul_0.AuxInt)
  7609  					if mul_1.Op != OpZeroExt16to32 || x != mul_1.Args[0] {
  7610  						continue
  7611  					}
  7612  					v_1_1_0_1 := v_1_1_0.Args[1]
  7613  					if v_1_1_0_1.Op != OpConst64 {
  7614  						continue
  7615  					}
  7616  					s := auxIntToInt64(v_1_1_0_1.AuxInt)
  7617  					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(umagic16(c).m) && s == 16+umagic16(c).s-1 && x.Op != OpConst16 && udivisibleOK16(c)) {
  7618  						continue
  7619  					}
  7620  					v.reset(OpLeq16U)
  7621  					v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
  7622  					v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
  7623  					v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  7624  					v2.AuxInt = int16ToAuxInt(int16(udivisible16(c).m))
  7625  					v1.AddArg2(v2, x)
  7626  					v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  7627  					v3.AuxInt = int16ToAuxInt(int16(16 - udivisible16(c).k))
  7628  					v0.AddArg2(v1, v3)
  7629  					v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  7630  					v4.AuxInt = int16ToAuxInt(int16(udivisible16(c).max))
  7631  					v.AddArg2(v0, v4)
  7632  					return true
  7633  				}
  7634  			}
  7635  		}
  7636  		break
  7637  	}
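	// Illustrative note (not from _gen/generic.rules): the four rules above
	// all recognize x == c*(x/c), i.e. x%c == 0 with x/c already expanded by
	// one of the Div16u magic variants, and replace the whole comparison with
	// the divisibility test rotateRight16(x*udivisible16(c).m, k) <=
	// udivisible16(c).max, written as a RotateLeft16 by 16-k. For odd c, m is
	// the inverse of c mod 2^16 and max is (2^16-1)/c. Hand-worked check for
	// c = 3: the inverse is 43691 and max is 21845; 9*43691 mod 2^16 = 3 <=
	// 21845 (divisible), while 10*43691 mod 2^16 = 43694 > 21845 (not).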
  7638  	// match: (Eq16 x (Mul16 (Const16 [c]) (Sub16 (Rsh32x64 mul:(Mul32 (Const32 [m]) (SignExt16to32 x)) (Const64 [s])) (Rsh32x64 (SignExt16to32 x) (Const64 [31]))) ) )
  7639  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(smagic16(c).m) && s == 16+smagic16(c).s && x.Op != OpConst16 && sdivisibleOK16(c)
  7640  	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Add16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int16(sdivisible16(c).m)]) x) (Const16 <typ.UInt16> [int16(sdivisible16(c).a)]) ) (Const16 <typ.UInt16> [int16(16-sdivisible16(c).k)]) ) (Const16 <typ.UInt16> [int16(sdivisible16(c).max)]) )
  7641  	for {
  7642  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7643  			x := v_0
  7644  			if v_1.Op != OpMul16 {
  7645  				continue
  7646  			}
  7647  			_ = v_1.Args[1]
  7648  			v_1_0 := v_1.Args[0]
  7649  			v_1_1 := v_1.Args[1]
  7650  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  7651  				if v_1_0.Op != OpConst16 {
  7652  					continue
  7653  				}
  7654  				c := auxIntToInt16(v_1_0.AuxInt)
  7655  				if v_1_1.Op != OpSub16 {
  7656  					continue
  7657  				}
  7658  				_ = v_1_1.Args[1]
  7659  				v_1_1_0 := v_1_1.Args[0]
  7660  				if v_1_1_0.Op != OpRsh32x64 {
  7661  					continue
  7662  				}
  7663  				_ = v_1_1_0.Args[1]
  7664  				mul := v_1_1_0.Args[0]
  7665  				if mul.Op != OpMul32 {
  7666  					continue
  7667  				}
  7668  				_ = mul.Args[1]
  7669  				mul_0 := mul.Args[0]
  7670  				mul_1 := mul.Args[1]
  7671  				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
  7672  					if mul_0.Op != OpConst32 {
  7673  						continue
  7674  					}
  7675  					m := auxIntToInt32(mul_0.AuxInt)
  7676  					if mul_1.Op != OpSignExt16to32 || x != mul_1.Args[0] {
  7677  						continue
  7678  					}
  7679  					v_1_1_0_1 := v_1_1_0.Args[1]
  7680  					if v_1_1_0_1.Op != OpConst64 {
  7681  						continue
  7682  					}
  7683  					s := auxIntToInt64(v_1_1_0_1.AuxInt)
  7684  					v_1_1_1 := v_1_1.Args[1]
  7685  					if v_1_1_1.Op != OpRsh32x64 {
  7686  						continue
  7687  					}
  7688  					_ = v_1_1_1.Args[1]
  7689  					v_1_1_1_0 := v_1_1_1.Args[0]
  7690  					if v_1_1_1_0.Op != OpSignExt16to32 || x != v_1_1_1_0.Args[0] {
  7691  						continue
  7692  					}
  7693  					v_1_1_1_1 := v_1_1_1.Args[1]
  7694  					if v_1_1_1_1.Op != OpConst64 || auxIntToInt64(v_1_1_1_1.AuxInt) != 31 || !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(smagic16(c).m) && s == 16+smagic16(c).s && x.Op != OpConst16 && sdivisibleOK16(c)) {
  7695  						continue
  7696  					}
  7697  					v.reset(OpLeq16U)
  7698  					v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
  7699  					v1 := b.NewValue0(v.Pos, OpAdd16, typ.UInt16)
  7700  					v2 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
  7701  					v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  7702  					v3.AuxInt = int16ToAuxInt(int16(sdivisible16(c).m))
  7703  					v2.AddArg2(v3, x)
  7704  					v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  7705  					v4.AuxInt = int16ToAuxInt(int16(sdivisible16(c).a))
  7706  					v1.AddArg2(v2, v4)
  7707  					v5 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  7708  					v5.AuxInt = int16ToAuxInt(int16(16 - sdivisible16(c).k))
  7709  					v0.AddArg2(v1, v5)
  7710  					v6 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  7711  					v6.AuxInt = int16ToAuxInt(int16(sdivisible16(c).max))
  7712  					v.AddArg2(v0, v6)
  7713  					return true
  7714  				}
  7715  			}
  7716  		}
  7717  		break
  7718  	}
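	// Illustrative note (not from _gen/generic.rules): this is the signed
	// counterpart of the divisibility rewrite above. Negative multiples of c
	// would wrap to large unsigned residues, so the result adds
	// sdivisible16(c).a before rotating, recentering every multiple of c into
	// a single contiguous range that the final Leq16U can check.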
  7719  	// match: (Eq16 n (Lsh16x64 (Rsh16x64 (Add16 <t> n (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) )
  7720  	// cond: k > 0 && k < 15 && kbar == 16 - k
  7721  	// result: (Eq16 (And16 <t> n (Const16 <t> [1<<uint(k)-1])) (Const16 <t> [0]))
  7722  	for {
  7723  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7724  			n := v_0
  7725  			if v_1.Op != OpLsh16x64 {
  7726  				continue
  7727  			}
  7728  			_ = v_1.Args[1]
  7729  			v_1_0 := v_1.Args[0]
  7730  			if v_1_0.Op != OpRsh16x64 {
  7731  				continue
  7732  			}
  7733  			_ = v_1_0.Args[1]
  7734  			v_1_0_0 := v_1_0.Args[0]
  7735  			if v_1_0_0.Op != OpAdd16 {
  7736  				continue
  7737  			}
  7738  			t := v_1_0_0.Type
  7739  			_ = v_1_0_0.Args[1]
  7740  			v_1_0_0_0 := v_1_0_0.Args[0]
  7741  			v_1_0_0_1 := v_1_0_0.Args[1]
  7742  			for _i1 := 0; _i1 <= 1; _i1, v_1_0_0_0, v_1_0_0_1 = _i1+1, v_1_0_0_1, v_1_0_0_0 {
  7743  				if n != v_1_0_0_0 || v_1_0_0_1.Op != OpRsh16Ux64 || v_1_0_0_1.Type != t {
  7744  					continue
  7745  				}
  7746  				_ = v_1_0_0_1.Args[1]
  7747  				v_1_0_0_1_0 := v_1_0_0_1.Args[0]
  7748  				if v_1_0_0_1_0.Op != OpRsh16x64 || v_1_0_0_1_0.Type != t {
  7749  					continue
  7750  				}
  7751  				_ = v_1_0_0_1_0.Args[1]
  7752  				if n != v_1_0_0_1_0.Args[0] {
  7753  					continue
  7754  				}
  7755  				v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
  7756  				if v_1_0_0_1_0_1.Op != OpConst64 || v_1_0_0_1_0_1.Type != typ.UInt64 || auxIntToInt64(v_1_0_0_1_0_1.AuxInt) != 15 {
  7757  					continue
  7758  				}
  7759  				v_1_0_0_1_1 := v_1_0_0_1.Args[1]
  7760  				if v_1_0_0_1_1.Op != OpConst64 || v_1_0_0_1_1.Type != typ.UInt64 {
  7761  					continue
  7762  				}
  7763  				kbar := auxIntToInt64(v_1_0_0_1_1.AuxInt)
  7764  				v_1_0_1 := v_1_0.Args[1]
  7765  				if v_1_0_1.Op != OpConst64 || v_1_0_1.Type != typ.UInt64 {
  7766  					continue
  7767  				}
  7768  				k := auxIntToInt64(v_1_0_1.AuxInt)
  7769  				v_1_1 := v_1.Args[1]
  7770  				if v_1_1.Op != OpConst64 || v_1_1.Type != typ.UInt64 || auxIntToInt64(v_1_1.AuxInt) != k || !(k > 0 && k < 15 && kbar == 16-k) {
  7771  					continue
  7772  				}
  7773  				v.reset(OpEq16)
  7774  				v0 := b.NewValue0(v.Pos, OpAnd16, t)
  7775  				v1 := b.NewValue0(v.Pos, OpConst16, t)
  7776  				v1.AuxInt = int16ToAuxInt(1<<uint(k) - 1)
  7777  				v0.AddArg2(n, v1)
  7778  				v2 := b.NewValue0(v.Pos, OpConst16, t)
  7779  				v2.AuxInt = int16ToAuxInt(0)
  7780  				v.AddArg2(v0, v2)
  7781  				return true
  7782  			}
  7783  		}
  7784  		break
  7785  	}
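	// Illustrative note (not from _gen/generic.rules): the pattern above is
	// n == (n / 2^k) * 2^k, with the signed division already expanded into the
	// shift-with-bias form produced earlier in this file. Divisibility by a
	// power of two depends only on the low k bits, so the comparison collapses
	// to n & (2^k - 1) == 0; for k = 3, n = -8 gives -8 & 7 == 0.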
  7786  	// match: (Eq16 s:(Sub16 x y) (Const16 [0]))
  7787  	// cond: s.Uses == 1
  7788  	// result: (Eq16 x y)
  7789  	for {
  7790  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7791  			s := v_0
  7792  			if s.Op != OpSub16 {
  7793  				continue
  7794  			}
  7795  			y := s.Args[1]
  7796  			x := s.Args[0]
  7797  			if v_1.Op != OpConst16 || auxIntToInt16(v_1.AuxInt) != 0 || !(s.Uses == 1) {
  7798  				continue
  7799  			}
  7800  			v.reset(OpEq16)
  7801  			v.AddArg2(x, y)
  7802  			return true
  7803  		}
  7804  		break
  7805  	}
  7806  	// match: (Eq16 (And16 <t> x (Const16 <t> [y])) (Const16 <t> [y]))
  7807  	// cond: oneBit16(y)
  7808  	// result: (Neq16 (And16 <t> x (Const16 <t> [y])) (Const16 <t> [0]))
  7809  	for {
  7810  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7811  			if v_0.Op != OpAnd16 {
  7812  				continue
  7813  			}
  7814  			t := v_0.Type
  7815  			_ = v_0.Args[1]
  7816  			v_0_0 := v_0.Args[0]
  7817  			v_0_1 := v_0.Args[1]
  7818  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
  7819  				x := v_0_0
  7820  				if v_0_1.Op != OpConst16 || v_0_1.Type != t {
  7821  					continue
  7822  				}
  7823  				y := auxIntToInt16(v_0_1.AuxInt)
  7824  				if v_1.Op != OpConst16 || v_1.Type != t || auxIntToInt16(v_1.AuxInt) != y || !(oneBit16(y)) {
  7825  					continue
  7826  				}
  7827  				v.reset(OpNeq16)
  7828  				v0 := b.NewValue0(v.Pos, OpAnd16, t)
  7829  				v1 := b.NewValue0(v.Pos, OpConst16, t)
  7830  				v1.AuxInt = int16ToAuxInt(y)
  7831  				v0.AddArg2(x, v1)
  7832  				v2 := b.NewValue0(v.Pos, OpConst16, t)
  7833  				v2.AuxInt = int16ToAuxInt(0)
  7834  				v.AddArg2(v0, v2)
  7835  				return true
  7836  			}
  7837  		}
  7838  		break
  7839  	}
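	// Illustrative note (not from _gen/generic.rules): when y has exactly one
	// bit set, x&y is either 0 or y, so x&y == y is equivalent to x&y != 0;
	// presumably the Neq form is what later bit-test lowerings look for.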
  7840  	return false
  7841  }
  7842  func rewriteValuegeneric_OpEq32(v *Value) bool {
  7843  	v_1 := v.Args[1]
  7844  	v_0 := v.Args[0]
  7845  	b := v.Block
  7846  	typ := &b.Func.Config.Types
  7847  	// match: (Eq32 x x)
  7848  	// result: (ConstBool [true])
  7849  	for {
  7850  		x := v_0
  7851  		if x != v_1 {
  7852  			break
  7853  		}
  7854  		v.reset(OpConstBool)
  7855  		v.AuxInt = boolToAuxInt(true)
  7856  		return true
  7857  	}
  7858  	// match: (Eq32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x))
  7859  	// result: (Eq32 (Const32 <t> [c-d]) x)
  7860  	for {
  7861  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7862  			if v_0.Op != OpConst32 {
  7863  				continue
  7864  			}
  7865  			t := v_0.Type
  7866  			c := auxIntToInt32(v_0.AuxInt)
  7867  			if v_1.Op != OpAdd32 {
  7868  				continue
  7869  			}
  7870  			_ = v_1.Args[1]
  7871  			v_1_0 := v_1.Args[0]
  7872  			v_1_1 := v_1.Args[1]
  7873  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  7874  				if v_1_0.Op != OpConst32 || v_1_0.Type != t {
  7875  					continue
  7876  				}
  7877  				d := auxIntToInt32(v_1_0.AuxInt)
  7878  				x := v_1_1
  7879  				v.reset(OpEq32)
  7880  				v0 := b.NewValue0(v.Pos, OpConst32, t)
  7881  				v0.AuxInt = int32ToAuxInt(c - d)
  7882  				v.AddArg2(v0, x)
  7883  				return true
  7884  			}
  7885  		}
  7886  		break
  7887  	}
  7888  	// match: (Eq32 (Const32 [c]) (Const32 [d]))
  7889  	// result: (ConstBool [c == d])
  7890  	for {
  7891  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7892  			if v_0.Op != OpConst32 {
  7893  				continue
  7894  			}
  7895  			c := auxIntToInt32(v_0.AuxInt)
  7896  			if v_1.Op != OpConst32 {
  7897  				continue
  7898  			}
  7899  			d := auxIntToInt32(v_1.AuxInt)
  7900  			v.reset(OpConstBool)
  7901  			v.AuxInt = boolToAuxInt(c == d)
  7902  			return true
  7903  		}
  7904  		break
  7905  	}
  7906  	// match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u (Const32 [m]) x) (Const64 [s])) ) )
  7907  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(1<<31+umagic32(c).m/2) && s == umagic32(c).s-1 && x.Op != OpConst32 && udivisibleOK32(c)
  7908  	// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(udivisible32(c).m)]) x) (Const32 <typ.UInt32> [int32(32-udivisible32(c).k)]) ) (Const32 <typ.UInt32> [int32(udivisible32(c).max)]) )
  7909  	for {
  7910  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7911  			x := v_0
  7912  			if v_1.Op != OpMul32 {
  7913  				continue
  7914  			}
  7915  			_ = v_1.Args[1]
  7916  			v_1_0 := v_1.Args[0]
  7917  			v_1_1 := v_1.Args[1]
  7918  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  7919  				if v_1_0.Op != OpConst32 {
  7920  					continue
  7921  				}
  7922  				c := auxIntToInt32(v_1_0.AuxInt)
  7923  				if v_1_1.Op != OpRsh32Ux64 {
  7924  					continue
  7925  				}
  7926  				_ = v_1_1.Args[1]
  7927  				mul := v_1_1.Args[0]
  7928  				if mul.Op != OpHmul32u {
  7929  					continue
  7930  				}
  7931  				_ = mul.Args[1]
  7932  				mul_0 := mul.Args[0]
  7933  				mul_1 := mul.Args[1]
  7934  				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
  7935  					if mul_0.Op != OpConst32 {
  7936  						continue
  7937  					}
  7938  					m := auxIntToInt32(mul_0.AuxInt)
  7939  					if x != mul_1 {
  7940  						continue
  7941  					}
  7942  					v_1_1_1 := v_1_1.Args[1]
  7943  					if v_1_1_1.Op != OpConst64 {
  7944  						continue
  7945  					}
  7946  					s := auxIntToInt64(v_1_1_1.AuxInt)
  7947  					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(1<<31+umagic32(c).m/2) && s == umagic32(c).s-1 && x.Op != OpConst32 && udivisibleOK32(c)) {
  7948  						continue
  7949  					}
  7950  					v.reset(OpLeq32U)
  7951  					v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
  7952  					v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
  7953  					v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  7954  					v2.AuxInt = int32ToAuxInt(int32(udivisible32(c).m))
  7955  					v1.AddArg2(v2, x)
  7956  					v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  7957  					v3.AuxInt = int32ToAuxInt(int32(32 - udivisible32(c).k))
  7958  					v0.AddArg2(v1, v3)
  7959  					v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  7960  					v4.AuxInt = int32ToAuxInt(int32(udivisible32(c).max))
  7961  					v.AddArg2(v0, v4)
  7962  					return true
  7963  				}
  7964  			}
  7965  		}
  7966  		break
  7967  	}
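	// Illustrative note (not from _gen/generic.rules): this rule and the ones
	// that follow are the 32-bit analogues of the Eq16 divisibility rewrites.
	// More shapes must be matched here because Div32u may have been
	// strength-reduced either with a widening 64-bit multiply (Mul64 of
	// ZeroExt32to64) or, on 32-bit-register targets, with Hmul32u, with or
	// without the Avg trick; all of them funnel into the same
	// multiply-rotate-compare divisibility test.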
  7968  	// match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u (Const32 <typ.UInt32> [m]) (Rsh32Ux64 x (Const64 [1]))) (Const64 [s])) ) )
  7969  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(1<<31+(umagic32(c).m+1)/2) && s == umagic32(c).s-2 && x.Op != OpConst32 && udivisibleOK32(c)
  7970  	// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(udivisible32(c).m)]) x) (Const32 <typ.UInt32> [int32(32-udivisible32(c).k)]) ) (Const32 <typ.UInt32> [int32(udivisible32(c).max)]) )
  7971  	for {
  7972  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7973  			x := v_0
  7974  			if v_1.Op != OpMul32 {
  7975  				continue
  7976  			}
  7977  			_ = v_1.Args[1]
  7978  			v_1_0 := v_1.Args[0]
  7979  			v_1_1 := v_1.Args[1]
  7980  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  7981  				if v_1_0.Op != OpConst32 {
  7982  					continue
  7983  				}
  7984  				c := auxIntToInt32(v_1_0.AuxInt)
  7985  				if v_1_1.Op != OpRsh32Ux64 {
  7986  					continue
  7987  				}
  7988  				_ = v_1_1.Args[1]
  7989  				mul := v_1_1.Args[0]
  7990  				if mul.Op != OpHmul32u {
  7991  					continue
  7992  				}
  7993  				_ = mul.Args[1]
  7994  				mul_0 := mul.Args[0]
  7995  				mul_1 := mul.Args[1]
  7996  				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
  7997  					if mul_0.Op != OpConst32 || mul_0.Type != typ.UInt32 {
  7998  						continue
  7999  					}
  8000  					m := auxIntToInt32(mul_0.AuxInt)
  8001  					if mul_1.Op != OpRsh32Ux64 {
  8002  						continue
  8003  					}
  8004  					_ = mul_1.Args[1]
  8005  					if x != mul_1.Args[0] {
  8006  						continue
  8007  					}
  8008  					mul_1_1 := mul_1.Args[1]
  8009  					if mul_1_1.Op != OpConst64 || auxIntToInt64(mul_1_1.AuxInt) != 1 {
  8010  						continue
  8011  					}
  8012  					v_1_1_1 := v_1_1.Args[1]
  8013  					if v_1_1_1.Op != OpConst64 {
  8014  						continue
  8015  					}
  8016  					s := auxIntToInt64(v_1_1_1.AuxInt)
  8017  					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(1<<31+(umagic32(c).m+1)/2) && s == umagic32(c).s-2 && x.Op != OpConst32 && udivisibleOK32(c)) {
  8018  						continue
  8019  					}
  8020  					v.reset(OpLeq32U)
  8021  					v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
  8022  					v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
  8023  					v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8024  					v2.AuxInt = int32ToAuxInt(int32(udivisible32(c).m))
  8025  					v1.AddArg2(v2, x)
  8026  					v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8027  					v3.AuxInt = int32ToAuxInt(int32(32 - udivisible32(c).k))
  8028  					v0.AddArg2(v1, v3)
  8029  					v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8030  					v4.AuxInt = int32ToAuxInt(int32(udivisible32(c).max))
  8031  					v.AddArg2(v0, v4)
  8032  					return true
  8033  				}
  8034  			}
  8035  		}
  8036  		break
  8037  	}
  8038  	// match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 (Avg32u x mul:(Hmul32u (Const32 [m]) x)) (Const64 [s])) ) )
  8039  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(umagic32(c).m) && s == umagic32(c).s-1 && x.Op != OpConst32 && udivisibleOK32(c)
  8040  	// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(udivisible32(c).m)]) x) (Const32 <typ.UInt32> [int32(32-udivisible32(c).k)]) ) (Const32 <typ.UInt32> [int32(udivisible32(c).max)]) )
  8041  	for {
  8042  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  8043  			x := v_0
  8044  			if v_1.Op != OpMul32 {
  8045  				continue
  8046  			}
  8047  			_ = v_1.Args[1]
  8048  			v_1_0 := v_1.Args[0]
  8049  			v_1_1 := v_1.Args[1]
  8050  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  8051  				if v_1_0.Op != OpConst32 {
  8052  					continue
  8053  				}
  8054  				c := auxIntToInt32(v_1_0.AuxInt)
  8055  				if v_1_1.Op != OpRsh32Ux64 {
  8056  					continue
  8057  				}
  8058  				_ = v_1_1.Args[1]
  8059  				v_1_1_0 := v_1_1.Args[0]
  8060  				if v_1_1_0.Op != OpAvg32u {
  8061  					continue
  8062  				}
  8063  				_ = v_1_1_0.Args[1]
  8064  				if x != v_1_1_0.Args[0] {
  8065  					continue
  8066  				}
  8067  				mul := v_1_1_0.Args[1]
  8068  				if mul.Op != OpHmul32u {
  8069  					continue
  8070  				}
  8071  				_ = mul.Args[1]
  8072  				mul_0 := mul.Args[0]
  8073  				mul_1 := mul.Args[1]
  8074  				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
  8075  					if mul_0.Op != OpConst32 {
  8076  						continue
  8077  					}
  8078  					m := auxIntToInt32(mul_0.AuxInt)
  8079  					if x != mul_1 {
  8080  						continue
  8081  					}
  8082  					v_1_1_1 := v_1_1.Args[1]
  8083  					if v_1_1_1.Op != OpConst64 {
  8084  						continue
  8085  					}
  8086  					s := auxIntToInt64(v_1_1_1.AuxInt)
  8087  					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(umagic32(c).m) && s == umagic32(c).s-1 && x.Op != OpConst32 && udivisibleOK32(c)) {
  8088  						continue
  8089  					}
  8090  					v.reset(OpLeq32U)
  8091  					v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
  8092  					v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
  8093  					v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8094  					v2.AuxInt = int32ToAuxInt(int32(udivisible32(c).m))
  8095  					v1.AddArg2(v2, x)
  8096  					v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8097  					v3.AuxInt = int32ToAuxInt(int32(32 - udivisible32(c).k))
  8098  					v0.AddArg2(v1, v3)
  8099  					v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8100  					v4.AuxInt = int32ToAuxInt(int32(udivisible32(c).max))
  8101  					v.AddArg2(v0, v4)
  8102  					return true
  8103  				}
  8104  			}
  8105  		}
  8106  		break
  8107  	}
  8108  	// match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x)) (Const64 [s]))) ) )
  8109  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic32(c).m/2) && s == 32+umagic32(c).s-1 && x.Op != OpConst32 && udivisibleOK32(c)
  8110  	// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(udivisible32(c).m)]) x) (Const32 <typ.UInt32> [int32(32-udivisible32(c).k)]) ) (Const32 <typ.UInt32> [int32(udivisible32(c).max)]) )
  8111  	for {
  8112  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  8113  			x := v_0
  8114  			if v_1.Op != OpMul32 {
  8115  				continue
  8116  			}
  8117  			_ = v_1.Args[1]
  8118  			v_1_0 := v_1.Args[0]
  8119  			v_1_1 := v_1.Args[1]
  8120  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  8121  				if v_1_0.Op != OpConst32 {
  8122  					continue
  8123  				}
  8124  				c := auxIntToInt32(v_1_0.AuxInt)
  8125  				if v_1_1.Op != OpTrunc64to32 {
  8126  					continue
  8127  				}
  8128  				v_1_1_0 := v_1_1.Args[0]
  8129  				if v_1_1_0.Op != OpRsh64Ux64 {
  8130  					continue
  8131  				}
  8132  				_ = v_1_1_0.Args[1]
  8133  				mul := v_1_1_0.Args[0]
  8134  				if mul.Op != OpMul64 {
  8135  					continue
  8136  				}
  8137  				_ = mul.Args[1]
  8138  				mul_0 := mul.Args[0]
  8139  				mul_1 := mul.Args[1]
  8140  				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
  8141  					if mul_0.Op != OpConst64 {
  8142  						continue
  8143  					}
  8144  					m := auxIntToInt64(mul_0.AuxInt)
  8145  					if mul_1.Op != OpZeroExt32to64 || x != mul_1.Args[0] {
  8146  						continue
  8147  					}
  8148  					v_1_1_0_1 := v_1_1_0.Args[1]
  8149  					if v_1_1_0_1.Op != OpConst64 {
  8150  						continue
  8151  					}
  8152  					s := auxIntToInt64(v_1_1_0_1.AuxInt)
  8153  					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic32(c).m/2) && s == 32+umagic32(c).s-1 && x.Op != OpConst32 && udivisibleOK32(c)) {
  8154  						continue
  8155  					}
  8156  					v.reset(OpLeq32U)
  8157  					v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
  8158  					v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
  8159  					v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8160  					v2.AuxInt = int32ToAuxInt(int32(udivisible32(c).m))
  8161  					v1.AddArg2(v2, x)
  8162  					v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8163  					v3.AuxInt = int32ToAuxInt(int32(32 - udivisible32(c).k))
  8164  					v0.AddArg2(v1, v3)
  8165  					v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8166  					v4.AuxInt = int32ToAuxInt(int32(udivisible32(c).max))
  8167  					v.AddArg2(v0, v4)
  8168  					return true
  8169  				}
  8170  			}
  8171  		}
  8172  		break
  8173  	}
  8174  	// match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1]))) (Const64 [s]))) ) )
  8175  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic32(c).m+1)/2) && s == 32+umagic32(c).s-2 && x.Op != OpConst32 && udivisibleOK32(c)
  8176  	// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(udivisible32(c).m)]) x) (Const32 <typ.UInt32> [int32(32-udivisible32(c).k)]) ) (Const32 <typ.UInt32> [int32(udivisible32(c).max)]) )
  8177  	for {
  8178  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  8179  			x := v_0
  8180  			if v_1.Op != OpMul32 {
  8181  				continue
  8182  			}
  8183  			_ = v_1.Args[1]
  8184  			v_1_0 := v_1.Args[0]
  8185  			v_1_1 := v_1.Args[1]
  8186  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  8187  				if v_1_0.Op != OpConst32 {
  8188  					continue
  8189  				}
  8190  				c := auxIntToInt32(v_1_0.AuxInt)
  8191  				if v_1_1.Op != OpTrunc64to32 {
  8192  					continue
  8193  				}
  8194  				v_1_1_0 := v_1_1.Args[0]
  8195  				if v_1_1_0.Op != OpRsh64Ux64 {
  8196  					continue
  8197  				}
  8198  				_ = v_1_1_0.Args[1]
  8199  				mul := v_1_1_0.Args[0]
  8200  				if mul.Op != OpMul64 {
  8201  					continue
  8202  				}
  8203  				_ = mul.Args[1]
  8204  				mul_0 := mul.Args[0]
  8205  				mul_1 := mul.Args[1]
  8206  				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
  8207  					if mul_0.Op != OpConst64 {
  8208  						continue
  8209  					}
  8210  					m := auxIntToInt64(mul_0.AuxInt)
  8211  					if mul_1.Op != OpRsh64Ux64 {
  8212  						continue
  8213  					}
  8214  					_ = mul_1.Args[1]
  8215  					mul_1_0 := mul_1.Args[0]
  8216  					if mul_1_0.Op != OpZeroExt32to64 || x != mul_1_0.Args[0] {
  8217  						continue
  8218  					}
  8219  					mul_1_1 := mul_1.Args[1]
  8220  					if mul_1_1.Op != OpConst64 || auxIntToInt64(mul_1_1.AuxInt) != 1 {
  8221  						continue
  8222  					}
  8223  					v_1_1_0_1 := v_1_1_0.Args[1]
  8224  					if v_1_1_0_1.Op != OpConst64 {
  8225  						continue
  8226  					}
  8227  					s := auxIntToInt64(v_1_1_0_1.AuxInt)
  8228  					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic32(c).m+1)/2) && s == 32+umagic32(c).s-2 && x.Op != OpConst32 && udivisibleOK32(c)) {
  8229  						continue
  8230  					}
  8231  					v.reset(OpLeq32U)
  8232  					v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
  8233  					v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
  8234  					v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8235  					v2.AuxInt = int32ToAuxInt(int32(udivisible32(c).m))
  8236  					v1.AddArg2(v2, x)
  8237  					v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8238  					v3.AuxInt = int32ToAuxInt(int32(32 - udivisible32(c).k))
  8239  					v0.AddArg2(v1, v3)
  8240  					v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8241  					v4.AuxInt = int32ToAuxInt(int32(udivisible32(c).max))
  8242  					v.AddArg2(v0, v4)
  8243  					return true
  8244  				}
  8245  			}
  8246  		}
  8247  		break
  8248  	}
  8249  	// match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x))) (Const64 [s]))) ) )
  8250  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic32(c).m) && s == 32+umagic32(c).s-1 && x.Op != OpConst32 && udivisibleOK32(c)
  8251  	// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(udivisible32(c).m)]) x) (Const32 <typ.UInt32> [int32(32-udivisible32(c).k)]) ) (Const32 <typ.UInt32> [int32(udivisible32(c).max)]) )
  8252  	for {
  8253  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  8254  			x := v_0
  8255  			if v_1.Op != OpMul32 {
  8256  				continue
  8257  			}
  8258  			_ = v_1.Args[1]
  8259  			v_1_0 := v_1.Args[0]
  8260  			v_1_1 := v_1.Args[1]
  8261  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  8262  				if v_1_0.Op != OpConst32 {
  8263  					continue
  8264  				}
  8265  				c := auxIntToInt32(v_1_0.AuxInt)
  8266  				if v_1_1.Op != OpTrunc64to32 {
  8267  					continue
  8268  				}
  8269  				v_1_1_0 := v_1_1.Args[0]
  8270  				if v_1_1_0.Op != OpRsh64Ux64 {
  8271  					continue
  8272  				}
  8273  				_ = v_1_1_0.Args[1]
  8274  				v_1_1_0_0 := v_1_1_0.Args[0]
  8275  				if v_1_1_0_0.Op != OpAvg64u {
  8276  					continue
  8277  				}
  8278  				_ = v_1_1_0_0.Args[1]
  8279  				v_1_1_0_0_0 := v_1_1_0_0.Args[0]
  8280  				if v_1_1_0_0_0.Op != OpLsh64x64 {
  8281  					continue
  8282  				}
  8283  				_ = v_1_1_0_0_0.Args[1]
  8284  				v_1_1_0_0_0_0 := v_1_1_0_0_0.Args[0]
  8285  				if v_1_1_0_0_0_0.Op != OpZeroExt32to64 || x != v_1_1_0_0_0_0.Args[0] {
  8286  					continue
  8287  				}
  8288  				v_1_1_0_0_0_1 := v_1_1_0_0_0.Args[1]
  8289  				if v_1_1_0_0_0_1.Op != OpConst64 || auxIntToInt64(v_1_1_0_0_0_1.AuxInt) != 32 {
  8290  					continue
  8291  				}
  8292  				mul := v_1_1_0_0.Args[1]
  8293  				if mul.Op != OpMul64 {
  8294  					continue
  8295  				}
  8296  				_ = mul.Args[1]
  8297  				mul_0 := mul.Args[0]
  8298  				mul_1 := mul.Args[1]
  8299  				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
  8300  					if mul_0.Op != OpConst64 {
  8301  						continue
  8302  					}
  8303  					m := auxIntToInt64(mul_0.AuxInt)
  8304  					if mul_1.Op != OpZeroExt32to64 || x != mul_1.Args[0] {
  8305  						continue
  8306  					}
  8307  					v_1_1_0_1 := v_1_1_0.Args[1]
  8308  					if v_1_1_0_1.Op != OpConst64 {
  8309  						continue
  8310  					}
  8311  					s := auxIntToInt64(v_1_1_0_1.AuxInt)
  8312  					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic32(c).m) && s == 32+umagic32(c).s-1 && x.Op != OpConst32 && udivisibleOK32(c)) {
  8313  						continue
  8314  					}
  8315  					v.reset(OpLeq32U)
  8316  					v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
  8317  					v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
  8318  					v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8319  					v2.AuxInt = int32ToAuxInt(int32(udivisible32(c).m))
  8320  					v1.AddArg2(v2, x)
  8321  					v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8322  					v3.AuxInt = int32ToAuxInt(int32(32 - udivisible32(c).k))
  8323  					v0.AddArg2(v1, v3)
  8324  					v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8325  					v4.AuxInt = int32ToAuxInt(int32(udivisible32(c).max))
  8326  					v.AddArg2(v0, v4)
  8327  					return true
  8328  				}
  8329  			}
  8330  		}
  8331  		break
  8332  	}
  8333  	// match: (Eq32 x (Mul32 (Const32 [c]) (Sub32 (Rsh64x64 mul:(Mul64 (Const64 [m]) (SignExt32to64 x)) (Const64 [s])) (Rsh64x64 (SignExt32to64 x) (Const64 [63]))) ) )
  8334  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic32(c).m) && s == 32+smagic32(c).s && x.Op != OpConst32 && sdivisibleOK32(c)
  8335  	// result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(sdivisible32(c).m)]) x) (Const32 <typ.UInt32> [int32(sdivisible32(c).a)]) ) (Const32 <typ.UInt32> [int32(32-sdivisible32(c).k)]) ) (Const32 <typ.UInt32> [int32(sdivisible32(c).max)]) )
  8336  	for {
  8337  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  8338  			x := v_0
  8339  			if v_1.Op != OpMul32 {
  8340  				continue
  8341  			}
  8342  			_ = v_1.Args[1]
  8343  			v_1_0 := v_1.Args[0]
  8344  			v_1_1 := v_1.Args[1]
  8345  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  8346  				if v_1_0.Op != OpConst32 {
  8347  					continue
  8348  				}
  8349  				c := auxIntToInt32(v_1_0.AuxInt)
  8350  				if v_1_1.Op != OpSub32 {
  8351  					continue
  8352  				}
  8353  				_ = v_1_1.Args[1]
  8354  				v_1_1_0 := v_1_1.Args[0]
  8355  				if v_1_1_0.Op != OpRsh64x64 {
  8356  					continue
  8357  				}
  8358  				_ = v_1_1_0.Args[1]
  8359  				mul := v_1_1_0.Args[0]
  8360  				if mul.Op != OpMul64 {
  8361  					continue
  8362  				}
  8363  				_ = mul.Args[1]
  8364  				mul_0 := mul.Args[0]
  8365  				mul_1 := mul.Args[1]
  8366  				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
  8367  					if mul_0.Op != OpConst64 {
  8368  						continue
  8369  					}
  8370  					m := auxIntToInt64(mul_0.AuxInt)
  8371  					if mul_1.Op != OpSignExt32to64 || x != mul_1.Args[0] {
  8372  						continue
  8373  					}
  8374  					v_1_1_0_1 := v_1_1_0.Args[1]
  8375  					if v_1_1_0_1.Op != OpConst64 {
  8376  						continue
  8377  					}
  8378  					s := auxIntToInt64(v_1_1_0_1.AuxInt)
  8379  					v_1_1_1 := v_1_1.Args[1]
  8380  					if v_1_1_1.Op != OpRsh64x64 {
  8381  						continue
  8382  					}
  8383  					_ = v_1_1_1.Args[1]
  8384  					v_1_1_1_0 := v_1_1_1.Args[0]
  8385  					if v_1_1_1_0.Op != OpSignExt32to64 || x != v_1_1_1_0.Args[0] {
  8386  						continue
  8387  					}
  8388  					v_1_1_1_1 := v_1_1_1.Args[1]
  8389  					if v_1_1_1_1.Op != OpConst64 || auxIntToInt64(v_1_1_1_1.AuxInt) != 63 || !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic32(c).m) && s == 32+smagic32(c).s && x.Op != OpConst32 && sdivisibleOK32(c)) {
  8390  						continue
  8391  					}
  8392  					v.reset(OpLeq32U)
  8393  					v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
  8394  					v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
  8395  					v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
  8396  					v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8397  					v3.AuxInt = int32ToAuxInt(int32(sdivisible32(c).m))
  8398  					v2.AddArg2(v3, x)
  8399  					v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8400  					v4.AuxInt = int32ToAuxInt(int32(sdivisible32(c).a))
  8401  					v1.AddArg2(v2, v4)
  8402  					v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8403  					v5.AuxInt = int32ToAuxInt(int32(32 - sdivisible32(c).k))
  8404  					v0.AddArg2(v1, v5)
  8405  					v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8406  					v6.AuxInt = int32ToAuxInt(int32(sdivisible32(c).max))
  8407  					v.AddArg2(v0, v6)
  8408  					return true
  8409  				}
  8410  			}
  8411  		}
  8412  		break
  8413  	}
  8414  	// match: (Eq32 x (Mul32 (Const32 [c]) (Sub32 (Rsh32x64 mul:(Hmul32 (Const32 [m]) x) (Const64 [s])) (Rsh32x64 x (Const64 [31]))) ) )
  8415  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(smagic32(c).m/2) && s == smagic32(c).s-1 && x.Op != OpConst32 && sdivisibleOK32(c)
  8416  	// result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(sdivisible32(c).m)]) x) (Const32 <typ.UInt32> [int32(sdivisible32(c).a)]) ) (Const32 <typ.UInt32> [int32(32-sdivisible32(c).k)]) ) (Const32 <typ.UInt32> [int32(sdivisible32(c).max)]) )
  8417  	for {
  8418  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  8419  			x := v_0
  8420  			if v_1.Op != OpMul32 {
  8421  				continue
  8422  			}
  8423  			_ = v_1.Args[1]
  8424  			v_1_0 := v_1.Args[0]
  8425  			v_1_1 := v_1.Args[1]
  8426  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  8427  				if v_1_0.Op != OpConst32 {
  8428  					continue
  8429  				}
  8430  				c := auxIntToInt32(v_1_0.AuxInt)
  8431  				if v_1_1.Op != OpSub32 {
  8432  					continue
  8433  				}
  8434  				_ = v_1_1.Args[1]
  8435  				v_1_1_0 := v_1_1.Args[0]
  8436  				if v_1_1_0.Op != OpRsh32x64 {
  8437  					continue
  8438  				}
  8439  				_ = v_1_1_0.Args[1]
  8440  				mul := v_1_1_0.Args[0]
  8441  				if mul.Op != OpHmul32 {
  8442  					continue
  8443  				}
  8444  				_ = mul.Args[1]
  8445  				mul_0 := mul.Args[0]
  8446  				mul_1 := mul.Args[1]
  8447  				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
  8448  					if mul_0.Op != OpConst32 {
  8449  						continue
  8450  					}
  8451  					m := auxIntToInt32(mul_0.AuxInt)
  8452  					if x != mul_1 {
  8453  						continue
  8454  					}
  8455  					v_1_1_0_1 := v_1_1_0.Args[1]
  8456  					if v_1_1_0_1.Op != OpConst64 {
  8457  						continue
  8458  					}
  8459  					s := auxIntToInt64(v_1_1_0_1.AuxInt)
  8460  					v_1_1_1 := v_1_1.Args[1]
  8461  					if v_1_1_1.Op != OpRsh32x64 {
  8462  						continue
  8463  					}
  8464  					_ = v_1_1_1.Args[1]
  8465  					if x != v_1_1_1.Args[0] {
  8466  						continue
  8467  					}
  8468  					v_1_1_1_1 := v_1_1_1.Args[1]
  8469  					if v_1_1_1_1.Op != OpConst64 || auxIntToInt64(v_1_1_1_1.AuxInt) != 31 || !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(smagic32(c).m/2) && s == smagic32(c).s-1 && x.Op != OpConst32 && sdivisibleOK32(c)) {
  8470  						continue
  8471  					}
  8472  					v.reset(OpLeq32U)
  8473  					v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
  8474  					v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
  8475  					v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
  8476  					v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8477  					v3.AuxInt = int32ToAuxInt(int32(sdivisible32(c).m))
  8478  					v2.AddArg2(v3, x)
  8479  					v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8480  					v4.AuxInt = int32ToAuxInt(int32(sdivisible32(c).a))
  8481  					v1.AddArg2(v2, v4)
  8482  					v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8483  					v5.AuxInt = int32ToAuxInt(int32(32 - sdivisible32(c).k))
  8484  					v0.AddArg2(v1, v5)
  8485  					v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8486  					v6.AuxInt = int32ToAuxInt(int32(sdivisible32(c).max))
  8487  					v.AddArg2(v0, v6)
  8488  					return true
  8489  				}
  8490  			}
  8491  		}
  8492  		break
  8493  	}
  8494  	// match: (Eq32 x (Mul32 (Const32 [c]) (Sub32 (Rsh32x64 (Add32 mul:(Hmul32 (Const32 [m]) x) x) (Const64 [s])) (Rsh32x64 x (Const64 [31]))) ) )
  8495  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(smagic32(c).m) && s == smagic32(c).s && x.Op != OpConst32 && sdivisibleOK32(c)
  8496  	// result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(sdivisible32(c).m)]) x) (Const32 <typ.UInt32> [int32(sdivisible32(c).a)]) ) (Const32 <typ.UInt32> [int32(32-sdivisible32(c).k)]) ) (Const32 <typ.UInt32> [int32(sdivisible32(c).max)]) )
  8497  	for {
  8498  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  8499  			x := v_0
  8500  			if v_1.Op != OpMul32 {
  8501  				continue
  8502  			}
  8503  			_ = v_1.Args[1]
  8504  			v_1_0 := v_1.Args[0]
  8505  			v_1_1 := v_1.Args[1]
  8506  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  8507  				if v_1_0.Op != OpConst32 {
  8508  					continue
  8509  				}
  8510  				c := auxIntToInt32(v_1_0.AuxInt)
  8511  				if v_1_1.Op != OpSub32 {
  8512  					continue
  8513  				}
  8514  				_ = v_1_1.Args[1]
  8515  				v_1_1_0 := v_1_1.Args[0]
  8516  				if v_1_1_0.Op != OpRsh32x64 {
  8517  					continue
  8518  				}
  8519  				_ = v_1_1_0.Args[1]
  8520  				v_1_1_0_0 := v_1_1_0.Args[0]
  8521  				if v_1_1_0_0.Op != OpAdd32 {
  8522  					continue
  8523  				}
  8524  				_ = v_1_1_0_0.Args[1]
  8525  				v_1_1_0_0_0 := v_1_1_0_0.Args[0]
  8526  				v_1_1_0_0_1 := v_1_1_0_0.Args[1]
  8527  				for _i2 := 0; _i2 <= 1; _i2, v_1_1_0_0_0, v_1_1_0_0_1 = _i2+1, v_1_1_0_0_1, v_1_1_0_0_0 {
  8528  					mul := v_1_1_0_0_0
  8529  					if mul.Op != OpHmul32 {
  8530  						continue
  8531  					}
  8532  					_ = mul.Args[1]
  8533  					mul_0 := mul.Args[0]
  8534  					mul_1 := mul.Args[1]
  8535  					for _i3 := 0; _i3 <= 1; _i3, mul_0, mul_1 = _i3+1, mul_1, mul_0 {
  8536  						if mul_0.Op != OpConst32 {
  8537  							continue
  8538  						}
  8539  						m := auxIntToInt32(mul_0.AuxInt)
  8540  						if x != mul_1 || x != v_1_1_0_0_1 {
  8541  							continue
  8542  						}
  8543  						v_1_1_0_1 := v_1_1_0.Args[1]
  8544  						if v_1_1_0_1.Op != OpConst64 {
  8545  							continue
  8546  						}
  8547  						s := auxIntToInt64(v_1_1_0_1.AuxInt)
  8548  						v_1_1_1 := v_1_1.Args[1]
  8549  						if v_1_1_1.Op != OpRsh32x64 {
  8550  							continue
  8551  						}
  8552  						_ = v_1_1_1.Args[1]
  8553  						if x != v_1_1_1.Args[0] {
  8554  							continue
  8555  						}
  8556  						v_1_1_1_1 := v_1_1_1.Args[1]
  8557  						if v_1_1_1_1.Op != OpConst64 || auxIntToInt64(v_1_1_1_1.AuxInt) != 31 || !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(smagic32(c).m) && s == smagic32(c).s && x.Op != OpConst32 && sdivisibleOK32(c)) {
  8558  							continue
  8559  						}
  8560  						v.reset(OpLeq32U)
  8561  						v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
  8562  						v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
  8563  						v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
  8564  						v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8565  						v3.AuxInt = int32ToAuxInt(int32(sdivisible32(c).m))
  8566  						v2.AddArg2(v3, x)
  8567  						v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8568  						v4.AuxInt = int32ToAuxInt(int32(sdivisible32(c).a))
  8569  						v1.AddArg2(v2, v4)
  8570  						v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8571  						v5.AuxInt = int32ToAuxInt(int32(32 - sdivisible32(c).k))
  8572  						v0.AddArg2(v1, v5)
  8573  						v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8574  						v6.AuxInt = int32ToAuxInt(int32(sdivisible32(c).max))
  8575  						v.AddArg2(v0, v6)
  8576  						return true
  8577  					}
  8578  				}
  8579  			}
  8580  		}
  8581  		break
  8582  	}
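	// The next rule matches n == (n>>k)<<k where the shift pair carries the usual
	// round-toward-zero bias, (n>>31) logically shifted right by 32-k. That is n
	// compared against itself truncated to a multiple of 1<<k, which holds exactly
	// when the low k bits of n are zero, so it becomes n&(1<<k-1) == 0.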
  8583  	// match: (Eq32 n (Lsh32x64 (Rsh32x64 (Add32 <t> n (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) )
  8584  	// cond: k > 0 && k < 31 && kbar == 32 - k
  8585  	// result: (Eq32 (And32 <t> n (Const32 <t> [1<<uint(k)-1])) (Const32 <t> [0]))
  8586  	for {
  8587  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  8588  			n := v_0
  8589  			if v_1.Op != OpLsh32x64 {
  8590  				continue
  8591  			}
  8592  			_ = v_1.Args[1]
  8593  			v_1_0 := v_1.Args[0]
  8594  			if v_1_0.Op != OpRsh32x64 {
  8595  				continue
  8596  			}
  8597  			_ = v_1_0.Args[1]
  8598  			v_1_0_0 := v_1_0.Args[0]
  8599  			if v_1_0_0.Op != OpAdd32 {
  8600  				continue
  8601  			}
  8602  			t := v_1_0_0.Type
  8603  			_ = v_1_0_0.Args[1]
  8604  			v_1_0_0_0 := v_1_0_0.Args[0]
  8605  			v_1_0_0_1 := v_1_0_0.Args[1]
  8606  			for _i1 := 0; _i1 <= 1; _i1, v_1_0_0_0, v_1_0_0_1 = _i1+1, v_1_0_0_1, v_1_0_0_0 {
  8607  				if n != v_1_0_0_0 || v_1_0_0_1.Op != OpRsh32Ux64 || v_1_0_0_1.Type != t {
  8608  					continue
  8609  				}
  8610  				_ = v_1_0_0_1.Args[1]
  8611  				v_1_0_0_1_0 := v_1_0_0_1.Args[0]
  8612  				if v_1_0_0_1_0.Op != OpRsh32x64 || v_1_0_0_1_0.Type != t {
  8613  					continue
  8614  				}
  8615  				_ = v_1_0_0_1_0.Args[1]
  8616  				if n != v_1_0_0_1_0.Args[0] {
  8617  					continue
  8618  				}
  8619  				v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
  8620  				if v_1_0_0_1_0_1.Op != OpConst64 || v_1_0_0_1_0_1.Type != typ.UInt64 || auxIntToInt64(v_1_0_0_1_0_1.AuxInt) != 31 {
  8621  					continue
  8622  				}
  8623  				v_1_0_0_1_1 := v_1_0_0_1.Args[1]
  8624  				if v_1_0_0_1_1.Op != OpConst64 || v_1_0_0_1_1.Type != typ.UInt64 {
  8625  					continue
  8626  				}
  8627  				kbar := auxIntToInt64(v_1_0_0_1_1.AuxInt)
  8628  				v_1_0_1 := v_1_0.Args[1]
  8629  				if v_1_0_1.Op != OpConst64 || v_1_0_1.Type != typ.UInt64 {
  8630  					continue
  8631  				}
  8632  				k := auxIntToInt64(v_1_0_1.AuxInt)
  8633  				v_1_1 := v_1.Args[1]
  8634  				if v_1_1.Op != OpConst64 || v_1_1.Type != typ.UInt64 || auxIntToInt64(v_1_1.AuxInt) != k || !(k > 0 && k < 31 && kbar == 32-k) {
  8635  					continue
  8636  				}
  8637  				v.reset(OpEq32)
  8638  				v0 := b.NewValue0(v.Pos, OpAnd32, t)
  8639  				v1 := b.NewValue0(v.Pos, OpConst32, t)
  8640  				v1.AuxInt = int32ToAuxInt(1<<uint(k) - 1)
  8641  				v0.AddArg2(n, v1)
  8642  				v2 := b.NewValue0(v.Pos, OpConst32, t)
  8643  				v2.AuxInt = int32ToAuxInt(0)
  8644  				v.AddArg2(v0, v2)
  8645  				return true
  8646  			}
  8647  		}
  8648  		break
  8649  	}
  8650  	// match: (Eq32 s:(Sub32 x y) (Const32 [0]))
  8651  	// cond: s.Uses == 1
  8652  	// result: (Eq32 x y)
  8653  	for {
  8654  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  8655  			s := v_0
  8656  			if s.Op != OpSub32 {
  8657  				continue
  8658  			}
  8659  			y := s.Args[1]
  8660  			x := s.Args[0]
  8661  			if v_1.Op != OpConst32 || auxIntToInt32(v_1.AuxInt) != 0 || !(s.Uses == 1) {
  8662  				continue
  8663  			}
  8664  			v.reset(OpEq32)
  8665  			v.AddArg2(x, y)
  8666  			return true
  8667  		}
  8668  		break
  8669  	}
  8670  	// match: (Eq32 (And32 <t> x (Const32 <t> [y])) (Const32 <t> [y]))
  8671  	// cond: oneBit32(y)
  8672  	// result: (Neq32 (And32 <t> x (Const32 <t> [y])) (Const32 <t> [0]))
  8673  	for {
  8674  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  8675  			if v_0.Op != OpAnd32 {
  8676  				continue
  8677  			}
  8678  			t := v_0.Type
  8679  			_ = v_0.Args[1]
  8680  			v_0_0 := v_0.Args[0]
  8681  			v_0_1 := v_0.Args[1]
  8682  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
  8683  				x := v_0_0
  8684  				if v_0_1.Op != OpConst32 || v_0_1.Type != t {
  8685  					continue
  8686  				}
  8687  				y := auxIntToInt32(v_0_1.AuxInt)
  8688  				if v_1.Op != OpConst32 || v_1.Type != t || auxIntToInt32(v_1.AuxInt) != y || !(oneBit32(y)) {
  8689  					continue
  8690  				}
  8691  				v.reset(OpNeq32)
  8692  				v0 := b.NewValue0(v.Pos, OpAnd32, t)
  8693  				v1 := b.NewValue0(v.Pos, OpConst32, t)
  8694  				v1.AuxInt = int32ToAuxInt(y)
  8695  				v0.AddArg2(x, v1)
  8696  				v2 := b.NewValue0(v.Pos, OpConst32, t)
  8697  				v2.AuxInt = int32ToAuxInt(0)
  8698  				v.AddArg2(v0, v2)
  8699  				return true
  8700  			}
  8701  		}
  8702  		break
  8703  	}
  8704  	return false
  8705  }
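// rewriteValuegeneric_OpEq32F only folds comparisons of two float32 constants.
// Note there is no "Eq32F x x => true" rule here, unlike the integer Eq ops:
// if x is NaN, x == x is false, so that fold would be unsound.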
  8706  func rewriteValuegeneric_OpEq32F(v *Value) bool {
  8707  	v_1 := v.Args[1]
  8708  	v_0 := v.Args[0]
  8709  	// match: (Eq32F (Const32F [c]) (Const32F [d]))
  8710  	// result: (ConstBool [c == d])
  8711  	for {
  8712  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  8713  			if v_0.Op != OpConst32F {
  8714  				continue
  8715  			}
  8716  			c := auxIntToFloat32(v_0.AuxInt)
  8717  			if v_1.Op != OpConst32F {
  8718  				continue
  8719  			}
  8720  			d := auxIntToFloat32(v_1.AuxInt)
  8721  			v.reset(OpConstBool)
  8722  			v.AuxInt = boolToAuxInt(c == d)
  8723  			return true
  8724  		}
  8725  		break
  8726  	}
  8727  	return false
  8728  }
  8729  func rewriteValuegeneric_OpEq64(v *Value) bool {
  8730  	v_1 := v.Args[1]
  8731  	v_0 := v.Args[0]
  8732  	b := v.Block
  8733  	typ := &b.Func.Config.Types
  8734  	// match: (Eq64 x x)
  8735  	// result: (ConstBool [true])
  8736  	for {
  8737  		x := v_0
  8738  		if x != v_1 {
  8739  			break
  8740  		}
  8741  		v.reset(OpConstBool)
  8742  		v.AuxInt = boolToAuxInt(true)
  8743  		return true
  8744  	}
  8745  	// match: (Eq64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x))
  8746  	// result: (Eq64 (Const64 <t> [c-d]) x)
  8747  	for {
  8748  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  8749  			if v_0.Op != OpConst64 {
  8750  				continue
  8751  			}
  8752  			t := v_0.Type
  8753  			c := auxIntToInt64(v_0.AuxInt)
  8754  			if v_1.Op != OpAdd64 {
  8755  				continue
  8756  			}
  8757  			_ = v_1.Args[1]
  8758  			v_1_0 := v_1.Args[0]
  8759  			v_1_1 := v_1.Args[1]
  8760  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  8761  				if v_1_0.Op != OpConst64 || v_1_0.Type != t {
  8762  					continue
  8763  				}
  8764  				d := auxIntToInt64(v_1_0.AuxInt)
  8765  				x := v_1_1
  8766  				v.reset(OpEq64)
  8767  				v0 := b.NewValue0(v.Pos, OpConst64, t)
  8768  				v0.AuxInt = int64ToAuxInt(c - d)
  8769  				v.AddArg2(v0, x)
  8770  				return true
  8771  			}
  8772  		}
  8773  		break
  8774  	}
  8775  	// match: (Eq64 (Const64 [c]) (Const64 [d]))
  8776  	// result: (ConstBool [c == d])
  8777  	for {
  8778  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  8779  			if v_0.Op != OpConst64 {
  8780  				continue
  8781  			}
  8782  			c := auxIntToInt64(v_0.AuxInt)
  8783  			if v_1.Op != OpConst64 {
  8784  				continue
  8785  			}
  8786  			d := auxIntToInt64(v_1.AuxInt)
  8787  			v.reset(OpConstBool)
  8788  			v.AuxInt = boolToAuxInt(c == d)
  8789  			return true
  8790  		}
  8791  		break
  8792  	}
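	// The next three rules handle unsigned divisibility, x%c == 0 for 64-bit x.
	// They match the three shapes that the earlier unsigned divide-by-constant
	// expansion can produce (a plain Hmul64u, a pre-shifted input, or an Avg64u
	// fixup) and replace the whole x == c*(x/c) expression with the
	// multiply+rotate+compare test derived from udivisible64(c).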
  8793  	// match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) x) (Const64 [s])) ) )
  8794  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic64(c).m/2) && s == umagic64(c).s-1 && x.Op != OpConst64 && udivisibleOK64(c)
  8795  	// result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible64(c).m)]) x) (Const64 <typ.UInt64> [64-udivisible64(c).k]) ) (Const64 <typ.UInt64> [int64(udivisible64(c).max)]) )
  8796  	for {
  8797  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  8798  			x := v_0
  8799  			if v_1.Op != OpMul64 {
  8800  				continue
  8801  			}
  8802  			_ = v_1.Args[1]
  8803  			v_1_0 := v_1.Args[0]
  8804  			v_1_1 := v_1.Args[1]
  8805  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  8806  				if v_1_0.Op != OpConst64 {
  8807  					continue
  8808  				}
  8809  				c := auxIntToInt64(v_1_0.AuxInt)
  8810  				if v_1_1.Op != OpRsh64Ux64 {
  8811  					continue
  8812  				}
  8813  				_ = v_1_1.Args[1]
  8814  				mul := v_1_1.Args[0]
  8815  				if mul.Op != OpHmul64u {
  8816  					continue
  8817  				}
  8818  				_ = mul.Args[1]
  8819  				mul_0 := mul.Args[0]
  8820  				mul_1 := mul.Args[1]
  8821  				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
  8822  					if mul_0.Op != OpConst64 {
  8823  						continue
  8824  					}
  8825  					m := auxIntToInt64(mul_0.AuxInt)
  8826  					if x != mul_1 {
  8827  						continue
  8828  					}
  8829  					v_1_1_1 := v_1_1.Args[1]
  8830  					if v_1_1_1.Op != OpConst64 {
  8831  						continue
  8832  					}
  8833  					s := auxIntToInt64(v_1_1_1.AuxInt)
  8834  					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic64(c).m/2) && s == umagic64(c).s-1 && x.Op != OpConst64 && udivisibleOK64(c)) {
  8835  						continue
  8836  					}
  8837  					v.reset(OpLeq64U)
  8838  					v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
  8839  					v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
  8840  					v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  8841  					v2.AuxInt = int64ToAuxInt(int64(udivisible64(c).m))
  8842  					v1.AddArg2(v2, x)
  8843  					v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  8844  					v3.AuxInt = int64ToAuxInt(64 - udivisible64(c).k)
  8845  					v0.AddArg2(v1, v3)
  8846  					v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  8847  					v4.AuxInt = int64ToAuxInt(int64(udivisible64(c).max))
  8848  					v.AddArg2(v0, v4)
  8849  					return true
  8850  				}
  8851  			}
  8852  		}
  8853  		break
  8854  	}
  8855  	// match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) (Rsh64Ux64 x (Const64 [1]))) (Const64 [s])) ) )
  8856  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic64(c).m+1)/2) && s == umagic64(c).s-2 && x.Op != OpConst64 && udivisibleOK64(c)
  8857  	// result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible64(c).m)]) x) (Const64 <typ.UInt64> [64-udivisible64(c).k]) ) (Const64 <typ.UInt64> [int64(udivisible64(c).max)]) )
  8858  	for {
  8859  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  8860  			x := v_0
  8861  			if v_1.Op != OpMul64 {
  8862  				continue
  8863  			}
  8864  			_ = v_1.Args[1]
  8865  			v_1_0 := v_1.Args[0]
  8866  			v_1_1 := v_1.Args[1]
  8867  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  8868  				if v_1_0.Op != OpConst64 {
  8869  					continue
  8870  				}
  8871  				c := auxIntToInt64(v_1_0.AuxInt)
  8872  				if v_1_1.Op != OpRsh64Ux64 {
  8873  					continue
  8874  				}
  8875  				_ = v_1_1.Args[1]
  8876  				mul := v_1_1.Args[0]
  8877  				if mul.Op != OpHmul64u {
  8878  					continue
  8879  				}
  8880  				_ = mul.Args[1]
  8881  				mul_0 := mul.Args[0]
  8882  				mul_1 := mul.Args[1]
  8883  				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
  8884  					if mul_0.Op != OpConst64 {
  8885  						continue
  8886  					}
  8887  					m := auxIntToInt64(mul_0.AuxInt)
  8888  					if mul_1.Op != OpRsh64Ux64 {
  8889  						continue
  8890  					}
  8891  					_ = mul_1.Args[1]
  8892  					if x != mul_1.Args[0] {
  8893  						continue
  8894  					}
  8895  					mul_1_1 := mul_1.Args[1]
  8896  					if mul_1_1.Op != OpConst64 || auxIntToInt64(mul_1_1.AuxInt) != 1 {
  8897  						continue
  8898  					}
  8899  					v_1_1_1 := v_1_1.Args[1]
  8900  					if v_1_1_1.Op != OpConst64 {
  8901  						continue
  8902  					}
  8903  					s := auxIntToInt64(v_1_1_1.AuxInt)
  8904  					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic64(c).m+1)/2) && s == umagic64(c).s-2 && x.Op != OpConst64 && udivisibleOK64(c)) {
  8905  						continue
  8906  					}
  8907  					v.reset(OpLeq64U)
  8908  					v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
  8909  					v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
  8910  					v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  8911  					v2.AuxInt = int64ToAuxInt(int64(udivisible64(c).m))
  8912  					v1.AddArg2(v2, x)
  8913  					v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  8914  					v3.AuxInt = int64ToAuxInt(64 - udivisible64(c).k)
  8915  					v0.AddArg2(v1, v3)
  8916  					v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  8917  					v4.AuxInt = int64ToAuxInt(int64(udivisible64(c).max))
  8918  					v.AddArg2(v0, v4)
  8919  					return true
  8920  				}
  8921  			}
  8922  		}
  8923  		break
  8924  	}
  8925  	// match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 (Avg64u x mul:(Hmul64u (Const64 [m]) x)) (Const64 [s])) ) )
  8926  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic64(c).m) && s == umagic64(c).s-1 && x.Op != OpConst64 && udivisibleOK64(c)
  8927  	// result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible64(c).m)]) x) (Const64 <typ.UInt64> [64-udivisible64(c).k]) ) (Const64 <typ.UInt64> [int64(udivisible64(c).max)]) )
  8928  	for {
  8929  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  8930  			x := v_0
  8931  			if v_1.Op != OpMul64 {
  8932  				continue
  8933  			}
  8934  			_ = v_1.Args[1]
  8935  			v_1_0 := v_1.Args[0]
  8936  			v_1_1 := v_1.Args[1]
  8937  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  8938  				if v_1_0.Op != OpConst64 {
  8939  					continue
  8940  				}
  8941  				c := auxIntToInt64(v_1_0.AuxInt)
  8942  				if v_1_1.Op != OpRsh64Ux64 {
  8943  					continue
  8944  				}
  8945  				_ = v_1_1.Args[1]
  8946  				v_1_1_0 := v_1_1.Args[0]
  8947  				if v_1_1_0.Op != OpAvg64u {
  8948  					continue
  8949  				}
  8950  				_ = v_1_1_0.Args[1]
  8951  				if x != v_1_1_0.Args[0] {
  8952  					continue
  8953  				}
  8954  				mul := v_1_1_0.Args[1]
  8955  				if mul.Op != OpHmul64u {
  8956  					continue
  8957  				}
  8958  				_ = mul.Args[1]
  8959  				mul_0 := mul.Args[0]
  8960  				mul_1 := mul.Args[1]
  8961  				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
  8962  					if mul_0.Op != OpConst64 {
  8963  						continue
  8964  					}
  8965  					m := auxIntToInt64(mul_0.AuxInt)
  8966  					if x != mul_1 {
  8967  						continue
  8968  					}
  8969  					v_1_1_1 := v_1_1.Args[1]
  8970  					if v_1_1_1.Op != OpConst64 {
  8971  						continue
  8972  					}
  8973  					s := auxIntToInt64(v_1_1_1.AuxInt)
  8974  					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic64(c).m) && s == umagic64(c).s-1 && x.Op != OpConst64 && udivisibleOK64(c)) {
  8975  						continue
  8976  					}
  8977  					v.reset(OpLeq64U)
  8978  					v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
  8979  					v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
  8980  					v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  8981  					v2.AuxInt = int64ToAuxInt(int64(udivisible64(c).m))
  8982  					v1.AddArg2(v2, x)
  8983  					v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  8984  					v3.AuxInt = int64ToAuxInt(64 - udivisible64(c).k)
  8985  					v0.AddArg2(v1, v3)
  8986  					v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  8987  					v4.AuxInt = int64ToAuxInt(int64(udivisible64(c).max))
  8988  					v.AddArg2(v0, v4)
  8989  					return true
  8990  				}
  8991  			}
  8992  		}
  8993  		break
  8994  	}
  8995  	// match: (Eq64 x (Mul64 (Const64 [c]) (Sub64 (Rsh64x64 mul:(Hmul64 (Const64 [m]) x) (Const64 [s])) (Rsh64x64 x (Const64 [63]))) ) )
  8996  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic64(c).m/2) && s == smagic64(c).s-1 && x.Op != OpConst64 && sdivisibleOK64(c)
  8997  	// result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible64(c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible64(c).a)]) ) (Const64 <typ.UInt64> [64-sdivisible64(c).k]) ) (Const64 <typ.UInt64> [int64(sdivisible64(c).max)]) )
  8998  	for {
  8999  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9000  			x := v_0
  9001  			if v_1.Op != OpMul64 {
  9002  				continue
  9003  			}
  9004  			_ = v_1.Args[1]
  9005  			v_1_0 := v_1.Args[0]
  9006  			v_1_1 := v_1.Args[1]
  9007  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  9008  				if v_1_0.Op != OpConst64 {
  9009  					continue
  9010  				}
  9011  				c := auxIntToInt64(v_1_0.AuxInt)
  9012  				if v_1_1.Op != OpSub64 {
  9013  					continue
  9014  				}
  9015  				_ = v_1_1.Args[1]
  9016  				v_1_1_0 := v_1_1.Args[0]
  9017  				if v_1_1_0.Op != OpRsh64x64 {
  9018  					continue
  9019  				}
  9020  				_ = v_1_1_0.Args[1]
  9021  				mul := v_1_1_0.Args[0]
  9022  				if mul.Op != OpHmul64 {
  9023  					continue
  9024  				}
  9025  				_ = mul.Args[1]
  9026  				mul_0 := mul.Args[0]
  9027  				mul_1 := mul.Args[1]
  9028  				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
  9029  					if mul_0.Op != OpConst64 {
  9030  						continue
  9031  					}
  9032  					m := auxIntToInt64(mul_0.AuxInt)
  9033  					if x != mul_1 {
  9034  						continue
  9035  					}
  9036  					v_1_1_0_1 := v_1_1_0.Args[1]
  9037  					if v_1_1_0_1.Op != OpConst64 {
  9038  						continue
  9039  					}
  9040  					s := auxIntToInt64(v_1_1_0_1.AuxInt)
  9041  					v_1_1_1 := v_1_1.Args[1]
  9042  					if v_1_1_1.Op != OpRsh64x64 {
  9043  						continue
  9044  					}
  9045  					_ = v_1_1_1.Args[1]
  9046  					if x != v_1_1_1.Args[0] {
  9047  						continue
  9048  					}
  9049  					v_1_1_1_1 := v_1_1_1.Args[1]
  9050  					if v_1_1_1_1.Op != OpConst64 || auxIntToInt64(v_1_1_1_1.AuxInt) != 63 || !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic64(c).m/2) && s == smagic64(c).s-1 && x.Op != OpConst64 && sdivisibleOK64(c)) {
  9051  						continue
  9052  					}
  9053  					v.reset(OpLeq64U)
  9054  					v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
  9055  					v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
  9056  					v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
  9057  					v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  9058  					v3.AuxInt = int64ToAuxInt(int64(sdivisible64(c).m))
  9059  					v2.AddArg2(v3, x)
  9060  					v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  9061  					v4.AuxInt = int64ToAuxInt(int64(sdivisible64(c).a))
  9062  					v1.AddArg2(v2, v4)
  9063  					v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  9064  					v5.AuxInt = int64ToAuxInt(64 - sdivisible64(c).k)
  9065  					v0.AddArg2(v1, v5)
  9066  					v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  9067  					v6.AuxInt = int64ToAuxInt(int64(sdivisible64(c).max))
  9068  					v.AddArg2(v0, v6)
  9069  					return true
  9070  				}
  9071  			}
  9072  		}
  9073  		break
  9074  	}
  9075  	// match: (Eq64 x (Mul64 (Const64 [c]) (Sub64 (Rsh64x64 (Add64 mul:(Hmul64 (Const64 [m]) x) x) (Const64 [s])) (Rsh64x64 x (Const64 [63]))) ) )
  9076  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic64(c).m) && s == smagic64(c).s && x.Op != OpConst64 && sdivisibleOK64(c)
  9077  	// result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible64(c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible64(c).a)]) ) (Const64 <typ.UInt64> [64-sdivisible64(c).k]) ) (Const64 <typ.UInt64> [int64(sdivisible64(c).max)]) )
  9078  	for {
  9079  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9080  			x := v_0
  9081  			if v_1.Op != OpMul64 {
  9082  				continue
  9083  			}
  9084  			_ = v_1.Args[1]
  9085  			v_1_0 := v_1.Args[0]
  9086  			v_1_1 := v_1.Args[1]
  9087  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  9088  				if v_1_0.Op != OpConst64 {
  9089  					continue
  9090  				}
  9091  				c := auxIntToInt64(v_1_0.AuxInt)
  9092  				if v_1_1.Op != OpSub64 {
  9093  					continue
  9094  				}
  9095  				_ = v_1_1.Args[1]
  9096  				v_1_1_0 := v_1_1.Args[0]
  9097  				if v_1_1_0.Op != OpRsh64x64 {
  9098  					continue
  9099  				}
  9100  				_ = v_1_1_0.Args[1]
  9101  				v_1_1_0_0 := v_1_1_0.Args[0]
  9102  				if v_1_1_0_0.Op != OpAdd64 {
  9103  					continue
  9104  				}
  9105  				_ = v_1_1_0_0.Args[1]
  9106  				v_1_1_0_0_0 := v_1_1_0_0.Args[0]
  9107  				v_1_1_0_0_1 := v_1_1_0_0.Args[1]
  9108  				for _i2 := 0; _i2 <= 1; _i2, v_1_1_0_0_0, v_1_1_0_0_1 = _i2+1, v_1_1_0_0_1, v_1_1_0_0_0 {
  9109  					mul := v_1_1_0_0_0
  9110  					if mul.Op != OpHmul64 {
  9111  						continue
  9112  					}
  9113  					_ = mul.Args[1]
  9114  					mul_0 := mul.Args[0]
  9115  					mul_1 := mul.Args[1]
  9116  					for _i3 := 0; _i3 <= 1; _i3, mul_0, mul_1 = _i3+1, mul_1, mul_0 {
  9117  						if mul_0.Op != OpConst64 {
  9118  							continue
  9119  						}
  9120  						m := auxIntToInt64(mul_0.AuxInt)
  9121  						if x != mul_1 || x != v_1_1_0_0_1 {
  9122  							continue
  9123  						}
  9124  						v_1_1_0_1 := v_1_1_0.Args[1]
  9125  						if v_1_1_0_1.Op != OpConst64 {
  9126  							continue
  9127  						}
  9128  						s := auxIntToInt64(v_1_1_0_1.AuxInt)
  9129  						v_1_1_1 := v_1_1.Args[1]
  9130  						if v_1_1_1.Op != OpRsh64x64 {
  9131  							continue
  9132  						}
  9133  						_ = v_1_1_1.Args[1]
  9134  						if x != v_1_1_1.Args[0] {
  9135  							continue
  9136  						}
  9137  						v_1_1_1_1 := v_1_1_1.Args[1]
  9138  						if v_1_1_1_1.Op != OpConst64 || auxIntToInt64(v_1_1_1_1.AuxInt) != 63 || !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic64(c).m) && s == smagic64(c).s && x.Op != OpConst64 && sdivisibleOK64(c)) {
  9139  							continue
  9140  						}
  9141  						v.reset(OpLeq64U)
  9142  						v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
  9143  						v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
  9144  						v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
  9145  						v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  9146  						v3.AuxInt = int64ToAuxInt(int64(sdivisible64(c).m))
  9147  						v2.AddArg2(v3, x)
  9148  						v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  9149  						v4.AuxInt = int64ToAuxInt(int64(sdivisible64(c).a))
  9150  						v1.AddArg2(v2, v4)
  9151  						v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  9152  						v5.AuxInt = int64ToAuxInt(64 - sdivisible64(c).k)
  9153  						v0.AddArg2(v1, v5)
  9154  						v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  9155  						v6.AuxInt = int64ToAuxInt(int64(sdivisible64(c).max))
  9156  						v.AddArg2(v0, v6)
  9157  						return true
  9158  					}
  9159  				}
  9160  			}
  9161  		}
  9162  		break
  9163  	}
  9164  	// match: (Eq64 n (Lsh64x64 (Rsh64x64 (Add64 <t> n (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) )
  9165  	// cond: k > 0 && k < 63 && kbar == 64 - k
  9166  	// result: (Eq64 (And64 <t> n (Const64 <t> [1<<uint(k)-1])) (Const64 <t> [0]))
  9167  	for {
  9168  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9169  			n := v_0
  9170  			if v_1.Op != OpLsh64x64 {
  9171  				continue
  9172  			}
  9173  			_ = v_1.Args[1]
  9174  			v_1_0 := v_1.Args[0]
  9175  			if v_1_0.Op != OpRsh64x64 {
  9176  				continue
  9177  			}
  9178  			_ = v_1_0.Args[1]
  9179  			v_1_0_0 := v_1_0.Args[0]
  9180  			if v_1_0_0.Op != OpAdd64 {
  9181  				continue
  9182  			}
  9183  			t := v_1_0_0.Type
  9184  			_ = v_1_0_0.Args[1]
  9185  			v_1_0_0_0 := v_1_0_0.Args[0]
  9186  			v_1_0_0_1 := v_1_0_0.Args[1]
  9187  			for _i1 := 0; _i1 <= 1; _i1, v_1_0_0_0, v_1_0_0_1 = _i1+1, v_1_0_0_1, v_1_0_0_0 {
  9188  				if n != v_1_0_0_0 || v_1_0_0_1.Op != OpRsh64Ux64 || v_1_0_0_1.Type != t {
  9189  					continue
  9190  				}
  9191  				_ = v_1_0_0_1.Args[1]
  9192  				v_1_0_0_1_0 := v_1_0_0_1.Args[0]
  9193  				if v_1_0_0_1_0.Op != OpRsh64x64 || v_1_0_0_1_0.Type != t {
  9194  					continue
  9195  				}
  9196  				_ = v_1_0_0_1_0.Args[1]
  9197  				if n != v_1_0_0_1_0.Args[0] {
  9198  					continue
  9199  				}
  9200  				v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
  9201  				if v_1_0_0_1_0_1.Op != OpConst64 || v_1_0_0_1_0_1.Type != typ.UInt64 || auxIntToInt64(v_1_0_0_1_0_1.AuxInt) != 63 {
  9202  					continue
  9203  				}
  9204  				v_1_0_0_1_1 := v_1_0_0_1.Args[1]
  9205  				if v_1_0_0_1_1.Op != OpConst64 || v_1_0_0_1_1.Type != typ.UInt64 {
  9206  					continue
  9207  				}
  9208  				kbar := auxIntToInt64(v_1_0_0_1_1.AuxInt)
  9209  				v_1_0_1 := v_1_0.Args[1]
  9210  				if v_1_0_1.Op != OpConst64 || v_1_0_1.Type != typ.UInt64 {
  9211  					continue
  9212  				}
  9213  				k := auxIntToInt64(v_1_0_1.AuxInt)
  9214  				v_1_1 := v_1.Args[1]
  9215  				if v_1_1.Op != OpConst64 || v_1_1.Type != typ.UInt64 || auxIntToInt64(v_1_1.AuxInt) != k || !(k > 0 && k < 63 && kbar == 64-k) {
  9216  					continue
  9217  				}
  9218  				v.reset(OpEq64)
  9219  				v0 := b.NewValue0(v.Pos, OpAnd64, t)
  9220  				v1 := b.NewValue0(v.Pos, OpConst64, t)
  9221  				v1.AuxInt = int64ToAuxInt(1<<uint(k) - 1)
  9222  				v0.AddArg2(n, v1)
  9223  				v2 := b.NewValue0(v.Pos, OpConst64, t)
  9224  				v2.AuxInt = int64ToAuxInt(0)
  9225  				v.AddArg2(v0, v2)
  9226  				return true
  9227  			}
  9228  		}
  9229  		break
  9230  	}
  9231  	// match: (Eq64 s:(Sub64 x y) (Const64 [0]))
  9232  	// cond: s.Uses == 1
  9233  	// result: (Eq64 x y)
  9234  	for {
  9235  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9236  			s := v_0
  9237  			if s.Op != OpSub64 {
  9238  				continue
  9239  			}
  9240  			y := s.Args[1]
  9241  			x := s.Args[0]
  9242  			if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 || !(s.Uses == 1) {
  9243  				continue
  9244  			}
  9245  			v.reset(OpEq64)
  9246  			v.AddArg2(x, y)
  9247  			return true
  9248  		}
  9249  		break
  9250  	}
  9251  	// match: (Eq64 (And64 <t> x (Const64 <t> [y])) (Const64 <t> [y]))
  9252  	// cond: oneBit64(y)
  9253  	// result: (Neq64 (And64 <t> x (Const64 <t> [y])) (Const64 <t> [0]))
  9254  	for {
  9255  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9256  			if v_0.Op != OpAnd64 {
  9257  				continue
  9258  			}
  9259  			t := v_0.Type
  9260  			_ = v_0.Args[1]
  9261  			v_0_0 := v_0.Args[0]
  9262  			v_0_1 := v_0.Args[1]
  9263  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
  9264  				x := v_0_0
  9265  				if v_0_1.Op != OpConst64 || v_0_1.Type != t {
  9266  					continue
  9267  				}
  9268  				y := auxIntToInt64(v_0_1.AuxInt)
  9269  				if v_1.Op != OpConst64 || v_1.Type != t || auxIntToInt64(v_1.AuxInt) != y || !(oneBit64(y)) {
  9270  					continue
  9271  				}
  9272  				v.reset(OpNeq64)
  9273  				v0 := b.NewValue0(v.Pos, OpAnd64, t)
  9274  				v1 := b.NewValue0(v.Pos, OpConst64, t)
  9275  				v1.AuxInt = int64ToAuxInt(y)
  9276  				v0.AddArg2(x, v1)
  9277  				v2 := b.NewValue0(v.Pos, OpConst64, t)
  9278  				v2.AuxInt = int64ToAuxInt(0)
  9279  				v.AddArg2(v0, v2)
  9280  				return true
  9281  			}
  9282  		}
  9283  		break
  9284  	}
  9285  	return false
  9286  }
  9287  func rewriteValuegeneric_OpEq64F(v *Value) bool {
  9288  	v_1 := v.Args[1]
  9289  	v_0 := v.Args[0]
  9290  	// match: (Eq64F (Const64F [c]) (Const64F [d]))
  9291  	// result: (ConstBool [c == d])
  9292  	for {
  9293  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9294  			if v_0.Op != OpConst64F {
  9295  				continue
  9296  			}
  9297  			c := auxIntToFloat64(v_0.AuxInt)
  9298  			if v_1.Op != OpConst64F {
  9299  				continue
  9300  			}
  9301  			d := auxIntToFloat64(v_1.AuxInt)
  9302  			v.reset(OpConstBool)
  9303  			v.AuxInt = boolToAuxInt(c == d)
  9304  			return true
  9305  		}
  9306  		break
  9307  	}
  9308  	return false
  9309  }
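// rewriteValuegeneric_OpEq8 mirrors the 32/64-bit Eq rules. The divisibility
// and modulus patterns below work through 32-bit arithmetic: Mod8/Mod8u checks
// are widened to Mod32/Mod32u when the target lacks small rotates, and the
// magic-number patterns match Mul32 over zero/sign-extended operands before
// truncating back to 8 bits.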
  9310  func rewriteValuegeneric_OpEq8(v *Value) bool {
  9311  	v_1 := v.Args[1]
  9312  	v_0 := v.Args[0]
  9313  	b := v.Block
  9314  	config := b.Func.Config
  9315  	typ := &b.Func.Config.Types
  9316  	// match: (Eq8 x x)
  9317  	// result: (ConstBool [true])
  9318  	for {
  9319  		x := v_0
  9320  		if x != v_1 {
  9321  			break
  9322  		}
  9323  		v.reset(OpConstBool)
  9324  		v.AuxInt = boolToAuxInt(true)
  9325  		return true
  9326  	}
  9327  	// match: (Eq8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x))
  9328  	// result: (Eq8 (Const8 <t> [c-d]) x)
  9329  	for {
  9330  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9331  			if v_0.Op != OpConst8 {
  9332  				continue
  9333  			}
  9334  			t := v_0.Type
  9335  			c := auxIntToInt8(v_0.AuxInt)
  9336  			if v_1.Op != OpAdd8 {
  9337  				continue
  9338  			}
  9339  			_ = v_1.Args[1]
  9340  			v_1_0 := v_1.Args[0]
  9341  			v_1_1 := v_1.Args[1]
  9342  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  9343  				if v_1_0.Op != OpConst8 || v_1_0.Type != t {
  9344  					continue
  9345  				}
  9346  				d := auxIntToInt8(v_1_0.AuxInt)
  9347  				x := v_1_1
  9348  				v.reset(OpEq8)
  9349  				v0 := b.NewValue0(v.Pos, OpConst8, t)
  9350  				v0.AuxInt = int8ToAuxInt(c - d)
  9351  				v.AddArg2(v0, x)
  9352  				return true
  9353  			}
  9354  		}
  9355  		break
  9356  	}
  9357  	// match: (Eq8 (Const8 [c]) (Const8 [d]))
  9358  	// result: (ConstBool [c == d])
  9359  	for {
  9360  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9361  			if v_0.Op != OpConst8 {
  9362  				continue
  9363  			}
  9364  			c := auxIntToInt8(v_0.AuxInt)
  9365  			if v_1.Op != OpConst8 {
  9366  				continue
  9367  			}
  9368  			d := auxIntToInt8(v_1.AuxInt)
  9369  			v.reset(OpConstBool)
  9370  			v.AuxInt = boolToAuxInt(c == d)
  9371  			return true
  9372  		}
  9373  		break
  9374  	}
  9375  	// match: (Eq8 (Mod8u x (Const8 [c])) (Const8 [0]))
  9376  	// cond: x.Op != OpConst8 && udivisibleOK8(c) && !hasSmallRotate(config)
  9377  	// result: (Eq32 (Mod32u <typ.UInt32> (ZeroExt8to32 <typ.UInt32> x) (Const32 <typ.UInt32> [int32(uint8(c))])) (Const32 <typ.UInt32> [0]))
  9378  	for {
  9379  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9380  			if v_0.Op != OpMod8u {
  9381  				continue
  9382  			}
  9383  			_ = v_0.Args[1]
  9384  			x := v_0.Args[0]
  9385  			v_0_1 := v_0.Args[1]
  9386  			if v_0_1.Op != OpConst8 {
  9387  				continue
  9388  			}
  9389  			c := auxIntToInt8(v_0_1.AuxInt)
  9390  			if v_1.Op != OpConst8 || auxIntToInt8(v_1.AuxInt) != 0 || !(x.Op != OpConst8 && udivisibleOK8(c) && !hasSmallRotate(config)) {
  9391  				continue
  9392  			}
  9393  			v.reset(OpEq32)
  9394  			v0 := b.NewValue0(v.Pos, OpMod32u, typ.UInt32)
  9395  			v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  9396  			v1.AddArg(x)
  9397  			v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  9398  			v2.AuxInt = int32ToAuxInt(int32(uint8(c)))
  9399  			v0.AddArg2(v1, v2)
  9400  			v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  9401  			v3.AuxInt = int32ToAuxInt(0)
  9402  			v.AddArg2(v0, v3)
  9403  			return true
  9404  		}
  9405  		break
  9406  	}
  9407  	// match: (Eq8 (Mod8 x (Const8 [c])) (Const8 [0]))
  9408  	// cond: x.Op != OpConst8 && sdivisibleOK8(c) && !hasSmallRotate(config)
  9409  	// result: (Eq32 (Mod32 <typ.Int32> (SignExt8to32 <typ.Int32> x) (Const32 <typ.Int32> [int32(c)])) (Const32 <typ.Int32> [0]))
  9410  	for {
  9411  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9412  			if v_0.Op != OpMod8 {
  9413  				continue
  9414  			}
  9415  			_ = v_0.Args[1]
  9416  			x := v_0.Args[0]
  9417  			v_0_1 := v_0.Args[1]
  9418  			if v_0_1.Op != OpConst8 {
  9419  				continue
  9420  			}
  9421  			c := auxIntToInt8(v_0_1.AuxInt)
  9422  			if v_1.Op != OpConst8 || auxIntToInt8(v_1.AuxInt) != 0 || !(x.Op != OpConst8 && sdivisibleOK8(c) && !hasSmallRotate(config)) {
  9423  				continue
  9424  			}
  9425  			v.reset(OpEq32)
  9426  			v0 := b.NewValue0(v.Pos, OpMod32, typ.Int32)
  9427  			v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  9428  			v1.AddArg(x)
  9429  			v2 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
  9430  			v2.AuxInt = int32ToAuxInt(int32(c))
  9431  			v0.AddArg2(v1, v2)
  9432  			v3 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
  9433  			v3.AuxInt = int32ToAuxInt(0)
  9434  			v.AddArg2(v0, v3)
  9435  			return true
  9436  		}
  9437  		break
  9438  	}
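	// The two Mod8u/Mod8 rules above widen the check to 32 bits when
	// !hasSmallRotate(config); the widened form can then use the rotate-based
	// 32-bit divisibility rewrite even on targets without 8- or 16-bit rotates.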
  9439  	// match: (Eq8 x (Mul8 (Const8 [c]) (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (ZeroExt8to32 x)) (Const64 [s]))) ) )
  9440  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(1<<8+umagic8(c).m) && s == 8+umagic8(c).s && x.Op != OpConst8 && udivisibleOK8(c)
  9441  	// result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int8(udivisible8(c).m)]) x) (Const8 <typ.UInt8> [int8(8-udivisible8(c).k)]) ) (Const8 <typ.UInt8> [int8(udivisible8(c).max)]) )
  9442  	for {
  9443  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9444  			x := v_0
  9445  			if v_1.Op != OpMul8 {
  9446  				continue
  9447  			}
  9448  			_ = v_1.Args[1]
  9449  			v_1_0 := v_1.Args[0]
  9450  			v_1_1 := v_1.Args[1]
  9451  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  9452  				if v_1_0.Op != OpConst8 {
  9453  					continue
  9454  				}
  9455  				c := auxIntToInt8(v_1_0.AuxInt)
  9456  				if v_1_1.Op != OpTrunc32to8 {
  9457  					continue
  9458  				}
  9459  				v_1_1_0 := v_1_1.Args[0]
  9460  				if v_1_1_0.Op != OpRsh32Ux64 {
  9461  					continue
  9462  				}
  9463  				_ = v_1_1_0.Args[1]
  9464  				mul := v_1_1_0.Args[0]
  9465  				if mul.Op != OpMul32 {
  9466  					continue
  9467  				}
  9468  				_ = mul.Args[1]
  9469  				mul_0 := mul.Args[0]
  9470  				mul_1 := mul.Args[1]
  9471  				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
  9472  					if mul_0.Op != OpConst32 {
  9473  						continue
  9474  					}
  9475  					m := auxIntToInt32(mul_0.AuxInt)
  9476  					if mul_1.Op != OpZeroExt8to32 || x != mul_1.Args[0] {
  9477  						continue
  9478  					}
  9479  					v_1_1_0_1 := v_1_1_0.Args[1]
  9480  					if v_1_1_0_1.Op != OpConst64 {
  9481  						continue
  9482  					}
  9483  					s := auxIntToInt64(v_1_1_0_1.AuxInt)
  9484  					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(1<<8+umagic8(c).m) && s == 8+umagic8(c).s && x.Op != OpConst8 && udivisibleOK8(c)) {
  9485  						continue
  9486  					}
  9487  					v.reset(OpLeq8U)
  9488  					v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
  9489  					v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
  9490  					v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
  9491  					v2.AuxInt = int8ToAuxInt(int8(udivisible8(c).m))
  9492  					v1.AddArg2(v2, x)
  9493  					v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
  9494  					v3.AuxInt = int8ToAuxInt(int8(8 - udivisible8(c).k))
  9495  					v0.AddArg2(v1, v3)
  9496  					v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
  9497  					v4.AuxInt = int8ToAuxInt(int8(udivisible8(c).max))
  9498  					v.AddArg2(v0, v4)
  9499  					return true
  9500  				}
  9501  			}
  9502  		}
  9503  		break
  9504  	}
  9505  	// match: (Eq8 x (Mul8 (Const8 [c]) (Sub8 (Rsh32x64 mul:(Mul32 (Const32 [m]) (SignExt8to32 x)) (Const64 [s])) (Rsh32x64 (SignExt8to32 x) (Const64 [31]))) ) )
  9506  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(smagic8(c).m) && s == 8+smagic8(c).s && x.Op != OpConst8 && sdivisibleOK8(c)
  9507  	// result: (Leq8U (RotateLeft8 <typ.UInt8> (Add8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int8(sdivisible8(c).m)]) x) (Const8 <typ.UInt8> [int8(sdivisible8(c).a)]) ) (Const8 <typ.UInt8> [int8(8-sdivisible8(c).k)]) ) (Const8 <typ.UInt8> [int8(sdivisible8(c).max)]) )
  9508  	for {
  9509  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9510  			x := v_0
  9511  			if v_1.Op != OpMul8 {
  9512  				continue
  9513  			}
  9514  			_ = v_1.Args[1]
  9515  			v_1_0 := v_1.Args[0]
  9516  			v_1_1 := v_1.Args[1]
  9517  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  9518  				if v_1_0.Op != OpConst8 {
  9519  					continue
  9520  				}
  9521  				c := auxIntToInt8(v_1_0.AuxInt)
  9522  				if v_1_1.Op != OpSub8 {
  9523  					continue
  9524  				}
  9525  				_ = v_1_1.Args[1]
  9526  				v_1_1_0 := v_1_1.Args[0]
  9527  				if v_1_1_0.Op != OpRsh32x64 {
  9528  					continue
  9529  				}
  9530  				_ = v_1_1_0.Args[1]
  9531  				mul := v_1_1_0.Args[0]
  9532  				if mul.Op != OpMul32 {
  9533  					continue
  9534  				}
  9535  				_ = mul.Args[1]
  9536  				mul_0 := mul.Args[0]
  9537  				mul_1 := mul.Args[1]
  9538  				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
  9539  					if mul_0.Op != OpConst32 {
  9540  						continue
  9541  					}
  9542  					m := auxIntToInt32(mul_0.AuxInt)
  9543  					if mul_1.Op != OpSignExt8to32 || x != mul_1.Args[0] {
  9544  						continue
  9545  					}
  9546  					v_1_1_0_1 := v_1_1_0.Args[1]
  9547  					if v_1_1_0_1.Op != OpConst64 {
  9548  						continue
  9549  					}
  9550  					s := auxIntToInt64(v_1_1_0_1.AuxInt)
  9551  					v_1_1_1 := v_1_1.Args[1]
  9552  					if v_1_1_1.Op != OpRsh32x64 {
  9553  						continue
  9554  					}
  9555  					_ = v_1_1_1.Args[1]
  9556  					v_1_1_1_0 := v_1_1_1.Args[0]
  9557  					if v_1_1_1_0.Op != OpSignExt8to32 || x != v_1_1_1_0.Args[0] {
  9558  						continue
  9559  					}
  9560  					v_1_1_1_1 := v_1_1_1.Args[1]
  9561  					if v_1_1_1_1.Op != OpConst64 || auxIntToInt64(v_1_1_1_1.AuxInt) != 31 || !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(smagic8(c).m) && s == 8+smagic8(c).s && x.Op != OpConst8 && sdivisibleOK8(c)) {
  9562  						continue
  9563  					}
  9564  					v.reset(OpLeq8U)
  9565  					v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
  9566  					v1 := b.NewValue0(v.Pos, OpAdd8, typ.UInt8)
  9567  					v2 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
  9568  					v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
  9569  					v3.AuxInt = int8ToAuxInt(int8(sdivisible8(c).m))
  9570  					v2.AddArg2(v3, x)
  9571  					v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
  9572  					v4.AuxInt = int8ToAuxInt(int8(sdivisible8(c).a))
  9573  					v1.AddArg2(v2, v4)
  9574  					v5 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
  9575  					v5.AuxInt = int8ToAuxInt(int8(8 - sdivisible8(c).k))
  9576  					v0.AddArg2(v1, v5)
  9577  					v6 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
  9578  					v6.AuxInt = int8ToAuxInt(int8(sdivisible8(c).max))
  9579  					v.AddArg2(v0, v6)
  9580  					return true
  9581  				}
  9582  			}
  9583  		}
  9584  		break
  9585  	}
  9586  	// match: (Eq8 n (Lsh8x64 (Rsh8x64 (Add8 <t> n (Rsh8Ux64 <t> (Rsh8x64 <t> n (Const64 <typ.UInt64> [ 7])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) )
  9587  	// cond: k > 0 && k < 7 && kbar == 8 - k
  9588  	// result: (Eq8 (And8 <t> n (Const8 <t> [1<<uint(k)-1])) (Const8 <t> [0]))
  9589  	for {
  9590  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9591  			n := v_0
  9592  			if v_1.Op != OpLsh8x64 {
  9593  				continue
  9594  			}
  9595  			_ = v_1.Args[1]
  9596  			v_1_0 := v_1.Args[0]
  9597  			if v_1_0.Op != OpRsh8x64 {
  9598  				continue
  9599  			}
  9600  			_ = v_1_0.Args[1]
  9601  			v_1_0_0 := v_1_0.Args[0]
  9602  			if v_1_0_0.Op != OpAdd8 {
  9603  				continue
  9604  			}
  9605  			t := v_1_0_0.Type
  9606  			_ = v_1_0_0.Args[1]
  9607  			v_1_0_0_0 := v_1_0_0.Args[0]
  9608  			v_1_0_0_1 := v_1_0_0.Args[1]
  9609  			for _i1 := 0; _i1 <= 1; _i1, v_1_0_0_0, v_1_0_0_1 = _i1+1, v_1_0_0_1, v_1_0_0_0 {
  9610  				if n != v_1_0_0_0 || v_1_0_0_1.Op != OpRsh8Ux64 || v_1_0_0_1.Type != t {
  9611  					continue
  9612  				}
  9613  				_ = v_1_0_0_1.Args[1]
  9614  				v_1_0_0_1_0 := v_1_0_0_1.Args[0]
  9615  				if v_1_0_0_1_0.Op != OpRsh8x64 || v_1_0_0_1_0.Type != t {
  9616  					continue
  9617  				}
  9618  				_ = v_1_0_0_1_0.Args[1]
  9619  				if n != v_1_0_0_1_0.Args[0] {
  9620  					continue
  9621  				}
  9622  				v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
  9623  				if v_1_0_0_1_0_1.Op != OpConst64 || v_1_0_0_1_0_1.Type != typ.UInt64 || auxIntToInt64(v_1_0_0_1_0_1.AuxInt) != 7 {
  9624  					continue
  9625  				}
  9626  				v_1_0_0_1_1 := v_1_0_0_1.Args[1]
  9627  				if v_1_0_0_1_1.Op != OpConst64 || v_1_0_0_1_1.Type != typ.UInt64 {
  9628  					continue
  9629  				}
  9630  				kbar := auxIntToInt64(v_1_0_0_1_1.AuxInt)
  9631  				v_1_0_1 := v_1_0.Args[1]
  9632  				if v_1_0_1.Op != OpConst64 || v_1_0_1.Type != typ.UInt64 {
  9633  					continue
  9634  				}
  9635  				k := auxIntToInt64(v_1_0_1.AuxInt)
  9636  				v_1_1 := v_1.Args[1]
  9637  				if v_1_1.Op != OpConst64 || v_1_1.Type != typ.UInt64 || auxIntToInt64(v_1_1.AuxInt) != k || !(k > 0 && k < 7 && kbar == 8-k) {
  9638  					continue
  9639  				}
  9640  				v.reset(OpEq8)
  9641  				v0 := b.NewValue0(v.Pos, OpAnd8, t)
  9642  				v1 := b.NewValue0(v.Pos, OpConst8, t)
  9643  				v1.AuxInt = int8ToAuxInt(1<<uint(k) - 1)
  9644  				v0.AddArg2(n, v1)
  9645  				v2 := b.NewValue0(v.Pos, OpConst8, t)
  9646  				v2.AuxInt = int8ToAuxInt(0)
  9647  				v.AddArg2(v0, v2)
  9648  				return true
  9649  			}
  9650  		}
  9651  		break
  9652  	}
  9653  	// match: (Eq8 s:(Sub8 x y) (Const8 [0]))
  9654  	// cond: s.Uses == 1
  9655  	// result: (Eq8 x y)
  9656  	for {
  9657  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9658  			s := v_0
  9659  			if s.Op != OpSub8 {
  9660  				continue
  9661  			}
  9662  			y := s.Args[1]
  9663  			x := s.Args[0]
  9664  			if v_1.Op != OpConst8 || auxIntToInt8(v_1.AuxInt) != 0 || !(s.Uses == 1) {
  9665  				continue
  9666  			}
  9667  			v.reset(OpEq8)
  9668  			v.AddArg2(x, y)
  9669  			return true
  9670  		}
  9671  		break
  9672  	}
  9673  	// match: (Eq8 (And8 <t> x (Const8 <t> [y])) (Const8 <t> [y]))
  9674  	// cond: oneBit8(y)
  9675  	// result: (Neq8 (And8 <t> x (Const8 <t> [y])) (Const8 <t> [0]))
  9676  	for {
  9677  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9678  			if v_0.Op != OpAnd8 {
  9679  				continue
  9680  			}
  9681  			t := v_0.Type
  9682  			_ = v_0.Args[1]
  9683  			v_0_0 := v_0.Args[0]
  9684  			v_0_1 := v_0.Args[1]
  9685  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
  9686  				x := v_0_0
  9687  				if v_0_1.Op != OpConst8 || v_0_1.Type != t {
  9688  					continue
  9689  				}
  9690  				y := auxIntToInt8(v_0_1.AuxInt)
  9691  				if v_1.Op != OpConst8 || v_1.Type != t || auxIntToInt8(v_1.AuxInt) != y || !(oneBit8(y)) {
  9692  					continue
  9693  				}
  9694  				v.reset(OpNeq8)
  9695  				v0 := b.NewValue0(v.Pos, OpAnd8, t)
  9696  				v1 := b.NewValue0(v.Pos, OpConst8, t)
  9697  				v1.AuxInt = int8ToAuxInt(y)
  9698  				v0.AddArg2(x, v1)
  9699  				v2 := b.NewValue0(v.Pos, OpConst8, t)
  9700  				v2.AuxInt = int8ToAuxInt(0)
  9701  				v.AddArg2(v0, v2)
  9702  				return true
  9703  			}
  9704  		}
  9705  		break
  9706  	}
  9707  	return false
  9708  }
  9709  func rewriteValuegeneric_OpEqB(v *Value) bool {
  9710  	v_1 := v.Args[1]
  9711  	v_0 := v.Args[0]
  9712  	// match: (EqB (ConstBool [c]) (ConstBool [d]))
  9713  	// result: (ConstBool [c == d])
  9714  	for {
  9715  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9716  			if v_0.Op != OpConstBool {
  9717  				continue
  9718  			}
  9719  			c := auxIntToBool(v_0.AuxInt)
  9720  			if v_1.Op != OpConstBool {
  9721  				continue
  9722  			}
  9723  			d := auxIntToBool(v_1.AuxInt)
  9724  			v.reset(OpConstBool)
  9725  			v.AuxInt = boolToAuxInt(c == d)
  9726  			return true
  9727  		}
  9728  		break
  9729  	}
  9730  	// match: (EqB (ConstBool [false]) x)
  9731  	// result: (Not x)
  9732  	for {
  9733  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9734  			if v_0.Op != OpConstBool || auxIntToBool(v_0.AuxInt) != false {
  9735  				continue
  9736  			}
  9737  			x := v_1
  9738  			v.reset(OpNot)
  9739  			v.AddArg(x)
  9740  			return true
  9741  		}
  9742  		break
  9743  	}
  9744  	// match: (EqB (ConstBool [true]) x)
  9745  	// result: x
  9746  	for {
  9747  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9748  			if v_0.Op != OpConstBool || auxIntToBool(v_0.AuxInt) != true {
  9749  				continue
  9750  			}
  9751  			x := v_1
  9752  			v.copyOf(x)
  9753  			return true
  9754  		}
  9755  		break
  9756  	}
  9757  	return false
  9758  }
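// rewriteValuegeneric_OpEqInter lowers interface equality at this level to a
// pointer comparison of the two itab words; comparing the data words, when
// required, is handled elsewhere (by the runtime equality helpers).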
  9759  func rewriteValuegeneric_OpEqInter(v *Value) bool {
  9760  	v_1 := v.Args[1]
  9761  	v_0 := v.Args[0]
  9762  	b := v.Block
  9763  	typ := &b.Func.Config.Types
  9764  	// match: (EqInter x y)
  9765  	// result: (EqPtr (ITab x) (ITab y))
  9766  	for {
  9767  		x := v_0
  9768  		y := v_1
  9769  		v.reset(OpEqPtr)
  9770  		v0 := b.NewValue0(v.Pos, OpITab, typ.Uintptr)
  9771  		v0.AddArg(x)
  9772  		v1 := b.NewValue0(v.Pos, OpITab, typ.Uintptr)
  9773  		v1.AddArg(y)
  9774  		v.AddArg2(v0, v1)
  9775  		return true
  9776  	}
  9777  }
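// rewriteValuegeneric_OpEqPtr folds pointer comparisons whose outcome is known
// statically: identical values, Addr/LocalAddr symbols (with offset-adjusted
// OffPtrs), and pointer-sized constants. The (LocalAddr ...) vs (Addr ...)
// rules fold to false because a stack address can never equal the address of a
// global.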
  9778  func rewriteValuegeneric_OpEqPtr(v *Value) bool {
  9779  	v_1 := v.Args[1]
  9780  	v_0 := v.Args[0]
  9781  	b := v.Block
  9782  	typ := &b.Func.Config.Types
  9783  	// match: (EqPtr x x)
  9784  	// result: (ConstBool [true])
  9785  	for {
  9786  		x := v_0
  9787  		if x != v_1 {
  9788  			break
  9789  		}
  9790  		v.reset(OpConstBool)
  9791  		v.AuxInt = boolToAuxInt(true)
  9792  		return true
  9793  	}
  9794  	// match: (EqPtr (Addr {x} _) (Addr {y} _))
  9795  	// result: (ConstBool [x == y])
  9796  	for {
  9797  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9798  			if v_0.Op != OpAddr {
  9799  				continue
  9800  			}
  9801  			x := auxToSym(v_0.Aux)
  9802  			if v_1.Op != OpAddr {
  9803  				continue
  9804  			}
  9805  			y := auxToSym(v_1.Aux)
  9806  			v.reset(OpConstBool)
  9807  			v.AuxInt = boolToAuxInt(x == y)
  9808  			return true
  9809  		}
  9810  		break
  9811  	}
  9812  	// match: (EqPtr (Addr {x} _) (OffPtr [o] (Addr {y} _)))
  9813  	// result: (ConstBool [x == y && o == 0])
  9814  	for {
  9815  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9816  			if v_0.Op != OpAddr {
  9817  				continue
  9818  			}
  9819  			x := auxToSym(v_0.Aux)
  9820  			if v_1.Op != OpOffPtr {
  9821  				continue
  9822  			}
  9823  			o := auxIntToInt64(v_1.AuxInt)
  9824  			v_1_0 := v_1.Args[0]
  9825  			if v_1_0.Op != OpAddr {
  9826  				continue
  9827  			}
  9828  			y := auxToSym(v_1_0.Aux)
  9829  			v.reset(OpConstBool)
  9830  			v.AuxInt = boolToAuxInt(x == y && o == 0)
  9831  			return true
  9832  		}
  9833  		break
  9834  	}
  9835  	// match: (EqPtr (OffPtr [o1] (Addr {x} _)) (OffPtr [o2] (Addr {y} _)))
  9836  	// result: (ConstBool [x == y && o1 == o2])
  9837  	for {
  9838  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9839  			if v_0.Op != OpOffPtr {
  9840  				continue
  9841  			}
  9842  			o1 := auxIntToInt64(v_0.AuxInt)
  9843  			v_0_0 := v_0.Args[0]
  9844  			if v_0_0.Op != OpAddr {
  9845  				continue
  9846  			}
  9847  			x := auxToSym(v_0_0.Aux)
  9848  			if v_1.Op != OpOffPtr {
  9849  				continue
  9850  			}
  9851  			o2 := auxIntToInt64(v_1.AuxInt)
  9852  			v_1_0 := v_1.Args[0]
  9853  			if v_1_0.Op != OpAddr {
  9854  				continue
  9855  			}
  9856  			y := auxToSym(v_1_0.Aux)
  9857  			v.reset(OpConstBool)
  9858  			v.AuxInt = boolToAuxInt(x == y && o1 == o2)
  9859  			return true
  9860  		}
  9861  		break
  9862  	}
  9863  	// match: (EqPtr (LocalAddr {x} _ _) (LocalAddr {y} _ _))
  9864  	// result: (ConstBool [x == y])
  9865  	for {
  9866  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9867  			if v_0.Op != OpLocalAddr {
  9868  				continue
  9869  			}
  9870  			x := auxToSym(v_0.Aux)
  9871  			if v_1.Op != OpLocalAddr {
  9872  				continue
  9873  			}
  9874  			y := auxToSym(v_1.Aux)
  9875  			v.reset(OpConstBool)
  9876  			v.AuxInt = boolToAuxInt(x == y)
  9877  			return true
  9878  		}
  9879  		break
  9880  	}
  9881  	// match: (EqPtr (LocalAddr {x} _ _) (OffPtr [o] (LocalAddr {y} _ _)))
  9882  	// result: (ConstBool [x == y && o == 0])
  9883  	for {
  9884  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9885  			if v_0.Op != OpLocalAddr {
  9886  				continue
  9887  			}
  9888  			x := auxToSym(v_0.Aux)
  9889  			if v_1.Op != OpOffPtr {
  9890  				continue
  9891  			}
  9892  			o := auxIntToInt64(v_1.AuxInt)
  9893  			v_1_0 := v_1.Args[0]
  9894  			if v_1_0.Op != OpLocalAddr {
  9895  				continue
  9896  			}
  9897  			y := auxToSym(v_1_0.Aux)
  9898  			v.reset(OpConstBool)
  9899  			v.AuxInt = boolToAuxInt(x == y && o == 0)
  9900  			return true
  9901  		}
  9902  		break
  9903  	}
  9904  	// match: (EqPtr (OffPtr [o1] (LocalAddr {x} _ _)) (OffPtr [o2] (LocalAddr {y} _ _)))
  9905  	// result: (ConstBool [x == y && o1 == o2])
  9906  	for {
  9907  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9908  			if v_0.Op != OpOffPtr {
  9909  				continue
  9910  			}
  9911  			o1 := auxIntToInt64(v_0.AuxInt)
  9912  			v_0_0 := v_0.Args[0]
  9913  			if v_0_0.Op != OpLocalAddr {
  9914  				continue
  9915  			}
  9916  			x := auxToSym(v_0_0.Aux)
  9917  			if v_1.Op != OpOffPtr {
  9918  				continue
  9919  			}
  9920  			o2 := auxIntToInt64(v_1.AuxInt)
  9921  			v_1_0 := v_1.Args[0]
  9922  			if v_1_0.Op != OpLocalAddr {
  9923  				continue
  9924  			}
  9925  			y := auxToSym(v_1_0.Aux)
  9926  			v.reset(OpConstBool)
  9927  			v.AuxInt = boolToAuxInt(x == y && o1 == o2)
  9928  			return true
  9929  		}
  9930  		break
  9931  	}
  9932  	// match: (EqPtr (OffPtr [o1] p1) p2)
  9933  	// cond: isSamePtr(p1, p2)
  9934  	// result: (ConstBool [o1 == 0])
  9935  	for {
  9936  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9937  			if v_0.Op != OpOffPtr {
  9938  				continue
  9939  			}
  9940  			o1 := auxIntToInt64(v_0.AuxInt)
  9941  			p1 := v_0.Args[0]
  9942  			p2 := v_1
  9943  			if !(isSamePtr(p1, p2)) {
  9944  				continue
  9945  			}
  9946  			v.reset(OpConstBool)
  9947  			v.AuxInt = boolToAuxInt(o1 == 0)
  9948  			return true
  9949  		}
  9950  		break
  9951  	}
  9952  	// match: (EqPtr (OffPtr [o1] p1) (OffPtr [o2] p2))
  9953  	// cond: isSamePtr(p1, p2)
  9954  	// result: (ConstBool [o1 == o2])
  9955  	for {
  9956  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9957  			if v_0.Op != OpOffPtr {
  9958  				continue
  9959  			}
  9960  			o1 := auxIntToInt64(v_0.AuxInt)
  9961  			p1 := v_0.Args[0]
  9962  			if v_1.Op != OpOffPtr {
  9963  				continue
  9964  			}
  9965  			o2 := auxIntToInt64(v_1.AuxInt)
  9966  			p2 := v_1.Args[0]
  9967  			if !(isSamePtr(p1, p2)) {
  9968  				continue
  9969  			}
  9970  			v.reset(OpConstBool)
  9971  			v.AuxInt = boolToAuxInt(o1 == o2)
  9972  			return true
  9973  		}
  9974  		break
  9975  	}
  9976  	// match: (EqPtr (Const32 [c]) (Const32 [d]))
  9977  	// result: (ConstBool [c == d])
  9978  	for {
  9979  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9980  			if v_0.Op != OpConst32 {
  9981  				continue
  9982  			}
  9983  			c := auxIntToInt32(v_0.AuxInt)
  9984  			if v_1.Op != OpConst32 {
  9985  				continue
  9986  			}
  9987  			d := auxIntToInt32(v_1.AuxInt)
  9988  			v.reset(OpConstBool)
  9989  			v.AuxInt = boolToAuxInt(c == d)
  9990  			return true
  9991  		}
  9992  		break
  9993  	}
  9994  	// match: (EqPtr (Const64 [c]) (Const64 [d]))
  9995  	// result: (ConstBool [c == d])
  9996  	for {
  9997  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9998  			if v_0.Op != OpConst64 {
  9999  				continue
 10000  			}
 10001  			c := auxIntToInt64(v_0.AuxInt)
 10002  			if v_1.Op != OpConst64 {
 10003  				continue
 10004  			}
 10005  			d := auxIntToInt64(v_1.AuxInt)
 10006  			v.reset(OpConstBool)
 10007  			v.AuxInt = boolToAuxInt(c == d)
 10008  			return true
 10009  		}
 10010  		break
 10011  	}
 10012  	// match: (EqPtr (LocalAddr _ _) (Addr _))
 10013  	// result: (ConstBool [false])
 10014  	for {
 10015  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 10016  			if v_0.Op != OpLocalAddr || v_1.Op != OpAddr {
 10017  				continue
 10018  			}
 10019  			v.reset(OpConstBool)
 10020  			v.AuxInt = boolToAuxInt(false)
 10021  			return true
 10022  		}
 10023  		break
 10024  	}
 10025  	// match: (EqPtr (OffPtr (LocalAddr _ _)) (Addr _))
 10026  	// result: (ConstBool [false])
 10027  	for {
 10028  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 10029  			if v_0.Op != OpOffPtr {
 10030  				continue
 10031  			}
 10032  			v_0_0 := v_0.Args[0]
 10033  			if v_0_0.Op != OpLocalAddr || v_1.Op != OpAddr {
 10034  				continue
 10035  			}
 10036  			v.reset(OpConstBool)
 10037  			v.AuxInt = boolToAuxInt(false)
 10038  			return true
 10039  		}
 10040  		break
 10041  	}
 10042  	// match: (EqPtr (LocalAddr _ _) (OffPtr (Addr _)))
 10043  	// result: (ConstBool [false])
 10044  	for {
 10045  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 10046  			if v_0.Op != OpLocalAddr || v_1.Op != OpOffPtr {
 10047  				continue
 10048  			}
 10049  			v_1_0 := v_1.Args[0]
 10050  			if v_1_0.Op != OpAddr {
 10051  				continue
 10052  			}
 10053  			v.reset(OpConstBool)
 10054  			v.AuxInt = boolToAuxInt(false)
 10055  			return true
 10056  		}
 10057  		break
 10058  	}
 10059  	// match: (EqPtr (OffPtr (LocalAddr _ _)) (OffPtr (Addr _)))
 10060  	// result: (ConstBool [false])
 10061  	for {
 10062  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 10063  			if v_0.Op != OpOffPtr {
 10064  				continue
 10065  			}
 10066  			v_0_0 := v_0.Args[0]
 10067  			if v_0_0.Op != OpLocalAddr || v_1.Op != OpOffPtr {
 10068  				continue
 10069  			}
 10070  			v_1_0 := v_1.Args[0]
 10071  			if v_1_0.Op != OpAddr {
 10072  				continue
 10073  			}
 10074  			v.reset(OpConstBool)
 10075  			v.AuxInt = boolToAuxInt(false)
 10076  			return true
 10077  		}
 10078  		break
 10079  	}
 10080  	// match: (EqPtr (AddPtr p1 o1) p2)
 10081  	// cond: isSamePtr(p1, p2)
 10082  	// result: (Not (IsNonNil o1))
 10083  	for {
 10084  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 10085  			if v_0.Op != OpAddPtr {
 10086  				continue
 10087  			}
 10088  			o1 := v_0.Args[1]
 10089  			p1 := v_0.Args[0]
 10090  			p2 := v_1
 10091  			if !(isSamePtr(p1, p2)) {
 10092  				continue
 10093  			}
 10094  			v.reset(OpNot)
 10095  			v0 := b.NewValue0(v.Pos, OpIsNonNil, typ.Bool)
 10096  			v0.AddArg(o1)
 10097  			v.AddArg(v0)
 10098  			return true
 10099  		}
 10100  		break
 10101  	}
 10102  	// match: (EqPtr (Const32 [0]) p)
 10103  	// result: (Not (IsNonNil p))
 10104  	for {
 10105  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 10106  			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 10107  				continue
 10108  			}
 10109  			p := v_1
 10110  			v.reset(OpNot)
 10111  			v0 := b.NewValue0(v.Pos, OpIsNonNil, typ.Bool)
 10112  			v0.AddArg(p)
 10113  			v.AddArg(v0)
 10114  			return true
 10115  		}
 10116  		break
 10117  	}
 10118  	// match: (EqPtr (Const64 [0]) p)
 10119  	// result: (Not (IsNonNil p))
 10120  	for {
 10121  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 10122  			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 10123  				continue
 10124  			}
 10125  			p := v_1
 10126  			v.reset(OpNot)
 10127  			v0 := b.NewValue0(v.Pos, OpIsNonNil, typ.Bool)
 10128  			v0.AddArg(p)
 10129  			v.AddArg(v0)
 10130  			return true
 10131  		}
 10132  		break
 10133  	}
 10134  	// match: (EqPtr (ConstNil) p)
 10135  	// result: (Not (IsNonNil p))
 10136  	for {
 10137  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 10138  			if v_0.Op != OpConstNil {
 10139  				continue
 10140  			}
 10141  			p := v_1
 10142  			v.reset(OpNot)
 10143  			v0 := b.NewValue0(v.Pos, OpIsNonNil, typ.Bool)
 10144  			v0.AddArg(p)
 10145  			v.AddArg(v0)
 10146  			return true
 10147  		}
 10148  		break
 10149  	}
 10150  	return false
 10151  }
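         // rewriteValuegeneric_OpEqSlice lowers slice equality to a comparison of the
         // slices' underlying data pointers: (EqSlice x y) => (EqPtr (SlicePtr x) (SlicePtr y)).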
 10152  func rewriteValuegeneric_OpEqSlice(v *Value) bool {
 10153  	v_1 := v.Args[1]
 10154  	v_0 := v.Args[0]
 10155  	b := v.Block
 10156  	typ := &b.Func.Config.Types
 10157  	// match: (EqSlice x y)
 10158  	// result: (EqPtr (SlicePtr x) (SlicePtr y))
 10159  	for {
 10160  		x := v_0
 10161  		y := v_1
 10162  		v.reset(OpEqPtr)
 10163  		v0 := b.NewValue0(v.Pos, OpSlicePtr, typ.BytePtr)
 10164  		v0.AddArg(x)
 10165  		v1 := b.NewValue0(v.Pos, OpSlicePtr, typ.BytePtr)
 10166  		v1.AddArg(y)
 10167  		v.AddArg2(v0, v1)
 10168  		return true
 10169  	}
 10170  }
 10171  func rewriteValuegeneric_OpFloor(v *Value) bool {
 10172  	v_0 := v.Args[0]
 10173  	// match: (Floor (Const64F [c]))
 10174  	// result: (Const64F [math.Floor(c)])
 10175  	for {
 10176  		if v_0.Op != OpConst64F {
 10177  			break
 10178  		}
 10179  		c := auxIntToFloat64(v_0.AuxInt)
 10180  		v.reset(OpConst64F)
 10181  		v.AuxInt = float64ToAuxInt(math.Floor(c))
 10182  		return true
 10183  	}
 10184  	return false
 10185  }
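         // rewriteValuegeneric_OpIMake simplifies interface construction by unwrapping
         // single-field struct and single-element array wrappers around the data word.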
 10186  func rewriteValuegeneric_OpIMake(v *Value) bool {
 10187  	v_1 := v.Args[1]
 10188  	v_0 := v.Args[0]
 10189  	// match: (IMake _typ (StructMake1 val))
 10190  	// result: (IMake _typ val)
 10191  	for {
 10192  		_typ := v_0
 10193  		if v_1.Op != OpStructMake1 {
 10194  			break
 10195  		}
 10196  		val := v_1.Args[0]
 10197  		v.reset(OpIMake)
 10198  		v.AddArg2(_typ, val)
 10199  		return true
 10200  	}
 10201  	// match: (IMake _typ (ArrayMake1 val))
 10202  	// result: (IMake _typ val)
 10203  	for {
 10204  		_typ := v_0
 10205  		if v_1.Op != OpArrayMake1 {
 10206  			break
 10207  		}
 10208  		val := v_1.Args[0]
 10209  		v.reset(OpIMake)
 10210  		v.AddArg2(_typ, val)
 10211  		return true
 10212  	}
 10213  	return false
 10214  }
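         // rewriteValuegeneric_OpInterLECall devirtualizes interface method calls: when the
         // method pointer is loaded through a statically known itab, the indirect call is
         // replaced by a direct call to the concrete method (see devirtLESym/devirtLECall).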
 10215  func rewriteValuegeneric_OpInterLECall(v *Value) bool {
 10216  	// match: (InterLECall [argsize] {auxCall} (Load (OffPtr [off] (ITab (IMake (Addr {itab} (SB)) _))) _) ___)
 10217  	// cond: devirtLESym(v, auxCall, itab, off) != nil
 10218  	// result: devirtLECall(v, devirtLESym(v, auxCall, itab, off))
 10219  	for {
 10220  		if len(v.Args) < 1 {
 10221  			break
 10222  		}
 10223  		auxCall := auxToCall(v.Aux)
 10224  		v_0 := v.Args[0]
 10225  		if v_0.Op != OpLoad {
 10226  			break
 10227  		}
 10228  		v_0_0 := v_0.Args[0]
 10229  		if v_0_0.Op != OpOffPtr {
 10230  			break
 10231  		}
 10232  		off := auxIntToInt64(v_0_0.AuxInt)
 10233  		v_0_0_0 := v_0_0.Args[0]
 10234  		if v_0_0_0.Op != OpITab {
 10235  			break
 10236  		}
 10237  		v_0_0_0_0 := v_0_0_0.Args[0]
 10238  		if v_0_0_0_0.Op != OpIMake {
 10239  			break
 10240  		}
 10241  		v_0_0_0_0_0 := v_0_0_0_0.Args[0]
 10242  		if v_0_0_0_0_0.Op != OpAddr {
 10243  			break
 10244  		}
 10245  		itab := auxToSym(v_0_0_0_0_0.Aux)
 10246  		v_0_0_0_0_0_0 := v_0_0_0_0_0.Args[0]
 10247  		if v_0_0_0_0_0_0.Op != OpSB || !(devirtLESym(v, auxCall, itab, off) != nil) {
 10248  			break
 10249  		}
 10250  		v.copyOf(devirtLECall(v, devirtLESym(v, auxCall, itab, off)))
 10251  		return true
 10252  	}
 10253  	return false
 10254  }
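         // rewriteValuegeneric_OpIsInBounds constant-folds index bounds checks that are
         // statically decidable: indices produced by zero-extension, masking, unsigned
         // shifts, unsigned remainder, or constants whose maximum possible value is known
         // to be below the bound fold to true, while an index equal to its own bound folds
         // to false.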
 10255  func rewriteValuegeneric_OpIsInBounds(v *Value) bool {
 10256  	v_1 := v.Args[1]
 10257  	v_0 := v.Args[0]
 10258  	// match: (IsInBounds (ZeroExt8to32 _) (Const32 [c]))
 10259  	// cond: (1 << 8) <= c
 10260  	// result: (ConstBool [true])
 10261  	for {
 10262  		if v_0.Op != OpZeroExt8to32 || v_1.Op != OpConst32 {
 10263  			break
 10264  		}
 10265  		c := auxIntToInt32(v_1.AuxInt)
 10266  		if !((1 << 8) <= c) {
 10267  			break
 10268  		}
 10269  		v.reset(OpConstBool)
 10270  		v.AuxInt = boolToAuxInt(true)
 10271  		return true
 10272  	}
 10273  	// match: (IsInBounds (ZeroExt8to64 _) (Const64 [c]))
 10274  	// cond: (1 << 8) <= c
 10275  	// result: (ConstBool [true])
 10276  	for {
 10277  		if v_0.Op != OpZeroExt8to64 || v_1.Op != OpConst64 {
 10278  			break
 10279  		}
 10280  		c := auxIntToInt64(v_1.AuxInt)
 10281  		if !((1 << 8) <= c) {
 10282  			break
 10283  		}
 10284  		v.reset(OpConstBool)
 10285  		v.AuxInt = boolToAuxInt(true)
 10286  		return true
 10287  	}
 10288  	// match: (IsInBounds (ZeroExt16to32 _) (Const32 [c]))
 10289  	// cond: (1 << 16) <= c
 10290  	// result: (ConstBool [true])
 10291  	for {
 10292  		if v_0.Op != OpZeroExt16to32 || v_1.Op != OpConst32 {
 10293  			break
 10294  		}
 10295  		c := auxIntToInt32(v_1.AuxInt)
 10296  		if !((1 << 16) <= c) {
 10297  			break
 10298  		}
 10299  		v.reset(OpConstBool)
 10300  		v.AuxInt = boolToAuxInt(true)
 10301  		return true
 10302  	}
 10303  	// match: (IsInBounds (ZeroExt16to64 _) (Const64 [c]))
 10304  	// cond: (1 << 16) <= c
 10305  	// result: (ConstBool [true])
 10306  	for {
 10307  		if v_0.Op != OpZeroExt16to64 || v_1.Op != OpConst64 {
 10308  			break
 10309  		}
 10310  		c := auxIntToInt64(v_1.AuxInt)
 10311  		if !((1 << 16) <= c) {
 10312  			break
 10313  		}
 10314  		v.reset(OpConstBool)
 10315  		v.AuxInt = boolToAuxInt(true)
 10316  		return true
 10317  	}
 10318  	// match: (IsInBounds x x)
 10319  	// result: (ConstBool [false])
 10320  	for {
 10321  		x := v_0
 10322  		if x != v_1 {
 10323  			break
 10324  		}
 10325  		v.reset(OpConstBool)
 10326  		v.AuxInt = boolToAuxInt(false)
 10327  		return true
 10328  	}
 10329  	// match: (IsInBounds (And8 (Const8 [c]) _) (Const8 [d]))
 10330  	// cond: 0 <= c && c < d
 10331  	// result: (ConstBool [true])
 10332  	for {
 10333  		if v_0.Op != OpAnd8 {
 10334  			break
 10335  		}
 10336  		v_0_0 := v_0.Args[0]
 10337  		v_0_1 := v_0.Args[1]
 10338  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 10339  			if v_0_0.Op != OpConst8 {
 10340  				continue
 10341  			}
 10342  			c := auxIntToInt8(v_0_0.AuxInt)
 10343  			if v_1.Op != OpConst8 {
 10344  				continue
 10345  			}
 10346  			d := auxIntToInt8(v_1.AuxInt)
 10347  			if !(0 <= c && c < d) {
 10348  				continue
 10349  			}
 10350  			v.reset(OpConstBool)
 10351  			v.AuxInt = boolToAuxInt(true)
 10352  			return true
 10353  		}
 10354  		break
 10355  	}
 10356  	// match: (IsInBounds (ZeroExt8to16 (And8 (Const8 [c]) _)) (Const16 [d]))
 10357  	// cond: 0 <= c && int16(c) < d
 10358  	// result: (ConstBool [true])
 10359  	for {
 10360  		if v_0.Op != OpZeroExt8to16 {
 10361  			break
 10362  		}
 10363  		v_0_0 := v_0.Args[0]
 10364  		if v_0_0.Op != OpAnd8 {
 10365  			break
 10366  		}
 10367  		v_0_0_0 := v_0_0.Args[0]
 10368  		v_0_0_1 := v_0_0.Args[1]
 10369  		for _i0 := 0; _i0 <= 1; _i0, v_0_0_0, v_0_0_1 = _i0+1, v_0_0_1, v_0_0_0 {
 10370  			if v_0_0_0.Op != OpConst8 {
 10371  				continue
 10372  			}
 10373  			c := auxIntToInt8(v_0_0_0.AuxInt)
 10374  			if v_1.Op != OpConst16 {
 10375  				continue
 10376  			}
 10377  			d := auxIntToInt16(v_1.AuxInt)
 10378  			if !(0 <= c && int16(c) < d) {
 10379  				continue
 10380  			}
 10381  			v.reset(OpConstBool)
 10382  			v.AuxInt = boolToAuxInt(true)
 10383  			return true
 10384  		}
 10385  		break
 10386  	}
 10387  	// match: (IsInBounds (ZeroExt8to32 (And8 (Const8 [c]) _)) (Const32 [d]))
 10388  	// cond: 0 <= c && int32(c) < d
 10389  	// result: (ConstBool [true])
 10390  	for {
 10391  		if v_0.Op != OpZeroExt8to32 {
 10392  			break
 10393  		}
 10394  		v_0_0 := v_0.Args[0]
 10395  		if v_0_0.Op != OpAnd8 {
 10396  			break
 10397  		}
 10398  		v_0_0_0 := v_0_0.Args[0]
 10399  		v_0_0_1 := v_0_0.Args[1]
 10400  		for _i0 := 0; _i0 <= 1; _i0, v_0_0_0, v_0_0_1 = _i0+1, v_0_0_1, v_0_0_0 {
 10401  			if v_0_0_0.Op != OpConst8 {
 10402  				continue
 10403  			}
 10404  			c := auxIntToInt8(v_0_0_0.AuxInt)
 10405  			if v_1.Op != OpConst32 {
 10406  				continue
 10407  			}
 10408  			d := auxIntToInt32(v_1.AuxInt)
 10409  			if !(0 <= c && int32(c) < d) {
 10410  				continue
 10411  			}
 10412  			v.reset(OpConstBool)
 10413  			v.AuxInt = boolToAuxInt(true)
 10414  			return true
 10415  		}
 10416  		break
 10417  	}
 10418  	// match: (IsInBounds (ZeroExt8to64 (And8 (Const8 [c]) _)) (Const64 [d]))
 10419  	// cond: 0 <= c && int64(c) < d
 10420  	// result: (ConstBool [true])
 10421  	for {
 10422  		if v_0.Op != OpZeroExt8to64 {
 10423  			break
 10424  		}
 10425  		v_0_0 := v_0.Args[0]
 10426  		if v_0_0.Op != OpAnd8 {
 10427  			break
 10428  		}
 10429  		v_0_0_0 := v_0_0.Args[0]
 10430  		v_0_0_1 := v_0_0.Args[1]
 10431  		for _i0 := 0; _i0 <= 1; _i0, v_0_0_0, v_0_0_1 = _i0+1, v_0_0_1, v_0_0_0 {
 10432  			if v_0_0_0.Op != OpConst8 {
 10433  				continue
 10434  			}
 10435  			c := auxIntToInt8(v_0_0_0.AuxInt)
 10436  			if v_1.Op != OpConst64 {
 10437  				continue
 10438  			}
 10439  			d := auxIntToInt64(v_1.AuxInt)
 10440  			if !(0 <= c && int64(c) < d) {
 10441  				continue
 10442  			}
 10443  			v.reset(OpConstBool)
 10444  			v.AuxInt = boolToAuxInt(true)
 10445  			return true
 10446  		}
 10447  		break
 10448  	}
 10449  	// match: (IsInBounds (And16 (Const16 [c]) _) (Const16 [d]))
 10450  	// cond: 0 <= c && c < d
 10451  	// result: (ConstBool [true])
 10452  	for {
 10453  		if v_0.Op != OpAnd16 {
 10454  			break
 10455  		}
 10456  		v_0_0 := v_0.Args[0]
 10457  		v_0_1 := v_0.Args[1]
 10458  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 10459  			if v_0_0.Op != OpConst16 {
 10460  				continue
 10461  			}
 10462  			c := auxIntToInt16(v_0_0.AuxInt)
 10463  			if v_1.Op != OpConst16 {
 10464  				continue
 10465  			}
 10466  			d := auxIntToInt16(v_1.AuxInt)
 10467  			if !(0 <= c && c < d) {
 10468  				continue
 10469  			}
 10470  			v.reset(OpConstBool)
 10471  			v.AuxInt = boolToAuxInt(true)
 10472  			return true
 10473  		}
 10474  		break
 10475  	}
 10476  	// match: (IsInBounds (ZeroExt16to32 (And16 (Const16 [c]) _)) (Const32 [d]))
 10477  	// cond: 0 <= c && int32(c) < d
 10478  	// result: (ConstBool [true])
 10479  	for {
 10480  		if v_0.Op != OpZeroExt16to32 {
 10481  			break
 10482  		}
 10483  		v_0_0 := v_0.Args[0]
 10484  		if v_0_0.Op != OpAnd16 {
 10485  			break
 10486  		}
 10487  		v_0_0_0 := v_0_0.Args[0]
 10488  		v_0_0_1 := v_0_0.Args[1]
 10489  		for _i0 := 0; _i0 <= 1; _i0, v_0_0_0, v_0_0_1 = _i0+1, v_0_0_1, v_0_0_0 {
 10490  			if v_0_0_0.Op != OpConst16 {
 10491  				continue
 10492  			}
 10493  			c := auxIntToInt16(v_0_0_0.AuxInt)
 10494  			if v_1.Op != OpConst32 {
 10495  				continue
 10496  			}
 10497  			d := auxIntToInt32(v_1.AuxInt)
 10498  			if !(0 <= c && int32(c) < d) {
 10499  				continue
 10500  			}
 10501  			v.reset(OpConstBool)
 10502  			v.AuxInt = boolToAuxInt(true)
 10503  			return true
 10504  		}
 10505  		break
 10506  	}
 10507  	// match: (IsInBounds (ZeroExt16to64 (And16 (Const16 [c]) _)) (Const64 [d]))
 10508  	// cond: 0 <= c && int64(c) < d
 10509  	// result: (ConstBool [true])
 10510  	for {
 10511  		if v_0.Op != OpZeroExt16to64 {
 10512  			break
 10513  		}
 10514  		v_0_0 := v_0.Args[0]
 10515  		if v_0_0.Op != OpAnd16 {
 10516  			break
 10517  		}
 10518  		v_0_0_0 := v_0_0.Args[0]
 10519  		v_0_0_1 := v_0_0.Args[1]
 10520  		for _i0 := 0; _i0 <= 1; _i0, v_0_0_0, v_0_0_1 = _i0+1, v_0_0_1, v_0_0_0 {
 10521  			if v_0_0_0.Op != OpConst16 {
 10522  				continue
 10523  			}
 10524  			c := auxIntToInt16(v_0_0_0.AuxInt)
 10525  			if v_1.Op != OpConst64 {
 10526  				continue
 10527  			}
 10528  			d := auxIntToInt64(v_1.AuxInt)
 10529  			if !(0 <= c && int64(c) < d) {
 10530  				continue
 10531  			}
 10532  			v.reset(OpConstBool)
 10533  			v.AuxInt = boolToAuxInt(true)
 10534  			return true
 10535  		}
 10536  		break
 10537  	}
 10538  	// match: (IsInBounds (And32 (Const32 [c]) _) (Const32 [d]))
 10539  	// cond: 0 <= c && c < d
 10540  	// result: (ConstBool [true])
 10541  	for {
 10542  		if v_0.Op != OpAnd32 {
 10543  			break
 10544  		}
 10545  		v_0_0 := v_0.Args[0]
 10546  		v_0_1 := v_0.Args[1]
 10547  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 10548  			if v_0_0.Op != OpConst32 {
 10549  				continue
 10550  			}
 10551  			c := auxIntToInt32(v_0_0.AuxInt)
 10552  			if v_1.Op != OpConst32 {
 10553  				continue
 10554  			}
 10555  			d := auxIntToInt32(v_1.AuxInt)
 10556  			if !(0 <= c && c < d) {
 10557  				continue
 10558  			}
 10559  			v.reset(OpConstBool)
 10560  			v.AuxInt = boolToAuxInt(true)
 10561  			return true
 10562  		}
 10563  		break
 10564  	}
 10565  	// match: (IsInBounds (ZeroExt32to64 (And32 (Const32 [c]) _)) (Const64 [d]))
 10566  	// cond: 0 <= c && int64(c) < d
 10567  	// result: (ConstBool [true])
 10568  	for {
 10569  		if v_0.Op != OpZeroExt32to64 {
 10570  			break
 10571  		}
 10572  		v_0_0 := v_0.Args[0]
 10573  		if v_0_0.Op != OpAnd32 {
 10574  			break
 10575  		}
 10576  		v_0_0_0 := v_0_0.Args[0]
 10577  		v_0_0_1 := v_0_0.Args[1]
 10578  		for _i0 := 0; _i0 <= 1; _i0, v_0_0_0, v_0_0_1 = _i0+1, v_0_0_1, v_0_0_0 {
 10579  			if v_0_0_0.Op != OpConst32 {
 10580  				continue
 10581  			}
 10582  			c := auxIntToInt32(v_0_0_0.AuxInt)
 10583  			if v_1.Op != OpConst64 {
 10584  				continue
 10585  			}
 10586  			d := auxIntToInt64(v_1.AuxInt)
 10587  			if !(0 <= c && int64(c) < d) {
 10588  				continue
 10589  			}
 10590  			v.reset(OpConstBool)
 10591  			v.AuxInt = boolToAuxInt(true)
 10592  			return true
 10593  		}
 10594  		break
 10595  	}
 10596  	// match: (IsInBounds (And64 (Const64 [c]) _) (Const64 [d]))
 10597  	// cond: 0 <= c && c < d
 10598  	// result: (ConstBool [true])
 10599  	for {
 10600  		if v_0.Op != OpAnd64 {
 10601  			break
 10602  		}
 10603  		v_0_0 := v_0.Args[0]
 10604  		v_0_1 := v_0.Args[1]
 10605  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 10606  			if v_0_0.Op != OpConst64 {
 10607  				continue
 10608  			}
 10609  			c := auxIntToInt64(v_0_0.AuxInt)
 10610  			if v_1.Op != OpConst64 {
 10611  				continue
 10612  			}
 10613  			d := auxIntToInt64(v_1.AuxInt)
 10614  			if !(0 <= c && c < d) {
 10615  				continue
 10616  			}
 10617  			v.reset(OpConstBool)
 10618  			v.AuxInt = boolToAuxInt(true)
 10619  			return true
 10620  		}
 10621  		break
 10622  	}
 10623  	// match: (IsInBounds (Const32 [c]) (Const32 [d]))
 10624  	// result: (ConstBool [0 <= c && c < d])
 10625  	for {
 10626  		if v_0.Op != OpConst32 {
 10627  			break
 10628  		}
 10629  		c := auxIntToInt32(v_0.AuxInt)
 10630  		if v_1.Op != OpConst32 {
 10631  			break
 10632  		}
 10633  		d := auxIntToInt32(v_1.AuxInt)
 10634  		v.reset(OpConstBool)
 10635  		v.AuxInt = boolToAuxInt(0 <= c && c < d)
 10636  		return true
 10637  	}
 10638  	// match: (IsInBounds (Const64 [c]) (Const64 [d]))
 10639  	// result: (ConstBool [0 <= c && c < d])
 10640  	for {
 10641  		if v_0.Op != OpConst64 {
 10642  			break
 10643  		}
 10644  		c := auxIntToInt64(v_0.AuxInt)
 10645  		if v_1.Op != OpConst64 {
 10646  			break
 10647  		}
 10648  		d := auxIntToInt64(v_1.AuxInt)
 10649  		v.reset(OpConstBool)
 10650  		v.AuxInt = boolToAuxInt(0 <= c && c < d)
 10651  		return true
 10652  	}
 10653  	// match: (IsInBounds (Mod32u _ y) y)
 10654  	// result: (ConstBool [true])
 10655  	for {
 10656  		if v_0.Op != OpMod32u {
 10657  			break
 10658  		}
 10659  		y := v_0.Args[1]
 10660  		if y != v_1 {
 10661  			break
 10662  		}
 10663  		v.reset(OpConstBool)
 10664  		v.AuxInt = boolToAuxInt(true)
 10665  		return true
 10666  	}
 10667  	// match: (IsInBounds (Mod64u _ y) y)
 10668  	// result: (ConstBool [true])
 10669  	for {
 10670  		if v_0.Op != OpMod64u {
 10671  			break
 10672  		}
 10673  		y := v_0.Args[1]
 10674  		if y != v_1 {
 10675  			break
 10676  		}
 10677  		v.reset(OpConstBool)
 10678  		v.AuxInt = boolToAuxInt(true)
 10679  		return true
 10680  	}
 10681  	// match: (IsInBounds (ZeroExt8to64 (Rsh8Ux64 _ (Const64 [c]))) (Const64 [d]))
 10682  	// cond: 0 < c && c < 8 && 1<<uint( 8-c)-1 < d
 10683  	// result: (ConstBool [true])
 10684  	for {
 10685  		if v_0.Op != OpZeroExt8to64 {
 10686  			break
 10687  		}
 10688  		v_0_0 := v_0.Args[0]
 10689  		if v_0_0.Op != OpRsh8Ux64 {
 10690  			break
 10691  		}
 10692  		_ = v_0_0.Args[1]
 10693  		v_0_0_1 := v_0_0.Args[1]
 10694  		if v_0_0_1.Op != OpConst64 {
 10695  			break
 10696  		}
 10697  		c := auxIntToInt64(v_0_0_1.AuxInt)
 10698  		if v_1.Op != OpConst64 {
 10699  			break
 10700  		}
 10701  		d := auxIntToInt64(v_1.AuxInt)
 10702  		if !(0 < c && c < 8 && 1<<uint(8-c)-1 < d) {
 10703  			break
 10704  		}
 10705  		v.reset(OpConstBool)
 10706  		v.AuxInt = boolToAuxInt(true)
 10707  		return true
 10708  	}
 10709  	// match: (IsInBounds (ZeroExt8to32 (Rsh8Ux64 _ (Const64 [c]))) (Const32 [d]))
 10710  	// cond: 0 < c && c < 8 && 1<<uint( 8-c)-1 < d
 10711  	// result: (ConstBool [true])
 10712  	for {
 10713  		if v_0.Op != OpZeroExt8to32 {
 10714  			break
 10715  		}
 10716  		v_0_0 := v_0.Args[0]
 10717  		if v_0_0.Op != OpRsh8Ux64 {
 10718  			break
 10719  		}
 10720  		_ = v_0_0.Args[1]
 10721  		v_0_0_1 := v_0_0.Args[1]
 10722  		if v_0_0_1.Op != OpConst64 {
 10723  			break
 10724  		}
 10725  		c := auxIntToInt64(v_0_0_1.AuxInt)
 10726  		if v_1.Op != OpConst32 {
 10727  			break
 10728  		}
 10729  		d := auxIntToInt32(v_1.AuxInt)
 10730  		if !(0 < c && c < 8 && 1<<uint(8-c)-1 < d) {
 10731  			break
 10732  		}
 10733  		v.reset(OpConstBool)
 10734  		v.AuxInt = boolToAuxInt(true)
 10735  		return true
 10736  	}
 10737  	// match: (IsInBounds (ZeroExt8to16 (Rsh8Ux64 _ (Const64 [c]))) (Const16 [d]))
 10738  	// cond: 0 < c && c < 8 && 1<<uint( 8-c)-1 < d
 10739  	// result: (ConstBool [true])
 10740  	for {
 10741  		if v_0.Op != OpZeroExt8to16 {
 10742  			break
 10743  		}
 10744  		v_0_0 := v_0.Args[0]
 10745  		if v_0_0.Op != OpRsh8Ux64 {
 10746  			break
 10747  		}
 10748  		_ = v_0_0.Args[1]
 10749  		v_0_0_1 := v_0_0.Args[1]
 10750  		if v_0_0_1.Op != OpConst64 {
 10751  			break
 10752  		}
 10753  		c := auxIntToInt64(v_0_0_1.AuxInt)
 10754  		if v_1.Op != OpConst16 {
 10755  			break
 10756  		}
 10757  		d := auxIntToInt16(v_1.AuxInt)
 10758  		if !(0 < c && c < 8 && 1<<uint(8-c)-1 < d) {
 10759  			break
 10760  		}
 10761  		v.reset(OpConstBool)
 10762  		v.AuxInt = boolToAuxInt(true)
 10763  		return true
 10764  	}
 10765  	// match: (IsInBounds (Rsh8Ux64 _ (Const64 [c])) (Const64 [d]))
 10766  	// cond: 0 < c && c < 8 && 1<<uint( 8-c)-1 < d
 10767  	// result: (ConstBool [true])
 10768  	for {
 10769  		if v_0.Op != OpRsh8Ux64 {
 10770  			break
 10771  		}
 10772  		_ = v_0.Args[1]
 10773  		v_0_1 := v_0.Args[1]
 10774  		if v_0_1.Op != OpConst64 {
 10775  			break
 10776  		}
 10777  		c := auxIntToInt64(v_0_1.AuxInt)
 10778  		if v_1.Op != OpConst64 {
 10779  			break
 10780  		}
 10781  		d := auxIntToInt64(v_1.AuxInt)
 10782  		if !(0 < c && c < 8 && 1<<uint(8-c)-1 < d) {
 10783  			break
 10784  		}
 10785  		v.reset(OpConstBool)
 10786  		v.AuxInt = boolToAuxInt(true)
 10787  		return true
 10788  	}
 10789  	// match: (IsInBounds (ZeroExt16to64 (Rsh16Ux64 _ (Const64 [c]))) (Const64 [d]))
 10790  	// cond: 0 < c && c < 16 && 1<<uint(16-c)-1 < d
 10791  	// result: (ConstBool [true])
 10792  	for {
 10793  		if v_0.Op != OpZeroExt16to64 {
 10794  			break
 10795  		}
 10796  		v_0_0 := v_0.Args[0]
 10797  		if v_0_0.Op != OpRsh16Ux64 {
 10798  			break
 10799  		}
 10800  		_ = v_0_0.Args[1]
 10801  		v_0_0_1 := v_0_0.Args[1]
 10802  		if v_0_0_1.Op != OpConst64 {
 10803  			break
 10804  		}
 10805  		c := auxIntToInt64(v_0_0_1.AuxInt)
 10806  		if v_1.Op != OpConst64 {
 10807  			break
 10808  		}
 10809  		d := auxIntToInt64(v_1.AuxInt)
 10810  		if !(0 < c && c < 16 && 1<<uint(16-c)-1 < d) {
 10811  			break
 10812  		}
 10813  		v.reset(OpConstBool)
 10814  		v.AuxInt = boolToAuxInt(true)
 10815  		return true
 10816  	}
 10817  	// match: (IsInBounds (ZeroExt16to32 (Rsh16Ux64 _ (Const64 [c]))) (Const64 [d]))
 10818  	// cond: 0 < c && c < 16 && 1<<uint(16-c)-1 < d
 10819  	// result: (ConstBool [true])
 10820  	for {
 10821  		if v_0.Op != OpZeroExt16to32 {
 10822  			break
 10823  		}
 10824  		v_0_0 := v_0.Args[0]
 10825  		if v_0_0.Op != OpRsh16Ux64 {
 10826  			break
 10827  		}
 10828  		_ = v_0_0.Args[1]
 10829  		v_0_0_1 := v_0_0.Args[1]
 10830  		if v_0_0_1.Op != OpConst64 {
 10831  			break
 10832  		}
 10833  		c := auxIntToInt64(v_0_0_1.AuxInt)
 10834  		if v_1.Op != OpConst64 {
 10835  			break
 10836  		}
 10837  		d := auxIntToInt64(v_1.AuxInt)
 10838  		if !(0 < c && c < 16 && 1<<uint(16-c)-1 < d) {
 10839  			break
 10840  		}
 10841  		v.reset(OpConstBool)
 10842  		v.AuxInt = boolToAuxInt(true)
 10843  		return true
 10844  	}
 10845  	// match: (IsInBounds (Rsh16Ux64 _ (Const64 [c])) (Const64 [d]))
 10846  	// cond: 0 < c && c < 16 && 1<<uint(16-c)-1 < d
 10847  	// result: (ConstBool [true])
 10848  	for {
 10849  		if v_0.Op != OpRsh16Ux64 {
 10850  			break
 10851  		}
 10852  		_ = v_0.Args[1]
 10853  		v_0_1 := v_0.Args[1]
 10854  		if v_0_1.Op != OpConst64 {
 10855  			break
 10856  		}
 10857  		c := auxIntToInt64(v_0_1.AuxInt)
 10858  		if v_1.Op != OpConst64 {
 10859  			break
 10860  		}
 10861  		d := auxIntToInt64(v_1.AuxInt)
 10862  		if !(0 < c && c < 16 && 1<<uint(16-c)-1 < d) {
 10863  			break
 10864  		}
 10865  		v.reset(OpConstBool)
 10866  		v.AuxInt = boolToAuxInt(true)
 10867  		return true
 10868  	}
 10869  	// match: (IsInBounds (ZeroExt32to64 (Rsh32Ux64 _ (Const64 [c]))) (Const64 [d]))
 10870  	// cond: 0 < c && c < 32 && 1<<uint(32-c)-1 < d
 10871  	// result: (ConstBool [true])
 10872  	for {
 10873  		if v_0.Op != OpZeroExt32to64 {
 10874  			break
 10875  		}
 10876  		v_0_0 := v_0.Args[0]
 10877  		if v_0_0.Op != OpRsh32Ux64 {
 10878  			break
 10879  		}
 10880  		_ = v_0_0.Args[1]
 10881  		v_0_0_1 := v_0_0.Args[1]
 10882  		if v_0_0_1.Op != OpConst64 {
 10883  			break
 10884  		}
 10885  		c := auxIntToInt64(v_0_0_1.AuxInt)
 10886  		if v_1.Op != OpConst64 {
 10887  			break
 10888  		}
 10889  		d := auxIntToInt64(v_1.AuxInt)
 10890  		if !(0 < c && c < 32 && 1<<uint(32-c)-1 < d) {
 10891  			break
 10892  		}
 10893  		v.reset(OpConstBool)
 10894  		v.AuxInt = boolToAuxInt(true)
 10895  		return true
 10896  	}
 10897  	// match: (IsInBounds (Rsh32Ux64 _ (Const64 [c])) (Const64 [d]))
 10898  	// cond: 0 < c && c < 32 && 1<<uint(32-c)-1 < d
 10899  	// result: (ConstBool [true])
 10900  	for {
 10901  		if v_0.Op != OpRsh32Ux64 {
 10902  			break
 10903  		}
 10904  		_ = v_0.Args[1]
 10905  		v_0_1 := v_0.Args[1]
 10906  		if v_0_1.Op != OpConst64 {
 10907  			break
 10908  		}
 10909  		c := auxIntToInt64(v_0_1.AuxInt)
 10910  		if v_1.Op != OpConst64 {
 10911  			break
 10912  		}
 10913  		d := auxIntToInt64(v_1.AuxInt)
 10914  		if !(0 < c && c < 32 && 1<<uint(32-c)-1 < d) {
 10915  			break
 10916  		}
 10917  		v.reset(OpConstBool)
 10918  		v.AuxInt = boolToAuxInt(true)
 10919  		return true
 10920  	}
 10921  	// match: (IsInBounds (Rsh64Ux64 _ (Const64 [c])) (Const64 [d]))
 10922  	// cond: 0 < c && c < 64 && 1<<uint(64-c)-1 < d
 10923  	// result: (ConstBool [true])
 10924  	for {
 10925  		if v_0.Op != OpRsh64Ux64 {
 10926  			break
 10927  		}
 10928  		_ = v_0.Args[1]
 10929  		v_0_1 := v_0.Args[1]
 10930  		if v_0_1.Op != OpConst64 {
 10931  			break
 10932  		}
 10933  		c := auxIntToInt64(v_0_1.AuxInt)
 10934  		if v_1.Op != OpConst64 {
 10935  			break
 10936  		}
 10937  		d := auxIntToInt64(v_1.AuxInt)
 10938  		if !(0 < c && c < 64 && 1<<uint(64-c)-1 < d) {
 10939  			break
 10940  		}
 10941  		v.reset(OpConstBool)
 10942  		v.AuxInt = boolToAuxInt(true)
 10943  		return true
 10944  	}
 10945  	return false
 10946  }
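         // rewriteValuegeneric_OpIsNonNil folds nil checks: ConstNil folds to false, integer
         // constants fold to c != 0, and Addr/LocalAddr values are always non-nil.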
 10947  func rewriteValuegeneric_OpIsNonNil(v *Value) bool {
 10948  	v_0 := v.Args[0]
 10949  	// match: (IsNonNil (ConstNil))
 10950  	// result: (ConstBool [false])
 10951  	for {
 10952  		if v_0.Op != OpConstNil {
 10953  			break
 10954  		}
 10955  		v.reset(OpConstBool)
 10956  		v.AuxInt = boolToAuxInt(false)
 10957  		return true
 10958  	}
 10959  	// match: (IsNonNil (Const32 [c]))
 10960  	// result: (ConstBool [c != 0])
 10961  	for {
 10962  		if v_0.Op != OpConst32 {
 10963  			break
 10964  		}
 10965  		c := auxIntToInt32(v_0.AuxInt)
 10966  		v.reset(OpConstBool)
 10967  		v.AuxInt = boolToAuxInt(c != 0)
 10968  		return true
 10969  	}
 10970  	// match: (IsNonNil (Const64 [c]))
 10971  	// result: (ConstBool [c != 0])
 10972  	for {
 10973  		if v_0.Op != OpConst64 {
 10974  			break
 10975  		}
 10976  		c := auxIntToInt64(v_0.AuxInt)
 10977  		v.reset(OpConstBool)
 10978  		v.AuxInt = boolToAuxInt(c != 0)
 10979  		return true
 10980  	}
 10981  	// match: (IsNonNil (Addr _))
 10982  	// result: (ConstBool [true])
 10983  	for {
 10984  		if v_0.Op != OpAddr {
 10985  			break
 10986  		}
 10987  		v.reset(OpConstBool)
 10988  		v.AuxInt = boolToAuxInt(true)
 10989  		return true
 10990  	}
 10991  	// match: (IsNonNil (LocalAddr _ _))
 10992  	// result: (ConstBool [true])
 10993  	for {
 10994  		if v_0.Op != OpLocalAddr {
 10995  			break
 10996  		}
 10997  		v.reset(OpConstBool)
 10998  		v.AuxInt = boolToAuxInt(true)
 10999  		return true
 11000  	}
 11001  	return false
 11002  }
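         // rewriteValuegeneric_OpIsSliceInBounds constant-folds slice bounds checks
         // (0 <= i <= bound): masked values, constants, a zero index, an index equal to the
         // bound, and len(x) against cap(x) are all decided statically.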
 11003  func rewriteValuegeneric_OpIsSliceInBounds(v *Value) bool {
 11004  	v_1 := v.Args[1]
 11005  	v_0 := v.Args[0]
 11006  	// match: (IsSliceInBounds x x)
 11007  	// result: (ConstBool [true])
 11008  	for {
 11009  		x := v_0
 11010  		if x != v_1 {
 11011  			break
 11012  		}
 11013  		v.reset(OpConstBool)
 11014  		v.AuxInt = boolToAuxInt(true)
 11015  		return true
 11016  	}
 11017  	// match: (IsSliceInBounds (And32 (Const32 [c]) _) (Const32 [d]))
 11018  	// cond: 0 <= c && c <= d
 11019  	// result: (ConstBool [true])
 11020  	for {
 11021  		if v_0.Op != OpAnd32 {
 11022  			break
 11023  		}
 11024  		v_0_0 := v_0.Args[0]
 11025  		v_0_1 := v_0.Args[1]
 11026  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 11027  			if v_0_0.Op != OpConst32 {
 11028  				continue
 11029  			}
 11030  			c := auxIntToInt32(v_0_0.AuxInt)
 11031  			if v_1.Op != OpConst32 {
 11032  				continue
 11033  			}
 11034  			d := auxIntToInt32(v_1.AuxInt)
 11035  			if !(0 <= c && c <= d) {
 11036  				continue
 11037  			}
 11038  			v.reset(OpConstBool)
 11039  			v.AuxInt = boolToAuxInt(true)
 11040  			return true
 11041  		}
 11042  		break
 11043  	}
 11044  	// match: (IsSliceInBounds (And64 (Const64 [c]) _) (Const64 [d]))
 11045  	// cond: 0 <= c && c <= d
 11046  	// result: (ConstBool [true])
 11047  	for {
 11048  		if v_0.Op != OpAnd64 {
 11049  			break
 11050  		}
 11051  		v_0_0 := v_0.Args[0]
 11052  		v_0_1 := v_0.Args[1]
 11053  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 11054  			if v_0_0.Op != OpConst64 {
 11055  				continue
 11056  			}
 11057  			c := auxIntToInt64(v_0_0.AuxInt)
 11058  			if v_1.Op != OpConst64 {
 11059  				continue
 11060  			}
 11061  			d := auxIntToInt64(v_1.AuxInt)
 11062  			if !(0 <= c && c <= d) {
 11063  				continue
 11064  			}
 11065  			v.reset(OpConstBool)
 11066  			v.AuxInt = boolToAuxInt(true)
 11067  			return true
 11068  		}
 11069  		break
 11070  	}
 11071  	// match: (IsSliceInBounds (Const32 [0]) _)
 11072  	// result: (ConstBool [true])
 11073  	for {
 11074  		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 11075  			break
 11076  		}
 11077  		v.reset(OpConstBool)
 11078  		v.AuxInt = boolToAuxInt(true)
 11079  		return true
 11080  	}
 11081  	// match: (IsSliceInBounds (Const64 [0]) _)
 11082  	// result: (ConstBool [true])
 11083  	for {
 11084  		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 11085  			break
 11086  		}
 11087  		v.reset(OpConstBool)
 11088  		v.AuxInt = boolToAuxInt(true)
 11089  		return true
 11090  	}
 11091  	// match: (IsSliceInBounds (Const32 [c]) (Const32 [d]))
 11092  	// result: (ConstBool [0 <= c && c <= d])
 11093  	for {
 11094  		if v_0.Op != OpConst32 {
 11095  			break
 11096  		}
 11097  		c := auxIntToInt32(v_0.AuxInt)
 11098  		if v_1.Op != OpConst32 {
 11099  			break
 11100  		}
 11101  		d := auxIntToInt32(v_1.AuxInt)
 11102  		v.reset(OpConstBool)
 11103  		v.AuxInt = boolToAuxInt(0 <= c && c <= d)
 11104  		return true
 11105  	}
 11106  	// match: (IsSliceInBounds (Const64 [c]) (Const64 [d]))
 11107  	// result: (ConstBool [0 <= c && c <= d])
 11108  	for {
 11109  		if v_0.Op != OpConst64 {
 11110  			break
 11111  		}
 11112  		c := auxIntToInt64(v_0.AuxInt)
 11113  		if v_1.Op != OpConst64 {
 11114  			break
 11115  		}
 11116  		d := auxIntToInt64(v_1.AuxInt)
 11117  		v.reset(OpConstBool)
 11118  		v.AuxInt = boolToAuxInt(0 <= c && c <= d)
 11119  		return true
 11120  	}
 11121  	// match: (IsSliceInBounds (SliceLen x) (SliceCap x))
 11122  	// result: (ConstBool [true])
 11123  	for {
 11124  		if v_0.Op != OpSliceLen {
 11125  			break
 11126  		}
 11127  		x := v_0.Args[0]
 11128  		if v_1.Op != OpSliceCap || x != v_1.Args[0] {
 11129  			break
 11130  		}
 11131  		v.reset(OpConstBool)
 11132  		v.AuxInt = boolToAuxInt(true)
 11133  		return true
 11134  	}
 11135  	return false
 11136  }
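         // rewriteValuegeneric_OpLeq16 constant-folds 16-bit signed <= comparisons and
         // recognizes 0 <= (x & c) with c >= 0 and 0 <= (x >> c) with c > 0 as always true.
         // The remaining OpLeq*/OpLeq*U functions below apply the same pattern at other
         // widths; the unsigned variants additionally fold 0 <= x to true.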
 11137  func rewriteValuegeneric_OpLeq16(v *Value) bool {
 11138  	v_1 := v.Args[1]
 11139  	v_0 := v.Args[0]
 11140  	// match: (Leq16 (Const16 [c]) (Const16 [d]))
 11141  	// result: (ConstBool [c <= d])
 11142  	for {
 11143  		if v_0.Op != OpConst16 {
 11144  			break
 11145  		}
 11146  		c := auxIntToInt16(v_0.AuxInt)
 11147  		if v_1.Op != OpConst16 {
 11148  			break
 11149  		}
 11150  		d := auxIntToInt16(v_1.AuxInt)
 11151  		v.reset(OpConstBool)
 11152  		v.AuxInt = boolToAuxInt(c <= d)
 11153  		return true
 11154  	}
 11155  	// match: (Leq16 (Const16 [0]) (And16 _ (Const16 [c])))
 11156  	// cond: c >= 0
 11157  	// result: (ConstBool [true])
 11158  	for {
 11159  		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 || v_1.Op != OpAnd16 {
 11160  			break
 11161  		}
 11162  		_ = v_1.Args[1]
 11163  		v_1_0 := v_1.Args[0]
 11164  		v_1_1 := v_1.Args[1]
 11165  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 11166  			if v_1_1.Op != OpConst16 {
 11167  				continue
 11168  			}
 11169  			c := auxIntToInt16(v_1_1.AuxInt)
 11170  			if !(c >= 0) {
 11171  				continue
 11172  			}
 11173  			v.reset(OpConstBool)
 11174  			v.AuxInt = boolToAuxInt(true)
 11175  			return true
 11176  		}
 11177  		break
 11178  	}
 11179  	// match: (Leq16 (Const16 [0]) (Rsh16Ux64 _ (Const64 [c])))
 11180  	// cond: c > 0
 11181  	// result: (ConstBool [true])
 11182  	for {
 11183  		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 || v_1.Op != OpRsh16Ux64 {
 11184  			break
 11185  		}
 11186  		_ = v_1.Args[1]
 11187  		v_1_1 := v_1.Args[1]
 11188  		if v_1_1.Op != OpConst64 {
 11189  			break
 11190  		}
 11191  		c := auxIntToInt64(v_1_1.AuxInt)
 11192  		if !(c > 0) {
 11193  			break
 11194  		}
 11195  		v.reset(OpConstBool)
 11196  		v.AuxInt = boolToAuxInt(true)
 11197  		return true
 11198  	}
 11199  	return false
 11200  }
 11201  func rewriteValuegeneric_OpLeq16U(v *Value) bool {
 11202  	v_1 := v.Args[1]
 11203  	v_0 := v.Args[0]
 11204  	// match: (Leq16U (Const16 [c]) (Const16 [d]))
 11205  	// result: (ConstBool [uint16(c) <= uint16(d)])
 11206  	for {
 11207  		if v_0.Op != OpConst16 {
 11208  			break
 11209  		}
 11210  		c := auxIntToInt16(v_0.AuxInt)
 11211  		if v_1.Op != OpConst16 {
 11212  			break
 11213  		}
 11214  		d := auxIntToInt16(v_1.AuxInt)
 11215  		v.reset(OpConstBool)
 11216  		v.AuxInt = boolToAuxInt(uint16(c) <= uint16(d))
 11217  		return true
 11218  	}
 11219  	// match: (Leq16U (Const16 [0]) _)
 11220  	// result: (ConstBool [true])
 11221  	for {
 11222  		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
 11223  			break
 11224  		}
 11225  		v.reset(OpConstBool)
 11226  		v.AuxInt = boolToAuxInt(true)
 11227  		return true
 11228  	}
 11229  	return false
 11230  }
 11231  func rewriteValuegeneric_OpLeq32(v *Value) bool {
 11232  	v_1 := v.Args[1]
 11233  	v_0 := v.Args[0]
 11234  	// match: (Leq32 (Const32 [c]) (Const32 [d]))
 11235  	// result: (ConstBool [c <= d])
 11236  	for {
 11237  		if v_0.Op != OpConst32 {
 11238  			break
 11239  		}
 11240  		c := auxIntToInt32(v_0.AuxInt)
 11241  		if v_1.Op != OpConst32 {
 11242  			break
 11243  		}
 11244  		d := auxIntToInt32(v_1.AuxInt)
 11245  		v.reset(OpConstBool)
 11246  		v.AuxInt = boolToAuxInt(c <= d)
 11247  		return true
 11248  	}
 11249  	// match: (Leq32 (Const32 [0]) (And32 _ (Const32 [c])))
 11250  	// cond: c >= 0
 11251  	// result: (ConstBool [true])
 11252  	for {
 11253  		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 || v_1.Op != OpAnd32 {
 11254  			break
 11255  		}
 11256  		_ = v_1.Args[1]
 11257  		v_1_0 := v_1.Args[0]
 11258  		v_1_1 := v_1.Args[1]
 11259  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 11260  			if v_1_1.Op != OpConst32 {
 11261  				continue
 11262  			}
 11263  			c := auxIntToInt32(v_1_1.AuxInt)
 11264  			if !(c >= 0) {
 11265  				continue
 11266  			}
 11267  			v.reset(OpConstBool)
 11268  			v.AuxInt = boolToAuxInt(true)
 11269  			return true
 11270  		}
 11271  		break
 11272  	}
 11273  	// match: (Leq32 (Const32 [0]) (Rsh32Ux64 _ (Const64 [c])))
 11274  	// cond: c > 0
 11275  	// result: (ConstBool [true])
 11276  	for {
 11277  		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 || v_1.Op != OpRsh32Ux64 {
 11278  			break
 11279  		}
 11280  		_ = v_1.Args[1]
 11281  		v_1_1 := v_1.Args[1]
 11282  		if v_1_1.Op != OpConst64 {
 11283  			break
 11284  		}
 11285  		c := auxIntToInt64(v_1_1.AuxInt)
 11286  		if !(c > 0) {
 11287  			break
 11288  		}
 11289  		v.reset(OpConstBool)
 11290  		v.AuxInt = boolToAuxInt(true)
 11291  		return true
 11292  	}
 11293  	return false
 11294  }
 11295  func rewriteValuegeneric_OpLeq32F(v *Value) bool {
 11296  	v_1 := v.Args[1]
 11297  	v_0 := v.Args[0]
 11298  	// match: (Leq32F (Const32F [c]) (Const32F [d]))
 11299  	// result: (ConstBool [c <= d])
 11300  	for {
 11301  		if v_0.Op != OpConst32F {
 11302  			break
 11303  		}
 11304  		c := auxIntToFloat32(v_0.AuxInt)
 11305  		if v_1.Op != OpConst32F {
 11306  			break
 11307  		}
 11308  		d := auxIntToFloat32(v_1.AuxInt)
 11309  		v.reset(OpConstBool)
 11310  		v.AuxInt = boolToAuxInt(c <= d)
 11311  		return true
 11312  	}
 11313  	return false
 11314  }
 11315  func rewriteValuegeneric_OpLeq32U(v *Value) bool {
 11316  	v_1 := v.Args[1]
 11317  	v_0 := v.Args[0]
 11318  	// match: (Leq32U (Const32 [c]) (Const32 [d]))
 11319  	// result: (ConstBool [uint32(c) <= uint32(d)])
 11320  	for {
 11321  		if v_0.Op != OpConst32 {
 11322  			break
 11323  		}
 11324  		c := auxIntToInt32(v_0.AuxInt)
 11325  		if v_1.Op != OpConst32 {
 11326  			break
 11327  		}
 11328  		d := auxIntToInt32(v_1.AuxInt)
 11329  		v.reset(OpConstBool)
 11330  		v.AuxInt = boolToAuxInt(uint32(c) <= uint32(d))
 11331  		return true
 11332  	}
 11333  	// match: (Leq32U (Const32 [0]) _)
 11334  	// result: (ConstBool [true])
 11335  	for {
 11336  		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 11337  			break
 11338  		}
 11339  		v.reset(OpConstBool)
 11340  		v.AuxInt = boolToAuxInt(true)
 11341  		return true
 11342  	}
 11343  	return false
 11344  }
 11345  func rewriteValuegeneric_OpLeq64(v *Value) bool {
 11346  	v_1 := v.Args[1]
 11347  	v_0 := v.Args[0]
 11348  	// match: (Leq64 (Const64 [c]) (Const64 [d]))
 11349  	// result: (ConstBool [c <= d])
 11350  	for {
 11351  		if v_0.Op != OpConst64 {
 11352  			break
 11353  		}
 11354  		c := auxIntToInt64(v_0.AuxInt)
 11355  		if v_1.Op != OpConst64 {
 11356  			break
 11357  		}
 11358  		d := auxIntToInt64(v_1.AuxInt)
 11359  		v.reset(OpConstBool)
 11360  		v.AuxInt = boolToAuxInt(c <= d)
 11361  		return true
 11362  	}
 11363  	// match: (Leq64 (Const64 [0]) (And64 _ (Const64 [c])))
 11364  	// cond: c >= 0
 11365  	// result: (ConstBool [true])
 11366  	for {
 11367  		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 || v_1.Op != OpAnd64 {
 11368  			break
 11369  		}
 11370  		_ = v_1.Args[1]
 11371  		v_1_0 := v_1.Args[0]
 11372  		v_1_1 := v_1.Args[1]
 11373  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 11374  			if v_1_1.Op != OpConst64 {
 11375  				continue
 11376  			}
 11377  			c := auxIntToInt64(v_1_1.AuxInt)
 11378  			if !(c >= 0) {
 11379  				continue
 11380  			}
 11381  			v.reset(OpConstBool)
 11382  			v.AuxInt = boolToAuxInt(true)
 11383  			return true
 11384  		}
 11385  		break
 11386  	}
 11387  	// match: (Leq64 (Const64 [0]) (Rsh64Ux64 _ (Const64 [c])))
 11388  	// cond: c > 0
 11389  	// result: (ConstBool [true])
 11390  	for {
 11391  		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 || v_1.Op != OpRsh64Ux64 {
 11392  			break
 11393  		}
 11394  		_ = v_1.Args[1]
 11395  		v_1_1 := v_1.Args[1]
 11396  		if v_1_1.Op != OpConst64 {
 11397  			break
 11398  		}
 11399  		c := auxIntToInt64(v_1_1.AuxInt)
 11400  		if !(c > 0) {
 11401  			break
 11402  		}
 11403  		v.reset(OpConstBool)
 11404  		v.AuxInt = boolToAuxInt(true)
 11405  		return true
 11406  	}
 11407  	return false
 11408  }
 11409  func rewriteValuegeneric_OpLeq64F(v *Value) bool {
 11410  	v_1 := v.Args[1]
 11411  	v_0 := v.Args[0]
 11412  	// match: (Leq64F (Const64F [c]) (Const64F [d]))
 11413  	// result: (ConstBool [c <= d])
 11414  	for {
 11415  		if v_0.Op != OpConst64F {
 11416  			break
 11417  		}
 11418  		c := auxIntToFloat64(v_0.AuxInt)
 11419  		if v_1.Op != OpConst64F {
 11420  			break
 11421  		}
 11422  		d := auxIntToFloat64(v_1.AuxInt)
 11423  		v.reset(OpConstBool)
 11424  		v.AuxInt = boolToAuxInt(c <= d)
 11425  		return true
 11426  	}
 11427  	return false
 11428  }
 11429  func rewriteValuegeneric_OpLeq64U(v *Value) bool {
 11430  	v_1 := v.Args[1]
 11431  	v_0 := v.Args[0]
 11432  	// match: (Leq64U (Const64 [c]) (Const64 [d]))
 11433  	// result: (ConstBool [uint64(c) <= uint64(d)])
 11434  	for {
 11435  		if v_0.Op != OpConst64 {
 11436  			break
 11437  		}
 11438  		c := auxIntToInt64(v_0.AuxInt)
 11439  		if v_1.Op != OpConst64 {
 11440  			break
 11441  		}
 11442  		d := auxIntToInt64(v_1.AuxInt)
 11443  		v.reset(OpConstBool)
 11444  		v.AuxInt = boolToAuxInt(uint64(c) <= uint64(d))
 11445  		return true
 11446  	}
 11447  	// match: (Leq64U (Const64 [0]) _)
 11448  	// result: (ConstBool [true])
 11449  	for {
 11450  		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 11451  			break
 11452  		}
 11453  		v.reset(OpConstBool)
 11454  		v.AuxInt = boolToAuxInt(true)
 11455  		return true
 11456  	}
 11457  	return false
 11458  }
 11459  func rewriteValuegeneric_OpLeq8(v *Value) bool {
 11460  	v_1 := v.Args[1]
 11461  	v_0 := v.Args[0]
 11462  	// match: (Leq8 (Const8 [c]) (Const8 [d]))
 11463  	// result: (ConstBool [c <= d])
 11464  	for {
 11465  		if v_0.Op != OpConst8 {
 11466  			break
 11467  		}
 11468  		c := auxIntToInt8(v_0.AuxInt)
 11469  		if v_1.Op != OpConst8 {
 11470  			break
 11471  		}
 11472  		d := auxIntToInt8(v_1.AuxInt)
 11473  		v.reset(OpConstBool)
 11474  		v.AuxInt = boolToAuxInt(c <= d)
 11475  		return true
 11476  	}
 11477  	// match: (Leq8 (Const8 [0]) (And8 _ (Const8 [c])))
 11478  	// cond: c >= 0
 11479  	// result: (ConstBool [true])
 11480  	for {
 11481  		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 || v_1.Op != OpAnd8 {
 11482  			break
 11483  		}
 11484  		_ = v_1.Args[1]
 11485  		v_1_0 := v_1.Args[0]
 11486  		v_1_1 := v_1.Args[1]
 11487  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 11488  			if v_1_1.Op != OpConst8 {
 11489  				continue
 11490  			}
 11491  			c := auxIntToInt8(v_1_1.AuxInt)
 11492  			if !(c >= 0) {
 11493  				continue
 11494  			}
 11495  			v.reset(OpConstBool)
 11496  			v.AuxInt = boolToAuxInt(true)
 11497  			return true
 11498  		}
 11499  		break
 11500  	}
 11501  	// match: (Leq8 (Const8 [0]) (Rsh8Ux64 _ (Const64 [c])))
 11502  	// cond: c > 0
 11503  	// result: (ConstBool [true])
 11504  	for {
 11505  		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 || v_1.Op != OpRsh8Ux64 {
 11506  			break
 11507  		}
 11508  		_ = v_1.Args[1]
 11509  		v_1_1 := v_1.Args[1]
 11510  		if v_1_1.Op != OpConst64 {
 11511  			break
 11512  		}
 11513  		c := auxIntToInt64(v_1_1.AuxInt)
 11514  		if !(c > 0) {
 11515  			break
 11516  		}
 11517  		v.reset(OpConstBool)
 11518  		v.AuxInt = boolToAuxInt(true)
 11519  		return true
 11520  	}
 11521  	return false
 11522  }
 11523  func rewriteValuegeneric_OpLeq8U(v *Value) bool {
 11524  	v_1 := v.Args[1]
 11525  	v_0 := v.Args[0]
 11526  	// match: (Leq8U (Const8 [c]) (Const8 [d]))
 11527  	// result: (ConstBool [ uint8(c) <= uint8(d)])
 11528  	for {
 11529  		if v_0.Op != OpConst8 {
 11530  			break
 11531  		}
 11532  		c := auxIntToInt8(v_0.AuxInt)
 11533  		if v_1.Op != OpConst8 {
 11534  			break
 11535  		}
 11536  		d := auxIntToInt8(v_1.AuxInt)
 11537  		v.reset(OpConstBool)
 11538  		v.AuxInt = boolToAuxInt(uint8(c) <= uint8(d))
 11539  		return true
 11540  	}
 11541  	// match: (Leq8U (Const8 [0]) _)
 11542  	// result: (ConstBool [true])
 11543  	for {
 11544  		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
 11545  			break
 11546  		}
 11547  		v.reset(OpConstBool)
 11548  		v.AuxInt = boolToAuxInt(true)
 11549  		return true
 11550  	}
 11551  	return false
 11552  }
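         // rewriteValuegeneric_OpLess16 constant-folds 16-bit signed < comparisons and, for
         // known-non-negative x, rewrites 0 < x to x != 0 and x < 1 to x == 0. The remaining
         // OpLess*/OpLess*U functions below follow the same pattern at other widths; the
         // unsigned variants fold x < 0 to false.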
 11553  func rewriteValuegeneric_OpLess16(v *Value) bool {
 11554  	v_1 := v.Args[1]
 11555  	v_0 := v.Args[0]
 11556  	b := v.Block
 11557  	// match: (Less16 (Const16 [c]) (Const16 [d]))
 11558  	// result: (ConstBool [c < d])
 11559  	for {
 11560  		if v_0.Op != OpConst16 {
 11561  			break
 11562  		}
 11563  		c := auxIntToInt16(v_0.AuxInt)
 11564  		if v_1.Op != OpConst16 {
 11565  			break
 11566  		}
 11567  		d := auxIntToInt16(v_1.AuxInt)
 11568  		v.reset(OpConstBool)
 11569  		v.AuxInt = boolToAuxInt(c < d)
 11570  		return true
 11571  	}
 11572  	// match: (Less16 (Const16 <t> [0]) x)
 11573  	// cond: isNonNegative(x)
 11574  	// result: (Neq16 (Const16 <t> [0]) x)
 11575  	for {
 11576  		if v_0.Op != OpConst16 {
 11577  			break
 11578  		}
 11579  		t := v_0.Type
 11580  		if auxIntToInt16(v_0.AuxInt) != 0 {
 11581  			break
 11582  		}
 11583  		x := v_1
 11584  		if !(isNonNegative(x)) {
 11585  			break
 11586  		}
 11587  		v.reset(OpNeq16)
 11588  		v0 := b.NewValue0(v.Pos, OpConst16, t)
 11589  		v0.AuxInt = int16ToAuxInt(0)
 11590  		v.AddArg2(v0, x)
 11591  		return true
 11592  	}
 11593  	// match: (Less16 x (Const16 <t> [1]))
 11594  	// cond: isNonNegative(x)
 11595  	// result: (Eq16 (Const16 <t> [0]) x)
 11596  	for {
 11597  		x := v_0
 11598  		if v_1.Op != OpConst16 {
 11599  			break
 11600  		}
 11601  		t := v_1.Type
 11602  		if auxIntToInt16(v_1.AuxInt) != 1 || !(isNonNegative(x)) {
 11603  			break
 11604  		}
 11605  		v.reset(OpEq16)
 11606  		v0 := b.NewValue0(v.Pos, OpConst16, t)
 11607  		v0.AuxInt = int16ToAuxInt(0)
 11608  		v.AddArg2(v0, x)
 11609  		return true
 11610  	}
 11611  	return false
 11612  }
 11613  func rewriteValuegeneric_OpLess16U(v *Value) bool {
 11614  	v_1 := v.Args[1]
 11615  	v_0 := v.Args[0]
 11616  	// match: (Less16U (Const16 [c]) (Const16 [d]))
 11617  	// result: (ConstBool [uint16(c) < uint16(d)])
 11618  	for {
 11619  		if v_0.Op != OpConst16 {
 11620  			break
 11621  		}
 11622  		c := auxIntToInt16(v_0.AuxInt)
 11623  		if v_1.Op != OpConst16 {
 11624  			break
 11625  		}
 11626  		d := auxIntToInt16(v_1.AuxInt)
 11627  		v.reset(OpConstBool)
 11628  		v.AuxInt = boolToAuxInt(uint16(c) < uint16(d))
 11629  		return true
 11630  	}
 11631  	// match: (Less16U _ (Const16 [0]))
 11632  	// result: (ConstBool [false])
 11633  	for {
 11634  		if v_1.Op != OpConst16 || auxIntToInt16(v_1.AuxInt) != 0 {
 11635  			break
 11636  		}
 11637  		v.reset(OpConstBool)
 11638  		v.AuxInt = boolToAuxInt(false)
 11639  		return true
 11640  	}
 11641  	return false
 11642  }
 11643  func rewriteValuegeneric_OpLess32(v *Value) bool {
 11644  	v_1 := v.Args[1]
 11645  	v_0 := v.Args[0]
 11646  	b := v.Block
 11647  	// match: (Less32 (Const32 [c]) (Const32 [d]))
 11648  	// result: (ConstBool [c < d])
 11649  	for {
 11650  		if v_0.Op != OpConst32 {
 11651  			break
 11652  		}
 11653  		c := auxIntToInt32(v_0.AuxInt)
 11654  		if v_1.Op != OpConst32 {
 11655  			break
 11656  		}
 11657  		d := auxIntToInt32(v_1.AuxInt)
 11658  		v.reset(OpConstBool)
 11659  		v.AuxInt = boolToAuxInt(c < d)
 11660  		return true
 11661  	}
 11662  	// match: (Less32 (Const32 <t> [0]) x)
 11663  	// cond: isNonNegative(x)
 11664  	// result: (Neq32 (Const32 <t> [0]) x)
 11665  	for {
 11666  		if v_0.Op != OpConst32 {
 11667  			break
 11668  		}
 11669  		t := v_0.Type
 11670  		if auxIntToInt32(v_0.AuxInt) != 0 {
 11671  			break
 11672  		}
 11673  		x := v_1
 11674  		if !(isNonNegative(x)) {
 11675  			break
 11676  		}
 11677  		v.reset(OpNeq32)
 11678  		v0 := b.NewValue0(v.Pos, OpConst32, t)
 11679  		v0.AuxInt = int32ToAuxInt(0)
 11680  		v.AddArg2(v0, x)
 11681  		return true
 11682  	}
 11683  	// match: (Less32 x (Const32 <t> [1]))
 11684  	// cond: isNonNegative(x)
 11685  	// result: (Eq32 (Const32 <t> [0]) x)
 11686  	for {
 11687  		x := v_0
 11688  		if v_1.Op != OpConst32 {
 11689  			break
 11690  		}
 11691  		t := v_1.Type
 11692  		if auxIntToInt32(v_1.AuxInt) != 1 || !(isNonNegative(x)) {
 11693  			break
 11694  		}
 11695  		v.reset(OpEq32)
 11696  		v0 := b.NewValue0(v.Pos, OpConst32, t)
 11697  		v0.AuxInt = int32ToAuxInt(0)
 11698  		v.AddArg2(v0, x)
 11699  		return true
 11700  	}
 11701  	return false
 11702  }
 11703  func rewriteValuegeneric_OpLess32F(v *Value) bool {
 11704  	v_1 := v.Args[1]
 11705  	v_0 := v.Args[0]
 11706  	// match: (Less32F (Const32F [c]) (Const32F [d]))
 11707  	// result: (ConstBool [c < d])
 11708  	for {
 11709  		if v_0.Op != OpConst32F {
 11710  			break
 11711  		}
 11712  		c := auxIntToFloat32(v_0.AuxInt)
 11713  		if v_1.Op != OpConst32F {
 11714  			break
 11715  		}
 11716  		d := auxIntToFloat32(v_1.AuxInt)
 11717  		v.reset(OpConstBool)
 11718  		v.AuxInt = boolToAuxInt(c < d)
 11719  		return true
 11720  	}
 11721  	return false
 11722  }
 11723  func rewriteValuegeneric_OpLess32U(v *Value) bool {
 11724  	v_1 := v.Args[1]
 11725  	v_0 := v.Args[0]
 11726  	// match: (Less32U (Const32 [c]) (Const32 [d]))
 11727  	// result: (ConstBool [uint32(c) < uint32(d)])
 11728  	for {
 11729  		if v_0.Op != OpConst32 {
 11730  			break
 11731  		}
 11732  		c := auxIntToInt32(v_0.AuxInt)
 11733  		if v_1.Op != OpConst32 {
 11734  			break
 11735  		}
 11736  		d := auxIntToInt32(v_1.AuxInt)
 11737  		v.reset(OpConstBool)
 11738  		v.AuxInt = boolToAuxInt(uint32(c) < uint32(d))
 11739  		return true
 11740  	}
 11741  	// match: (Less32U _ (Const32 [0]))
 11742  	// result: (ConstBool [false])
 11743  	for {
 11744  		if v_1.Op != OpConst32 || auxIntToInt32(v_1.AuxInt) != 0 {
 11745  			break
 11746  		}
 11747  		v.reset(OpConstBool)
 11748  		v.AuxInt = boolToAuxInt(false)
 11749  		return true
 11750  	}
 11751  	return false
 11752  }
 11753  func rewriteValuegeneric_OpLess64(v *Value) bool {
 11754  	v_1 := v.Args[1]
 11755  	v_0 := v.Args[0]
 11756  	b := v.Block
 11757  	// match: (Less64 (Const64 [c]) (Const64 [d]))
 11758  	// result: (ConstBool [c < d])
 11759  	for {
 11760  		if v_0.Op != OpConst64 {
 11761  			break
 11762  		}
 11763  		c := auxIntToInt64(v_0.AuxInt)
 11764  		if v_1.Op != OpConst64 {
 11765  			break
 11766  		}
 11767  		d := auxIntToInt64(v_1.AuxInt)
 11768  		v.reset(OpConstBool)
 11769  		v.AuxInt = boolToAuxInt(c < d)
 11770  		return true
 11771  	}
 11772  	// match: (Less64 (Const64 <t> [0]) x)
 11773  	// cond: isNonNegative(x)
 11774  	// result: (Neq64 (Const64 <t> [0]) x)
 11775  	for {
 11776  		if v_0.Op != OpConst64 {
 11777  			break
 11778  		}
 11779  		t := v_0.Type
 11780  		if auxIntToInt64(v_0.AuxInt) != 0 {
 11781  			break
 11782  		}
 11783  		x := v_1
 11784  		if !(isNonNegative(x)) {
 11785  			break
 11786  		}
 11787  		v.reset(OpNeq64)
 11788  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 11789  		v0.AuxInt = int64ToAuxInt(0)
 11790  		v.AddArg2(v0, x)
 11791  		return true
 11792  	}
 11793  	// match: (Less64 x (Const64 <t> [1]))
 11794  	// cond: isNonNegative(x)
 11795  	// result: (Eq64 (Const64 <t> [0]) x)
 11796  	for {
 11797  		x := v_0
 11798  		if v_1.Op != OpConst64 {
 11799  			break
 11800  		}
 11801  		t := v_1.Type
 11802  		if auxIntToInt64(v_1.AuxInt) != 1 || !(isNonNegative(x)) {
 11803  			break
 11804  		}
 11805  		v.reset(OpEq64)
 11806  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 11807  		v0.AuxInt = int64ToAuxInt(0)
 11808  		v.AddArg2(v0, x)
 11809  		return true
 11810  	}
 11811  	return false
 11812  }
 11813  func rewriteValuegeneric_OpLess64F(v *Value) bool {
 11814  	v_1 := v.Args[1]
 11815  	v_0 := v.Args[0]
 11816  	// match: (Less64F (Const64F [c]) (Const64F [d]))
 11817  	// result: (ConstBool [c < d])
 11818  	for {
 11819  		if v_0.Op != OpConst64F {
 11820  			break
 11821  		}
 11822  		c := auxIntToFloat64(v_0.AuxInt)
 11823  		if v_1.Op != OpConst64F {
 11824  			break
 11825  		}
 11826  		d := auxIntToFloat64(v_1.AuxInt)
 11827  		v.reset(OpConstBool)
 11828  		v.AuxInt = boolToAuxInt(c < d)
 11829  		return true
 11830  	}
 11831  	return false
 11832  }
 11833  func rewriteValuegeneric_OpLess64U(v *Value) bool {
 11834  	v_1 := v.Args[1]
 11835  	v_0 := v.Args[0]
 11836  	// match: (Less64U (Const64 [c]) (Const64 [d]))
 11837  	// result: (ConstBool [uint64(c) < uint64(d)])
 11838  	for {
 11839  		if v_0.Op != OpConst64 {
 11840  			break
 11841  		}
 11842  		c := auxIntToInt64(v_0.AuxInt)
 11843  		if v_1.Op != OpConst64 {
 11844  			break
 11845  		}
 11846  		d := auxIntToInt64(v_1.AuxInt)
 11847  		v.reset(OpConstBool)
 11848  		v.AuxInt = boolToAuxInt(uint64(c) < uint64(d))
 11849  		return true
 11850  	}
 11851  	// match: (Less64U _ (Const64 [0]))
 11852  	// result: (ConstBool [false])
 11853  	for {
 11854  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
 11855  			break
 11856  		}
 11857  		v.reset(OpConstBool)
 11858  		v.AuxInt = boolToAuxInt(false)
 11859  		return true
 11860  	}
 11861  	return false
 11862  }
 11863  func rewriteValuegeneric_OpLess8(v *Value) bool {
 11864  	v_1 := v.Args[1]
 11865  	v_0 := v.Args[0]
 11866  	b := v.Block
 11867  	// match: (Less8 (Const8 [c]) (Const8 [d]))
 11868  	// result: (ConstBool [c < d])
 11869  	for {
 11870  		if v_0.Op != OpConst8 {
 11871  			break
 11872  		}
 11873  		c := auxIntToInt8(v_0.AuxInt)
 11874  		if v_1.Op != OpConst8 {
 11875  			break
 11876  		}
 11877  		d := auxIntToInt8(v_1.AuxInt)
 11878  		v.reset(OpConstBool)
 11879  		v.AuxInt = boolToAuxInt(c < d)
 11880  		return true
 11881  	}
 11882  	// match: (Less8 (Const8 <t> [0]) x)
 11883  	// cond: isNonNegative(x)
 11884  	// result: (Neq8 (Const8 <t> [0]) x)
 11885  	for {
 11886  		if v_0.Op != OpConst8 {
 11887  			break
 11888  		}
 11889  		t := v_0.Type
 11890  		if auxIntToInt8(v_0.AuxInt) != 0 {
 11891  			break
 11892  		}
 11893  		x := v_1
 11894  		if !(isNonNegative(x)) {
 11895  			break
 11896  		}
 11897  		v.reset(OpNeq8)
 11898  		v0 := b.NewValue0(v.Pos, OpConst8, t)
 11899  		v0.AuxInt = int8ToAuxInt(0)
 11900  		v.AddArg2(v0, x)
 11901  		return true
 11902  	}
 11903  	// match: (Less8 x (Const8 <t> [1]))
 11904  	// cond: isNonNegative(x)
 11905  	// result: (Eq8 (Const8 <t> [0]) x)
 11906  	for {
 11907  		x := v_0
 11908  		if v_1.Op != OpConst8 {
 11909  			break
 11910  		}
 11911  		t := v_1.Type
 11912  		if auxIntToInt8(v_1.AuxInt) != 1 || !(isNonNegative(x)) {
 11913  			break
 11914  		}
 11915  		v.reset(OpEq8)
 11916  		v0 := b.NewValue0(v.Pos, OpConst8, t)
 11917  		v0.AuxInt = int8ToAuxInt(0)
 11918  		v.AddArg2(v0, x)
 11919  		return true
 11920  	}
 11921  	return false
 11922  }
 11923  func rewriteValuegeneric_OpLess8U(v *Value) bool {
 11924  	v_1 := v.Args[1]
 11925  	v_0 := v.Args[0]
 11926  	// match: (Less8U (Const8 [c]) (Const8 [d]))
 11927  	// result: (ConstBool [uint8(c) < uint8(d)])
 11928  	for {
 11929  		if v_0.Op != OpConst8 {
 11930  			break
 11931  		}
 11932  		c := auxIntToInt8(v_0.AuxInt)
 11933  		if v_1.Op != OpConst8 {
 11934  			break
 11935  		}
 11936  		d := auxIntToInt8(v_1.AuxInt)
 11937  		v.reset(OpConstBool)
 11938  		v.AuxInt = boolToAuxInt(uint8(c) < uint8(d))
 11939  		return true
 11940  	}
 11941  	// match: (Less8U _ (Const8 [0]))
 11942  	// result: (ConstBool [false])
 11943  	for {
 11944  		if v_1.Op != OpConst8 || auxIntToInt8(v_1.AuxInt) != 0 {
 11945  			break
 11946  		}
 11947  		v.reset(OpConstBool)
 11948  		v.AuxInt = boolToAuxInt(false)
 11949  		return true
 11950  	}
 11951  	return false
 11952  }
 11953  func rewriteValuegeneric_OpLoad(v *Value) bool {
 11954  	v_1 := v.Args[1]
 11955  	v_0 := v.Args[0]
 11956  	b := v.Block
 11957  	fe := b.Func.fe
 11958  	// match: (Load <t1> p1 (Store {t2} p2 x _))
 11959  	// cond: isSamePtr(p1, p2) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == t2.Size()
 11960  	// result: x
 11961  	for {
 11962  		t1 := v.Type
 11963  		p1 := v_0
 11964  		if v_1.Op != OpStore {
 11965  			break
 11966  		}
 11967  		t2 := auxToType(v_1.Aux)
 11968  		x := v_1.Args[1]
 11969  		p2 := v_1.Args[0]
 11970  		if !(isSamePtr(p1, p2) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == t2.Size()) {
 11971  			break
 11972  		}
 11973  		v.copyOf(x)
 11974  		return true
 11975  	}
 11976  	// match: (Load <t1> p1 (Store {t2} p2 _ (Store {t3} p3 x _)))
 11977  	// cond: isSamePtr(p1, p3) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == t2.Size() && disjoint(p3, t3.Size(), p2, t2.Size())
 11978  	// result: x
 11979  	for {
 11980  		t1 := v.Type
 11981  		p1 := v_0
 11982  		if v_1.Op != OpStore {
 11983  			break
 11984  		}
 11985  		t2 := auxToType(v_1.Aux)
 11986  		_ = v_1.Args[2]
 11987  		p2 := v_1.Args[0]
 11988  		v_1_2 := v_1.Args[2]
 11989  		if v_1_2.Op != OpStore {
 11990  			break
 11991  		}
 11992  		t3 := auxToType(v_1_2.Aux)
 11993  		x := v_1_2.Args[1]
 11994  		p3 := v_1_2.Args[0]
 11995  		if !(isSamePtr(p1, p3) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == t2.Size() && disjoint(p3, t3.Size(), p2, t2.Size())) {
 11996  			break
 11997  		}
 11998  		v.copyOf(x)
 11999  		return true
 12000  	}
 12001  	// match: (Load <t1> p1 (Store {t2} p2 _ (Store {t3} p3 _ (Store {t4} p4 x _))))
 12002  	// cond: isSamePtr(p1, p4) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == t2.Size() && disjoint(p4, t4.Size(), p2, t2.Size()) && disjoint(p4, t4.Size(), p3, t3.Size())
 12003  	// result: x
 12004  	for {
 12005  		t1 := v.Type
 12006  		p1 := v_0
 12007  		if v_1.Op != OpStore {
 12008  			break
 12009  		}
 12010  		t2 := auxToType(v_1.Aux)
 12011  		_ = v_1.Args[2]
 12012  		p2 := v_1.Args[0]
 12013  		v_1_2 := v_1.Args[2]
 12014  		if v_1_2.Op != OpStore {
 12015  			break
 12016  		}
 12017  		t3 := auxToType(v_1_2.Aux)
 12018  		_ = v_1_2.Args[2]
 12019  		p3 := v_1_2.Args[0]
 12020  		v_1_2_2 := v_1_2.Args[2]
 12021  		if v_1_2_2.Op != OpStore {
 12022  			break
 12023  		}
 12024  		t4 := auxToType(v_1_2_2.Aux)
 12025  		x := v_1_2_2.Args[1]
 12026  		p4 := v_1_2_2.Args[0]
 12027  		if !(isSamePtr(p1, p4) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == t2.Size() && disjoint(p4, t4.Size(), p2, t2.Size()) && disjoint(p4, t4.Size(), p3, t3.Size())) {
 12028  			break
 12029  		}
 12030  		v.copyOf(x)
 12031  		return true
 12032  	}
 12033  	// match: (Load <t1> p1 (Store {t2} p2 _ (Store {t3} p3 _ (Store {t4} p4 _ (Store {t5} p5 x _)))))
 12034  	// cond: isSamePtr(p1, p5) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == t2.Size() && disjoint(p5, t5.Size(), p2, t2.Size()) && disjoint(p5, t5.Size(), p3, t3.Size()) && disjoint(p5, t5.Size(), p4, t4.Size())
 12035  	// result: x
 12036  	for {
 12037  		t1 := v.Type
 12038  		p1 := v_0
 12039  		if v_1.Op != OpStore {
 12040  			break
 12041  		}
 12042  		t2 := auxToType(v_1.Aux)
 12043  		_ = v_1.Args[2]
 12044  		p2 := v_1.Args[0]
 12045  		v_1_2 := v_1.Args[2]
 12046  		if v_1_2.Op != OpStore {
 12047  			break
 12048  		}
 12049  		t3 := auxToType(v_1_2.Aux)
 12050  		_ = v_1_2.Args[2]
 12051  		p3 := v_1_2.Args[0]
 12052  		v_1_2_2 := v_1_2.Args[2]
 12053  		if v_1_2_2.Op != OpStore {
 12054  			break
 12055  		}
 12056  		t4 := auxToType(v_1_2_2.Aux)
 12057  		_ = v_1_2_2.Args[2]
 12058  		p4 := v_1_2_2.Args[0]
 12059  		v_1_2_2_2 := v_1_2_2.Args[2]
 12060  		if v_1_2_2_2.Op != OpStore {
 12061  			break
 12062  		}
 12063  		t5 := auxToType(v_1_2_2_2.Aux)
 12064  		x := v_1_2_2_2.Args[1]
 12065  		p5 := v_1_2_2_2.Args[0]
 12066  		if !(isSamePtr(p1, p5) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == t2.Size() && disjoint(p5, t5.Size(), p2, t2.Size()) && disjoint(p5, t5.Size(), p3, t3.Size()) && disjoint(p5, t5.Size(), p4, t4.Size())) {
 12067  			break
 12068  		}
 12069  		v.copyOf(x)
 12070  		return true
 12071  	}
 12072  	// match: (Load <t1> p1 (Store {t2} p2 (Const64 [x]) _))
 12073  	// cond: isSamePtr(p1,p2) && sizeof(t2) == 8 && is64BitFloat(t1) && !math.IsNaN(math.Float64frombits(uint64(x)))
 12074  	// result: (Const64F [math.Float64frombits(uint64(x))])
 12075  	for {
 12076  		t1 := v.Type
 12077  		p1 := v_0
 12078  		if v_1.Op != OpStore {
 12079  			break
 12080  		}
 12081  		t2 := auxToType(v_1.Aux)
 12082  		_ = v_1.Args[1]
 12083  		p2 := v_1.Args[0]
 12084  		v_1_1 := v_1.Args[1]
 12085  		if v_1_1.Op != OpConst64 {
 12086  			break
 12087  		}
 12088  		x := auxIntToInt64(v_1_1.AuxInt)
 12089  		if !(isSamePtr(p1, p2) && sizeof(t2) == 8 && is64BitFloat(t1) && !math.IsNaN(math.Float64frombits(uint64(x)))) {
 12090  			break
 12091  		}
 12092  		v.reset(OpConst64F)
 12093  		v.AuxInt = float64ToAuxInt(math.Float64frombits(uint64(x)))
 12094  		return true
 12095  	}
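	// For instance, the rule above turns a load of a stored
	// (Const64 [0x3FF0000000000000]) through a same-sized 64-bit float slot
	// into (Const64F [1.0]), since math.Float64frombits(0x3FF0000000000000)
	// is exactly 1.0; NaN bit patterns are excluded by the condition.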
 12096  	// match: (Load <t1> p1 (Store {t2} p2 (Const32 [x]) _))
 12097  	// cond: isSamePtr(p1,p2) && sizeof(t2) == 4 && is32BitFloat(t1) && !math.IsNaN(float64(math.Float32frombits(uint32(x))))
 12098  	// result: (Const32F [math.Float32frombits(uint32(x))])
 12099  	for {
 12100  		t1 := v.Type
 12101  		p1 := v_0
 12102  		if v_1.Op != OpStore {
 12103  			break
 12104  		}
 12105  		t2 := auxToType(v_1.Aux)
 12106  		_ = v_1.Args[1]
 12107  		p2 := v_1.Args[0]
 12108  		v_1_1 := v_1.Args[1]
 12109  		if v_1_1.Op != OpConst32 {
 12110  			break
 12111  		}
 12112  		x := auxIntToInt32(v_1_1.AuxInt)
 12113  		if !(isSamePtr(p1, p2) && sizeof(t2) == 4 && is32BitFloat(t1) && !math.IsNaN(float64(math.Float32frombits(uint32(x))))) {
 12114  			break
 12115  		}
 12116  		v.reset(OpConst32F)
 12117  		v.AuxInt = float32ToAuxInt(math.Float32frombits(uint32(x)))
 12118  		return true
 12119  	}
 12120  	// match: (Load <t1> p1 (Store {t2} p2 (Const64F [x]) _))
 12121  	// cond: isSamePtr(p1,p2) && sizeof(t2) == 8 && is64BitInt(t1)
 12122  	// result: (Const64 [int64(math.Float64bits(x))])
 12123  	for {
 12124  		t1 := v.Type
 12125  		p1 := v_0
 12126  		if v_1.Op != OpStore {
 12127  			break
 12128  		}
 12129  		t2 := auxToType(v_1.Aux)
 12130  		_ = v_1.Args[1]
 12131  		p2 := v_1.Args[0]
 12132  		v_1_1 := v_1.Args[1]
 12133  		if v_1_1.Op != OpConst64F {
 12134  			break
 12135  		}
 12136  		x := auxIntToFloat64(v_1_1.AuxInt)
 12137  		if !(isSamePtr(p1, p2) && sizeof(t2) == 8 && is64BitInt(t1)) {
 12138  			break
 12139  		}
 12140  		v.reset(OpConst64)
 12141  		v.AuxInt = int64ToAuxInt(int64(math.Float64bits(x)))
 12142  		return true
 12143  	}
 12144  	// match: (Load <t1> p1 (Store {t2} p2 (Const32F [x]) _))
 12145  	// cond: isSamePtr(p1,p2) && sizeof(t2) == 4 && is32BitInt(t1)
 12146  	// result: (Const32 [int32(math.Float32bits(x))])
 12147  	for {
 12148  		t1 := v.Type
 12149  		p1 := v_0
 12150  		if v_1.Op != OpStore {
 12151  			break
 12152  		}
 12153  		t2 := auxToType(v_1.Aux)
 12154  		_ = v_1.Args[1]
 12155  		p2 := v_1.Args[0]
 12156  		v_1_1 := v_1.Args[1]
 12157  		if v_1_1.Op != OpConst32F {
 12158  			break
 12159  		}
 12160  		x := auxIntToFloat32(v_1_1.AuxInt)
 12161  		if !(isSamePtr(p1, p2) && sizeof(t2) == 4 && is32BitInt(t1)) {
 12162  			break
 12163  		}
 12164  		v.reset(OpConst32)
 12165  		v.AuxInt = int32ToAuxInt(int32(math.Float32bits(x)))
 12166  		return true
 12167  	}
 12168  	// match: (Load <t1> op:(OffPtr [o1] p1) (Store {t2} p2 _ mem:(Zero [n] p3 _)))
 12169  	// cond: o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p3) && fe.CanSSA(t1) && disjoint(op, t1.Size(), p2, t2.Size())
 12170  	// result: @mem.Block (Load <t1> (OffPtr <op.Type> [o1] p3) mem)
 12171  	for {
 12172  		t1 := v.Type
 12173  		op := v_0
 12174  		if op.Op != OpOffPtr {
 12175  			break
 12176  		}
 12177  		o1 := auxIntToInt64(op.AuxInt)
 12178  		p1 := op.Args[0]
 12179  		if v_1.Op != OpStore {
 12180  			break
 12181  		}
 12182  		t2 := auxToType(v_1.Aux)
 12183  		_ = v_1.Args[2]
 12184  		p2 := v_1.Args[0]
 12185  		mem := v_1.Args[2]
 12186  		if mem.Op != OpZero {
 12187  			break
 12188  		}
 12189  		n := auxIntToInt64(mem.AuxInt)
 12190  		p3 := mem.Args[0]
 12191  		if !(o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p3) && fe.CanSSA(t1) && disjoint(op, t1.Size(), p2, t2.Size())) {
 12192  			break
 12193  		}
 12194  		b = mem.Block
 12195  		v0 := b.NewValue0(v.Pos, OpLoad, t1)
 12196  		v.copyOf(v0)
 12197  		v1 := b.NewValue0(v.Pos, OpOffPtr, op.Type)
 12198  		v1.AuxInt = int64ToAuxInt(o1)
 12199  		v1.AddArg(p3)
 12200  		v0.AddArg2(v1, mem)
 12201  		return true
 12202  	}
 12203  	// match: (Load <t1> op:(OffPtr [o1] p1) (Store {t2} p2 _ (Store {t3} p3 _ mem:(Zero [n] p4 _))))
 12204  	// cond: o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p4) && fe.CanSSA(t1) && disjoint(op, t1.Size(), p2, t2.Size()) && disjoint(op, t1.Size(), p3, t3.Size())
 12205  	// result: @mem.Block (Load <t1> (OffPtr <op.Type> [o1] p4) mem)
 12206  	for {
 12207  		t1 := v.Type
 12208  		op := v_0
 12209  		if op.Op != OpOffPtr {
 12210  			break
 12211  		}
 12212  		o1 := auxIntToInt64(op.AuxInt)
 12213  		p1 := op.Args[0]
 12214  		if v_1.Op != OpStore {
 12215  			break
 12216  		}
 12217  		t2 := auxToType(v_1.Aux)
 12218  		_ = v_1.Args[2]
 12219  		p2 := v_1.Args[0]
 12220  		v_1_2 := v_1.Args[2]
 12221  		if v_1_2.Op != OpStore {
 12222  			break
 12223  		}
 12224  		t3 := auxToType(v_1_2.Aux)
 12225  		_ = v_1_2.Args[2]
 12226  		p3 := v_1_2.Args[0]
 12227  		mem := v_1_2.Args[2]
 12228  		if mem.Op != OpZero {
 12229  			break
 12230  		}
 12231  		n := auxIntToInt64(mem.AuxInt)
 12232  		p4 := mem.Args[0]
 12233  		if !(o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p4) && fe.CanSSA(t1) && disjoint(op, t1.Size(), p2, t2.Size()) && disjoint(op, t1.Size(), p3, t3.Size())) {
 12234  			break
 12235  		}
 12236  		b = mem.Block
 12237  		v0 := b.NewValue0(v.Pos, OpLoad, t1)
 12238  		v.copyOf(v0)
 12239  		v1 := b.NewValue0(v.Pos, OpOffPtr, op.Type)
 12240  		v1.AuxInt = int64ToAuxInt(o1)
 12241  		v1.AddArg(p4)
 12242  		v0.AddArg2(v1, mem)
 12243  		return true
 12244  	}
 12245  	// match: (Load <t1> op:(OffPtr [o1] p1) (Store {t2} p2 _ (Store {t3} p3 _ (Store {t4} p4 _ mem:(Zero [n] p5 _)))))
 12246  	// cond: o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p5) && fe.CanSSA(t1) && disjoint(op, t1.Size(), p2, t2.Size()) && disjoint(op, t1.Size(), p3, t3.Size()) && disjoint(op, t1.Size(), p4, t4.Size())
 12247  	// result: @mem.Block (Load <t1> (OffPtr <op.Type> [o1] p5) mem)
 12248  	for {
 12249  		t1 := v.Type
 12250  		op := v_0
 12251  		if op.Op != OpOffPtr {
 12252  			break
 12253  		}
 12254  		o1 := auxIntToInt64(op.AuxInt)
 12255  		p1 := op.Args[0]
 12256  		if v_1.Op != OpStore {
 12257  			break
 12258  		}
 12259  		t2 := auxToType(v_1.Aux)
 12260  		_ = v_1.Args[2]
 12261  		p2 := v_1.Args[0]
 12262  		v_1_2 := v_1.Args[2]
 12263  		if v_1_2.Op != OpStore {
 12264  			break
 12265  		}
 12266  		t3 := auxToType(v_1_2.Aux)
 12267  		_ = v_1_2.Args[2]
 12268  		p3 := v_1_2.Args[0]
 12269  		v_1_2_2 := v_1_2.Args[2]
 12270  		if v_1_2_2.Op != OpStore {
 12271  			break
 12272  		}
 12273  		t4 := auxToType(v_1_2_2.Aux)
 12274  		_ = v_1_2_2.Args[2]
 12275  		p4 := v_1_2_2.Args[0]
 12276  		mem := v_1_2_2.Args[2]
 12277  		if mem.Op != OpZero {
 12278  			break
 12279  		}
 12280  		n := auxIntToInt64(mem.AuxInt)
 12281  		p5 := mem.Args[0]
 12282  		if !(o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p5) && fe.CanSSA(t1) && disjoint(op, t1.Size(), p2, t2.Size()) && disjoint(op, t1.Size(), p3, t3.Size()) && disjoint(op, t1.Size(), p4, t4.Size())) {
 12283  			break
 12284  		}
 12285  		b = mem.Block
 12286  		v0 := b.NewValue0(v.Pos, OpLoad, t1)
 12287  		v.copyOf(v0)
 12288  		v1 := b.NewValue0(v.Pos, OpOffPtr, op.Type)
 12289  		v1.AuxInt = int64ToAuxInt(o1)
 12290  		v1.AddArg(p5)
 12291  		v0.AddArg2(v1, mem)
 12292  		return true
 12293  	}
 12294  	// match: (Load <t1> op:(OffPtr [o1] p1) (Store {t2} p2 _ (Store {t3} p3 _ (Store {t4} p4 _ (Store {t5} p5 _ mem:(Zero [n] p6 _))))))
 12295  	// cond: o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p6) && fe.CanSSA(t1) && disjoint(op, t1.Size(), p2, t2.Size()) && disjoint(op, t1.Size(), p3, t3.Size()) && disjoint(op, t1.Size(), p4, t4.Size()) && disjoint(op, t1.Size(), p5, t5.Size())
 12296  	// result: @mem.Block (Load <t1> (OffPtr <op.Type> [o1] p6) mem)
 12297  	for {
 12298  		t1 := v.Type
 12299  		op := v_0
 12300  		if op.Op != OpOffPtr {
 12301  			break
 12302  		}
 12303  		o1 := auxIntToInt64(op.AuxInt)
 12304  		p1 := op.Args[0]
 12305  		if v_1.Op != OpStore {
 12306  			break
 12307  		}
 12308  		t2 := auxToType(v_1.Aux)
 12309  		_ = v_1.Args[2]
 12310  		p2 := v_1.Args[0]
 12311  		v_1_2 := v_1.Args[2]
 12312  		if v_1_2.Op != OpStore {
 12313  			break
 12314  		}
 12315  		t3 := auxToType(v_1_2.Aux)
 12316  		_ = v_1_2.Args[2]
 12317  		p3 := v_1_2.Args[0]
 12318  		v_1_2_2 := v_1_2.Args[2]
 12319  		if v_1_2_2.Op != OpStore {
 12320  			break
 12321  		}
 12322  		t4 := auxToType(v_1_2_2.Aux)
 12323  		_ = v_1_2_2.Args[2]
 12324  		p4 := v_1_2_2.Args[0]
 12325  		v_1_2_2_2 := v_1_2_2.Args[2]
 12326  		if v_1_2_2_2.Op != OpStore {
 12327  			break
 12328  		}
 12329  		t5 := auxToType(v_1_2_2_2.Aux)
 12330  		_ = v_1_2_2_2.Args[2]
 12331  		p5 := v_1_2_2_2.Args[0]
 12332  		mem := v_1_2_2_2.Args[2]
 12333  		if mem.Op != OpZero {
 12334  			break
 12335  		}
 12336  		n := auxIntToInt64(mem.AuxInt)
 12337  		p6 := mem.Args[0]
 12338  		if !(o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p6) && fe.CanSSA(t1) && disjoint(op, t1.Size(), p2, t2.Size()) && disjoint(op, t1.Size(), p3, t3.Size()) && disjoint(op, t1.Size(), p4, t4.Size()) && disjoint(op, t1.Size(), p5, t5.Size())) {
 12339  			break
 12340  		}
 12341  		b = mem.Block
 12342  		v0 := b.NewValue0(v.Pos, OpLoad, t1)
 12343  		v.copyOf(v0)
 12344  		v1 := b.NewValue0(v.Pos, OpOffPtr, op.Type)
 12345  		v1.AuxInt = int64ToAuxInt(o1)
 12346  		v1.AddArg(p6)
 12347  		v0.AddArg2(v1, mem)
 12348  		return true
 12349  	}
 12350  	// match: (Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
 12351  	// cond: t1.IsBoolean() && isSamePtr(p1, p2) && n >= o + 1
 12352  	// result: (ConstBool [false])
 12353  	for {
 12354  		t1 := v.Type
 12355  		if v_0.Op != OpOffPtr {
 12356  			break
 12357  		}
 12358  		o := auxIntToInt64(v_0.AuxInt)
 12359  		p1 := v_0.Args[0]
 12360  		if v_1.Op != OpZero {
 12361  			break
 12362  		}
 12363  		n := auxIntToInt64(v_1.AuxInt)
 12364  		p2 := v_1.Args[0]
 12365  		if !(t1.IsBoolean() && isSamePtr(p1, p2) && n >= o+1) {
 12366  			break
 12367  		}
 12368  		v.reset(OpConstBool)
 12369  		v.AuxInt = boolToAuxInt(false)
 12370  		return true
 12371  	}
 12372  	// match: (Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
 12373  	// cond: is8BitInt(t1) && isSamePtr(p1, p2) && n >= o + 1
 12374  	// result: (Const8 [0])
 12375  	for {
 12376  		t1 := v.Type
 12377  		if v_0.Op != OpOffPtr {
 12378  			break
 12379  		}
 12380  		o := auxIntToInt64(v_0.AuxInt)
 12381  		p1 := v_0.Args[0]
 12382  		if v_1.Op != OpZero {
 12383  			break
 12384  		}
 12385  		n := auxIntToInt64(v_1.AuxInt)
 12386  		p2 := v_1.Args[0]
 12387  		if !(is8BitInt(t1) && isSamePtr(p1, p2) && n >= o+1) {
 12388  			break
 12389  		}
 12390  		v.reset(OpConst8)
 12391  		v.AuxInt = int8ToAuxInt(0)
 12392  		return true
 12393  	}
 12394  	// match: (Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
 12395  	// cond: is16BitInt(t1) && isSamePtr(p1, p2) && n >= o + 2
 12396  	// result: (Const16 [0])
 12397  	for {
 12398  		t1 := v.Type
 12399  		if v_0.Op != OpOffPtr {
 12400  			break
 12401  		}
 12402  		o := auxIntToInt64(v_0.AuxInt)
 12403  		p1 := v_0.Args[0]
 12404  		if v_1.Op != OpZero {
 12405  			break
 12406  		}
 12407  		n := auxIntToInt64(v_1.AuxInt)
 12408  		p2 := v_1.Args[0]
 12409  		if !(is16BitInt(t1) && isSamePtr(p1, p2) && n >= o+2) {
 12410  			break
 12411  		}
 12412  		v.reset(OpConst16)
 12413  		v.AuxInt = int16ToAuxInt(0)
 12414  		return true
 12415  	}
 12416  	// match: (Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
 12417  	// cond: is32BitInt(t1) && isSamePtr(p1, p2) && n >= o + 4
 12418  	// result: (Const32 [0])
 12419  	for {
 12420  		t1 := v.Type
 12421  		if v_0.Op != OpOffPtr {
 12422  			break
 12423  		}
 12424  		o := auxIntToInt64(v_0.AuxInt)
 12425  		p1 := v_0.Args[0]
 12426  		if v_1.Op != OpZero {
 12427  			break
 12428  		}
 12429  		n := auxIntToInt64(v_1.AuxInt)
 12430  		p2 := v_1.Args[0]
 12431  		if !(is32BitInt(t1) && isSamePtr(p1, p2) && n >= o+4) {
 12432  			break
 12433  		}
 12434  		v.reset(OpConst32)
 12435  		v.AuxInt = int32ToAuxInt(0)
 12436  		return true
 12437  	}
 12438  	// match: (Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
 12439  	// cond: is64BitInt(t1) && isSamePtr(p1, p2) && n >= o + 8
 12440  	// result: (Const64 [0])
 12441  	for {
 12442  		t1 := v.Type
 12443  		if v_0.Op != OpOffPtr {
 12444  			break
 12445  		}
 12446  		o := auxIntToInt64(v_0.AuxInt)
 12447  		p1 := v_0.Args[0]
 12448  		if v_1.Op != OpZero {
 12449  			break
 12450  		}
 12451  		n := auxIntToInt64(v_1.AuxInt)
 12452  		p2 := v_1.Args[0]
 12453  		if !(is64BitInt(t1) && isSamePtr(p1, p2) && n >= o+8) {
 12454  			break
 12455  		}
 12456  		v.reset(OpConst64)
 12457  		v.AuxInt = int64ToAuxInt(0)
 12458  		return true
 12459  	}
 12460  	// match: (Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
 12461  	// cond: is32BitFloat(t1) && isSamePtr(p1, p2) && n >= o + 4
 12462  	// result: (Const32F [0])
 12463  	for {
 12464  		t1 := v.Type
 12465  		if v_0.Op != OpOffPtr {
 12466  			break
 12467  		}
 12468  		o := auxIntToInt64(v_0.AuxInt)
 12469  		p1 := v_0.Args[0]
 12470  		if v_1.Op != OpZero {
 12471  			break
 12472  		}
 12473  		n := auxIntToInt64(v_1.AuxInt)
 12474  		p2 := v_1.Args[0]
 12475  		if !(is32BitFloat(t1) && isSamePtr(p1, p2) && n >= o+4) {
 12476  			break
 12477  		}
 12478  		v.reset(OpConst32F)
 12479  		v.AuxInt = float32ToAuxInt(0)
 12480  		return true
 12481  	}
 12482  	// match: (Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
 12483  	// cond: is64BitFloat(t1) && isSamePtr(p1, p2) && n >= o + 8
 12484  	// result: (Const64F [0])
 12485  	for {
 12486  		t1 := v.Type
 12487  		if v_0.Op != OpOffPtr {
 12488  			break
 12489  		}
 12490  		o := auxIntToInt64(v_0.AuxInt)
 12491  		p1 := v_0.Args[0]
 12492  		if v_1.Op != OpZero {
 12493  			break
 12494  		}
 12495  		n := auxIntToInt64(v_1.AuxInt)
 12496  		p2 := v_1.Args[0]
 12497  		if !(is64BitFloat(t1) && isSamePtr(p1, p2) && n >= o+8) {
 12498  			break
 12499  		}
 12500  		v.reset(OpConst64F)
 12501  		v.AuxInt = float64ToAuxInt(0)
 12502  		return true
 12503  	}
 12504  	// match: (Load <t> _ _)
 12505  	// cond: t.IsStruct() && t.NumFields() == 0 && fe.CanSSA(t)
 12506  	// result: (StructMake0)
 12507  	for {
 12508  		t := v.Type
 12509  		if !(t.IsStruct() && t.NumFields() == 0 && fe.CanSSA(t)) {
 12510  			break
 12511  		}
 12512  		v.reset(OpStructMake0)
 12513  		return true
 12514  	}
 12515  	// match: (Load <t> ptr mem)
 12516  	// cond: t.IsStruct() && t.NumFields() == 1 && fe.CanSSA(t)
 12517  	// result: (StructMake1 (Load <t.FieldType(0)> (OffPtr <t.FieldType(0).PtrTo()> [0] ptr) mem))
 12518  	for {
 12519  		t := v.Type
 12520  		ptr := v_0
 12521  		mem := v_1
 12522  		if !(t.IsStruct() && t.NumFields() == 1 && fe.CanSSA(t)) {
 12523  			break
 12524  		}
 12525  		v.reset(OpStructMake1)
 12526  		v0 := b.NewValue0(v.Pos, OpLoad, t.FieldType(0))
 12527  		v1 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo())
 12528  		v1.AuxInt = int64ToAuxInt(0)
 12529  		v1.AddArg(ptr)
 12530  		v0.AddArg2(v1, mem)
 12531  		v.AddArg(v0)
 12532  		return true
 12533  	}
 12534  	// match: (Load <t> ptr mem)
 12535  	// cond: t.IsStruct() && t.NumFields() == 2 && fe.CanSSA(t)
 12536  	// result: (StructMake2 (Load <t.FieldType(0)> (OffPtr <t.FieldType(0).PtrTo()> [0] ptr) mem) (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem))
 12537  	for {
 12538  		t := v.Type
 12539  		ptr := v_0
 12540  		mem := v_1
 12541  		if !(t.IsStruct() && t.NumFields() == 2 && fe.CanSSA(t)) {
 12542  			break
 12543  		}
 12544  		v.reset(OpStructMake2)
 12545  		v0 := b.NewValue0(v.Pos, OpLoad, t.FieldType(0))
 12546  		v1 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo())
 12547  		v1.AuxInt = int64ToAuxInt(0)
 12548  		v1.AddArg(ptr)
 12549  		v0.AddArg2(v1, mem)
 12550  		v2 := b.NewValue0(v.Pos, OpLoad, t.FieldType(1))
 12551  		v3 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo())
 12552  		v3.AuxInt = int64ToAuxInt(t.FieldOff(1))
 12553  		v3.AddArg(ptr)
 12554  		v2.AddArg2(v3, mem)
 12555  		v.AddArg2(v0, v2)
 12556  		return true
 12557  	}
 12558  	// match: (Load <t> ptr mem)
 12559  	// cond: t.IsStruct() && t.NumFields() == 3 && fe.CanSSA(t)
 12560  	// result: (StructMake3 (Load <t.FieldType(0)> (OffPtr <t.FieldType(0).PtrTo()> [0] ptr) mem) (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem) (Load <t.FieldType(2)> (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] ptr) mem))
 12561  	for {
 12562  		t := v.Type
 12563  		ptr := v_0
 12564  		mem := v_1
 12565  		if !(t.IsStruct() && t.NumFields() == 3 && fe.CanSSA(t)) {
 12566  			break
 12567  		}
 12568  		v.reset(OpStructMake3)
 12569  		v0 := b.NewValue0(v.Pos, OpLoad, t.FieldType(0))
 12570  		v1 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo())
 12571  		v1.AuxInt = int64ToAuxInt(0)
 12572  		v1.AddArg(ptr)
 12573  		v0.AddArg2(v1, mem)
 12574  		v2 := b.NewValue0(v.Pos, OpLoad, t.FieldType(1))
 12575  		v3 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo())
 12576  		v3.AuxInt = int64ToAuxInt(t.FieldOff(1))
 12577  		v3.AddArg(ptr)
 12578  		v2.AddArg2(v3, mem)
 12579  		v4 := b.NewValue0(v.Pos, OpLoad, t.FieldType(2))
 12580  		v5 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(2).PtrTo())
 12581  		v5.AuxInt = int64ToAuxInt(t.FieldOff(2))
 12582  		v5.AddArg(ptr)
 12583  		v4.AddArg2(v5, mem)
 12584  		v.AddArg3(v0, v2, v4)
 12585  		return true
 12586  	}
 12587  	// match: (Load <t> ptr mem)
 12588  	// cond: t.IsStruct() && t.NumFields() == 4 && fe.CanSSA(t)
 12589  	// result: (StructMake4 (Load <t.FieldType(0)> (OffPtr <t.FieldType(0).PtrTo()> [0] ptr) mem) (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem) (Load <t.FieldType(2)> (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] ptr) mem) (Load <t.FieldType(3)> (OffPtr <t.FieldType(3).PtrTo()> [t.FieldOff(3)] ptr) mem))
 12590  	for {
 12591  		t := v.Type
 12592  		ptr := v_0
 12593  		mem := v_1
 12594  		if !(t.IsStruct() && t.NumFields() == 4 && fe.CanSSA(t)) {
 12595  			break
 12596  		}
 12597  		v.reset(OpStructMake4)
 12598  		v0 := b.NewValue0(v.Pos, OpLoad, t.FieldType(0))
 12599  		v1 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo())
 12600  		v1.AuxInt = int64ToAuxInt(0)
 12601  		v1.AddArg(ptr)
 12602  		v0.AddArg2(v1, mem)
 12603  		v2 := b.NewValue0(v.Pos, OpLoad, t.FieldType(1))
 12604  		v3 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo())
 12605  		v3.AuxInt = int64ToAuxInt(t.FieldOff(1))
 12606  		v3.AddArg(ptr)
 12607  		v2.AddArg2(v3, mem)
 12608  		v4 := b.NewValue0(v.Pos, OpLoad, t.FieldType(2))
 12609  		v5 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(2).PtrTo())
 12610  		v5.AuxInt = int64ToAuxInt(t.FieldOff(2))
 12611  		v5.AddArg(ptr)
 12612  		v4.AddArg2(v5, mem)
 12613  		v6 := b.NewValue0(v.Pos, OpLoad, t.FieldType(3))
 12614  		v7 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(3).PtrTo())
 12615  		v7.AuxInt = int64ToAuxInt(t.FieldOff(3))
 12616  		v7.AddArg(ptr)
 12617  		v6.AddArg2(v7, mem)
 12618  		v.AddArg4(v0, v2, v4, v6)
 12619  		return true
 12620  	}
 12621  	// match: (Load <t> _ _)
 12622  	// cond: t.IsArray() && t.NumElem() == 0
 12623  	// result: (ArrayMake0)
 12624  	for {
 12625  		t := v.Type
 12626  		if !(t.IsArray() && t.NumElem() == 0) {
 12627  			break
 12628  		}
 12629  		v.reset(OpArrayMake0)
 12630  		return true
 12631  	}
 12632  	// match: (Load <t> ptr mem)
 12633  	// cond: t.IsArray() && t.NumElem() == 1 && fe.CanSSA(t)
 12634  	// result: (ArrayMake1 (Load <t.Elem()> ptr mem))
 12635  	for {
 12636  		t := v.Type
 12637  		ptr := v_0
 12638  		mem := v_1
 12639  		if !(t.IsArray() && t.NumElem() == 1 && fe.CanSSA(t)) {
 12640  			break
 12641  		}
 12642  		v.reset(OpArrayMake1)
 12643  		v0 := b.NewValue0(v.Pos, OpLoad, t.Elem())
 12644  		v0.AddArg2(ptr, mem)
 12645  		v.AddArg(v0)
 12646  		return true
 12647  	}
 12648  	return false
 12649  }
 12650  func rewriteValuegeneric_OpLsh16x16(v *Value) bool {
 12651  	v_1 := v.Args[1]
 12652  	v_0 := v.Args[0]
 12653  	b := v.Block
 12654  	// match: (Lsh16x16 <t> x (Const16 [c]))
 12655  	// result: (Lsh16x64 x (Const64 <t> [int64(uint16(c))]))
 12656  	for {
 12657  		t := v.Type
 12658  		x := v_0
 12659  		if v_1.Op != OpConst16 {
 12660  			break
 12661  		}
 12662  		c := auxIntToInt16(v_1.AuxInt)
 12663  		v.reset(OpLsh16x64)
 12664  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 12665  		v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
 12666  		v.AddArg2(x, v0)
 12667  		return true
 12668  	}
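	// For instance, the rule above reinterprets the shift count as unsigned,
	// so (Const16 [-1]) becomes (Const64 <t> [65535]); a later Lsh16x64 rule
	// then folds any count of 16 or more to (Const16 [0]).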
 12669  	// match: (Lsh16x16 (Const16 [0]) _)
 12670  	// result: (Const16 [0])
 12671  	for {
 12672  		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
 12673  			break
 12674  		}
 12675  		v.reset(OpConst16)
 12676  		v.AuxInt = int16ToAuxInt(0)
 12677  		return true
 12678  	}
 12679  	return false
 12680  }
 12681  func rewriteValuegeneric_OpLsh16x32(v *Value) bool {
 12682  	v_1 := v.Args[1]
 12683  	v_0 := v.Args[0]
 12684  	b := v.Block
 12685  	// match: (Lsh16x32 <t> x (Const32 [c]))
 12686  	// result: (Lsh16x64 x (Const64 <t> [int64(uint32(c))]))
 12687  	for {
 12688  		t := v.Type
 12689  		x := v_0
 12690  		if v_1.Op != OpConst32 {
 12691  			break
 12692  		}
 12693  		c := auxIntToInt32(v_1.AuxInt)
 12694  		v.reset(OpLsh16x64)
 12695  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 12696  		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
 12697  		v.AddArg2(x, v0)
 12698  		return true
 12699  	}
 12700  	// match: (Lsh16x32 (Const16 [0]) _)
 12701  	// result: (Const16 [0])
 12702  	for {
 12703  		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
 12704  			break
 12705  		}
 12706  		v.reset(OpConst16)
 12707  		v.AuxInt = int16ToAuxInt(0)
 12708  		return true
 12709  	}
 12710  	return false
 12711  }
 12712  func rewriteValuegeneric_OpLsh16x64(v *Value) bool {
 12713  	v_1 := v.Args[1]
 12714  	v_0 := v.Args[0]
 12715  	b := v.Block
 12716  	typ := &b.Func.Config.Types
 12717  	// match: (Lsh16x64 (Const16 [c]) (Const64 [d]))
 12718  	// result: (Const16 [c << uint64(d)])
 12719  	for {
 12720  		if v_0.Op != OpConst16 {
 12721  			break
 12722  		}
 12723  		c := auxIntToInt16(v_0.AuxInt)
 12724  		if v_1.Op != OpConst64 {
 12725  			break
 12726  		}
 12727  		d := auxIntToInt64(v_1.AuxInt)
 12728  		v.reset(OpConst16)
 12729  		v.AuxInt = int16ToAuxInt(c << uint64(d))
 12730  		return true
 12731  	}
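	// For instance, the rule above folds (Lsh16x64 (Const16 [3]) (Const64 [2]))
	// to (Const16 [12]); the shift is performed in int16, so any high bits
	// that do not fit in 16 bits are discarded.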
 12732  	// match: (Lsh16x64 x (Const64 [0]))
 12733  	// result: x
 12734  	for {
 12735  		x := v_0
 12736  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
 12737  			break
 12738  		}
 12739  		v.copyOf(x)
 12740  		return true
 12741  	}
 12742  	// match: (Lsh16x64 (Const16 [0]) _)
 12743  	// result: (Const16 [0])
 12744  	for {
 12745  		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
 12746  			break
 12747  		}
 12748  		v.reset(OpConst16)
 12749  		v.AuxInt = int16ToAuxInt(0)
 12750  		return true
 12751  	}
 12752  	// match: (Lsh16x64 _ (Const64 [c]))
 12753  	// cond: uint64(c) >= 16
 12754  	// result: (Const16 [0])
 12755  	for {
 12756  		if v_1.Op != OpConst64 {
 12757  			break
 12758  		}
 12759  		c := auxIntToInt64(v_1.AuxInt)
 12760  		if !(uint64(c) >= 16) {
 12761  			break
 12762  		}
 12763  		v.reset(OpConst16)
 12764  		v.AuxInt = int16ToAuxInt(0)
 12765  		return true
 12766  	}
 12767  	// match: (Lsh16x64 <t> (Lsh16x64 x (Const64 [c])) (Const64 [d]))
 12768  	// cond: !uaddOvf(c,d)
 12769  	// result: (Lsh16x64 x (Const64 <t> [c+d]))
 12770  	for {
 12771  		t := v.Type
 12772  		if v_0.Op != OpLsh16x64 {
 12773  			break
 12774  		}
 12775  		_ = v_0.Args[1]
 12776  		x := v_0.Args[0]
 12777  		v_0_1 := v_0.Args[1]
 12778  		if v_0_1.Op != OpConst64 {
 12779  			break
 12780  		}
 12781  		c := auxIntToInt64(v_0_1.AuxInt)
 12782  		if v_1.Op != OpConst64 {
 12783  			break
 12784  		}
 12785  		d := auxIntToInt64(v_1.AuxInt)
 12786  		if !(!uaddOvf(c, d)) {
 12787  			break
 12788  		}
 12789  		v.reset(OpLsh16x64)
 12790  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 12791  		v0.AuxInt = int64ToAuxInt(c + d)
 12792  		v.AddArg2(x, v0)
 12793  		return true
 12794  	}
 12795  	// match: (Lsh16x64 i:(Rsh16x64 x (Const64 [c])) (Const64 [c]))
 12796  	// cond: c >= 0 && c < 16 && i.Uses == 1
 12797  	// result: (And16 x (Const16 <v.Type> [int16(-1) << c]))
 12798  	for {
 12799  		i := v_0
 12800  		if i.Op != OpRsh16x64 {
 12801  			break
 12802  		}
 12803  		_ = i.Args[1]
 12804  		x := i.Args[0]
 12805  		i_1 := i.Args[1]
 12806  		if i_1.Op != OpConst64 {
 12807  			break
 12808  		}
 12809  		c := auxIntToInt64(i_1.AuxInt)
 12810  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 16 && i.Uses == 1) {
 12811  			break
 12812  		}
 12813  		v.reset(OpAnd16)
 12814  		v0 := b.NewValue0(v.Pos, OpConst16, v.Type)
 12815  		v0.AuxInt = int16ToAuxInt(int16(-1) << c)
 12816  		v.AddArg2(x, v0)
 12817  		return true
 12818  	}
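	// For instance, with c == 4 the rule above turns ((x >> 4) << 4) into
	// (And16 x (Const16 [-16])), i.e. masking with 0xfff0, which clears the
	// low four bits without materializing the two shifts. The i.Uses == 1
	// check ensures the intermediate right-shift is not needed elsewhere.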
 12819  	// match: (Lsh16x64 i:(Rsh16Ux64 x (Const64 [c])) (Const64 [c]))
 12820  	// cond: c >= 0 && c < 16 && i.Uses == 1
 12821  	// result: (And16 x (Const16 <v.Type> [int16(-1) << c]))
 12822  	for {
 12823  		i := v_0
 12824  		if i.Op != OpRsh16Ux64 {
 12825  			break
 12826  		}
 12827  		_ = i.Args[1]
 12828  		x := i.Args[0]
 12829  		i_1 := i.Args[1]
 12830  		if i_1.Op != OpConst64 {
 12831  			break
 12832  		}
 12833  		c := auxIntToInt64(i_1.AuxInt)
 12834  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 16 && i.Uses == 1) {
 12835  			break
 12836  		}
 12837  		v.reset(OpAnd16)
 12838  		v0 := b.NewValue0(v.Pos, OpConst16, v.Type)
 12839  		v0.AuxInt = int16ToAuxInt(int16(-1) << c)
 12840  		v.AddArg2(x, v0)
 12841  		return true
 12842  	}
 12843  	// match: (Lsh16x64 (Rsh16Ux64 (Lsh16x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
 12844  	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
 12845  	// result: (Lsh16x64 x (Const64 <typ.UInt64> [c1-c2+c3]))
 12846  	for {
 12847  		if v_0.Op != OpRsh16Ux64 {
 12848  			break
 12849  		}
 12850  		_ = v_0.Args[1]
 12851  		v_0_0 := v_0.Args[0]
 12852  		if v_0_0.Op != OpLsh16x64 {
 12853  			break
 12854  		}
 12855  		_ = v_0_0.Args[1]
 12856  		x := v_0_0.Args[0]
 12857  		v_0_0_1 := v_0_0.Args[1]
 12858  		if v_0_0_1.Op != OpConst64 {
 12859  			break
 12860  		}
 12861  		c1 := auxIntToInt64(v_0_0_1.AuxInt)
 12862  		v_0_1 := v_0.Args[1]
 12863  		if v_0_1.Op != OpConst64 {
 12864  			break
 12865  		}
 12866  		c2 := auxIntToInt64(v_0_1.AuxInt)
 12867  		if v_1.Op != OpConst64 {
 12868  			break
 12869  		}
 12870  		c3 := auxIntToInt64(v_1.AuxInt)
 12871  		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
 12872  			break
 12873  		}
 12874  		v.reset(OpLsh16x64)
 12875  		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
 12876  		v0.AuxInt = int64ToAuxInt(c1 - c2 + c3)
 12877  		v.AddArg2(x, v0)
 12878  		return true
 12879  	}
 12880  	return false
 12881  }
 12882  func rewriteValuegeneric_OpLsh16x8(v *Value) bool {
 12883  	v_1 := v.Args[1]
 12884  	v_0 := v.Args[0]
 12885  	b := v.Block
 12886  	// match: (Lsh16x8 <t> x (Const8 [c]))
 12887  	// result: (Lsh16x64 x (Const64 <t> [int64(uint8(c))]))
 12888  	for {
 12889  		t := v.Type
 12890  		x := v_0
 12891  		if v_1.Op != OpConst8 {
 12892  			break
 12893  		}
 12894  		c := auxIntToInt8(v_1.AuxInt)
 12895  		v.reset(OpLsh16x64)
 12896  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 12897  		v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
 12898  		v.AddArg2(x, v0)
 12899  		return true
 12900  	}
 12901  	// match: (Lsh16x8 (Const16 [0]) _)
 12902  	// result: (Const16 [0])
 12903  	for {
 12904  		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
 12905  			break
 12906  		}
 12907  		v.reset(OpConst16)
 12908  		v.AuxInt = int16ToAuxInt(0)
 12909  		return true
 12910  	}
 12911  	return false
 12912  }
 12913  func rewriteValuegeneric_OpLsh32x16(v *Value) bool {
 12914  	v_1 := v.Args[1]
 12915  	v_0 := v.Args[0]
 12916  	b := v.Block
 12917  	// match: (Lsh32x16 <t> x (Const16 [c]))
 12918  	// result: (Lsh32x64 x (Const64 <t> [int64(uint16(c))]))
 12919  	for {
 12920  		t := v.Type
 12921  		x := v_0
 12922  		if v_1.Op != OpConst16 {
 12923  			break
 12924  		}
 12925  		c := auxIntToInt16(v_1.AuxInt)
 12926  		v.reset(OpLsh32x64)
 12927  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 12928  		v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
 12929  		v.AddArg2(x, v0)
 12930  		return true
 12931  	}
 12932  	// match: (Lsh32x16 (Const32 [0]) _)
 12933  	// result: (Const32 [0])
 12934  	for {
 12935  		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 12936  			break
 12937  		}
 12938  		v.reset(OpConst32)
 12939  		v.AuxInt = int32ToAuxInt(0)
 12940  		return true
 12941  	}
 12942  	return false
 12943  }
 12944  func rewriteValuegeneric_OpLsh32x32(v *Value) bool {
 12945  	v_1 := v.Args[1]
 12946  	v_0 := v.Args[0]
 12947  	b := v.Block
 12948  	// match: (Lsh32x32 <t> x (Const32 [c]))
 12949  	// result: (Lsh32x64 x (Const64 <t> [int64(uint32(c))]))
 12950  	for {
 12951  		t := v.Type
 12952  		x := v_0
 12953  		if v_1.Op != OpConst32 {
 12954  			break
 12955  		}
 12956  		c := auxIntToInt32(v_1.AuxInt)
 12957  		v.reset(OpLsh32x64)
 12958  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 12959  		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
 12960  		v.AddArg2(x, v0)
 12961  		return true
 12962  	}
 12963  	// match: (Lsh32x32 (Const32 [0]) _)
 12964  	// result: (Const32 [0])
 12965  	for {
 12966  		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 12967  			break
 12968  		}
 12969  		v.reset(OpConst32)
 12970  		v.AuxInt = int32ToAuxInt(0)
 12971  		return true
 12972  	}
 12973  	return false
 12974  }
 12975  func rewriteValuegeneric_OpLsh32x64(v *Value) bool {
 12976  	v_1 := v.Args[1]
 12977  	v_0 := v.Args[0]
 12978  	b := v.Block
 12979  	typ := &b.Func.Config.Types
 12980  	// match: (Lsh32x64 (Const32 [c]) (Const64 [d]))
 12981  	// result: (Const32 [c << uint64(d)])
 12982  	for {
 12983  		if v_0.Op != OpConst32 {
 12984  			break
 12985  		}
 12986  		c := auxIntToInt32(v_0.AuxInt)
 12987  		if v_1.Op != OpConst64 {
 12988  			break
 12989  		}
 12990  		d := auxIntToInt64(v_1.AuxInt)
 12991  		v.reset(OpConst32)
 12992  		v.AuxInt = int32ToAuxInt(c << uint64(d))
 12993  		return true
 12994  	}
 12995  	// match: (Lsh32x64 x (Const64 [0]))
 12996  	// result: x
 12997  	for {
 12998  		x := v_0
 12999  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
 13000  			break
 13001  		}
 13002  		v.copyOf(x)
 13003  		return true
 13004  	}
 13005  	// match: (Lsh32x64 (Const32 [0]) _)
 13006  	// result: (Const32 [0])
 13007  	for {
 13008  		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 13009  			break
 13010  		}
 13011  		v.reset(OpConst32)
 13012  		v.AuxInt = int32ToAuxInt(0)
 13013  		return true
 13014  	}
 13015  	// match: (Lsh32x64 _ (Const64 [c]))
 13016  	// cond: uint64(c) >= 32
 13017  	// result: (Const32 [0])
 13018  	for {
 13019  		if v_1.Op != OpConst64 {
 13020  			break
 13021  		}
 13022  		c := auxIntToInt64(v_1.AuxInt)
 13023  		if !(uint64(c) >= 32) {
 13024  			break
 13025  		}
 13026  		v.reset(OpConst32)
 13027  		v.AuxInt = int32ToAuxInt(0)
 13028  		return true
 13029  	}
 13030  	// match: (Lsh32x64 <t> (Lsh32x64 x (Const64 [c])) (Const64 [d]))
 13031  	// cond: !uaddOvf(c,d)
 13032  	// result: (Lsh32x64 x (Const64 <t> [c+d]))
 13033  	for {
 13034  		t := v.Type
 13035  		if v_0.Op != OpLsh32x64 {
 13036  			break
 13037  		}
 13038  		_ = v_0.Args[1]
 13039  		x := v_0.Args[0]
 13040  		v_0_1 := v_0.Args[1]
 13041  		if v_0_1.Op != OpConst64 {
 13042  			break
 13043  		}
 13044  		c := auxIntToInt64(v_0_1.AuxInt)
 13045  		if v_1.Op != OpConst64 {
 13046  			break
 13047  		}
 13048  		d := auxIntToInt64(v_1.AuxInt)
 13049  		if !(!uaddOvf(c, d)) {
 13050  			break
 13051  		}
 13052  		v.reset(OpLsh32x64)
 13053  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 13054  		v0.AuxInt = int64ToAuxInt(c + d)
 13055  		v.AddArg2(x, v0)
 13056  		return true
 13057  	}
 13058  	// match: (Lsh32x64 i:(Rsh32x64 x (Const64 [c])) (Const64 [c]))
 13059  	// cond: c >= 0 && c < 32 && i.Uses == 1
 13060  	// result: (And32 x (Const32 <v.Type> [int32(-1) << c]))
 13061  	for {
 13062  		i := v_0
 13063  		if i.Op != OpRsh32x64 {
 13064  			break
 13065  		}
 13066  		_ = i.Args[1]
 13067  		x := i.Args[0]
 13068  		i_1 := i.Args[1]
 13069  		if i_1.Op != OpConst64 {
 13070  			break
 13071  		}
 13072  		c := auxIntToInt64(i_1.AuxInt)
 13073  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 32 && i.Uses == 1) {
 13074  			break
 13075  		}
 13076  		v.reset(OpAnd32)
 13077  		v0 := b.NewValue0(v.Pos, OpConst32, v.Type)
 13078  		v0.AuxInt = int32ToAuxInt(int32(-1) << c)
 13079  		v.AddArg2(x, v0)
 13080  		return true
 13081  	}
 13082  	// match: (Lsh32x64 i:(Rsh32Ux64 x (Const64 [c])) (Const64 [c]))
 13083  	// cond: c >= 0 && c < 32 && i.Uses == 1
 13084  	// result: (And32 x (Const32 <v.Type> [int32(-1) << c]))
 13085  	for {
 13086  		i := v_0
 13087  		if i.Op != OpRsh32Ux64 {
 13088  			break
 13089  		}
 13090  		_ = i.Args[1]
 13091  		x := i.Args[0]
 13092  		i_1 := i.Args[1]
 13093  		if i_1.Op != OpConst64 {
 13094  			break
 13095  		}
 13096  		c := auxIntToInt64(i_1.AuxInt)
 13097  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 32 && i.Uses == 1) {
 13098  			break
 13099  		}
 13100  		v.reset(OpAnd32)
 13101  		v0 := b.NewValue0(v.Pos, OpConst32, v.Type)
 13102  		v0.AuxInt = int32ToAuxInt(int32(-1) << c)
 13103  		v.AddArg2(x, v0)
 13104  		return true
 13105  	}
 13106  	// match: (Lsh32x64 (Rsh32Ux64 (Lsh32x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
 13107  	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
 13108  	// result: (Lsh32x64 x (Const64 <typ.UInt64> [c1-c2+c3]))
 13109  	for {
 13110  		if v_0.Op != OpRsh32Ux64 {
 13111  			break
 13112  		}
 13113  		_ = v_0.Args[1]
 13114  		v_0_0 := v_0.Args[0]
 13115  		if v_0_0.Op != OpLsh32x64 {
 13116  			break
 13117  		}
 13118  		_ = v_0_0.Args[1]
 13119  		x := v_0_0.Args[0]
 13120  		v_0_0_1 := v_0_0.Args[1]
 13121  		if v_0_0_1.Op != OpConst64 {
 13122  			break
 13123  		}
 13124  		c1 := auxIntToInt64(v_0_0_1.AuxInt)
 13125  		v_0_1 := v_0.Args[1]
 13126  		if v_0_1.Op != OpConst64 {
 13127  			break
 13128  		}
 13129  		c2 := auxIntToInt64(v_0_1.AuxInt)
 13130  		if v_1.Op != OpConst64 {
 13131  			break
 13132  		}
 13133  		c3 := auxIntToInt64(v_1.AuxInt)
 13134  		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
 13135  			break
 13136  		}
 13137  		v.reset(OpLsh32x64)
 13138  		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
 13139  		v0.AuxInt = int64ToAuxInt(c1 - c2 + c3)
 13140  		v.AddArg2(x, v0)
 13141  		return true
 13142  	}
 13143  	return false
 13144  }
 13145  func rewriteValuegeneric_OpLsh32x8(v *Value) bool {
 13146  	v_1 := v.Args[1]
 13147  	v_0 := v.Args[0]
 13148  	b := v.Block
 13149  	// match: (Lsh32x8 <t> x (Const8 [c]))
 13150  	// result: (Lsh32x64 x (Const64 <t> [int64(uint8(c))]))
 13151  	for {
 13152  		t := v.Type
 13153  		x := v_0
 13154  		if v_1.Op != OpConst8 {
 13155  			break
 13156  		}
 13157  		c := auxIntToInt8(v_1.AuxInt)
 13158  		v.reset(OpLsh32x64)
 13159  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 13160  		v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
 13161  		v.AddArg2(x, v0)
 13162  		return true
 13163  	}
 13164  	// match: (Lsh32x8 (Const32 [0]) _)
 13165  	// result: (Const32 [0])
 13166  	for {
 13167  		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 13168  			break
 13169  		}
 13170  		v.reset(OpConst32)
 13171  		v.AuxInt = int32ToAuxInt(0)
 13172  		return true
 13173  	}
 13174  	return false
 13175  }
 13176  func rewriteValuegeneric_OpLsh64x16(v *Value) bool {
 13177  	v_1 := v.Args[1]
 13178  	v_0 := v.Args[0]
 13179  	b := v.Block
 13180  	// match: (Lsh64x16 <t> x (Const16 [c]))
 13181  	// result: (Lsh64x64 x (Const64 <t> [int64(uint16(c))]))
 13182  	for {
 13183  		t := v.Type
 13184  		x := v_0
 13185  		if v_1.Op != OpConst16 {
 13186  			break
 13187  		}
 13188  		c := auxIntToInt16(v_1.AuxInt)
 13189  		v.reset(OpLsh64x64)
 13190  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 13191  		v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
 13192  		v.AddArg2(x, v0)
 13193  		return true
 13194  	}
 13195  	// match: (Lsh64x16 (Const64 [0]) _)
 13196  	// result: (Const64 [0])
 13197  	for {
 13198  		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 13199  			break
 13200  		}
 13201  		v.reset(OpConst64)
 13202  		v.AuxInt = int64ToAuxInt(0)
 13203  		return true
 13204  	}
 13205  	return false
 13206  }
 13207  func rewriteValuegeneric_OpLsh64x32(v *Value) bool {
 13208  	v_1 := v.Args[1]
 13209  	v_0 := v.Args[0]
 13210  	b := v.Block
 13211  	// match: (Lsh64x32 <t> x (Const32 [c]))
 13212  	// result: (Lsh64x64 x (Const64 <t> [int64(uint32(c))]))
 13213  	for {
 13214  		t := v.Type
 13215  		x := v_0
 13216  		if v_1.Op != OpConst32 {
 13217  			break
 13218  		}
 13219  		c := auxIntToInt32(v_1.AuxInt)
 13220  		v.reset(OpLsh64x64)
 13221  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 13222  		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
 13223  		v.AddArg2(x, v0)
 13224  		return true
 13225  	}
 13226  	// match: (Lsh64x32 (Const64 [0]) _)
 13227  	// result: (Const64 [0])
 13228  	for {
 13229  		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 13230  			break
 13231  		}
 13232  		v.reset(OpConst64)
 13233  		v.AuxInt = int64ToAuxInt(0)
 13234  		return true
 13235  	}
 13236  	return false
 13237  }
 13238  func rewriteValuegeneric_OpLsh64x64(v *Value) bool {
 13239  	v_1 := v.Args[1]
 13240  	v_0 := v.Args[0]
 13241  	b := v.Block
 13242  	typ := &b.Func.Config.Types
 13243  	// match: (Lsh64x64 (Const64 [c]) (Const64 [d]))
 13244  	// result: (Const64 [c << uint64(d)])
 13245  	for {
 13246  		if v_0.Op != OpConst64 {
 13247  			break
 13248  		}
 13249  		c := auxIntToInt64(v_0.AuxInt)
 13250  		if v_1.Op != OpConst64 {
 13251  			break
 13252  		}
 13253  		d := auxIntToInt64(v_1.AuxInt)
 13254  		v.reset(OpConst64)
 13255  		v.AuxInt = int64ToAuxInt(c << uint64(d))
 13256  		return true
 13257  	}
 13258  	// match: (Lsh64x64 x (Const64 [0]))
 13259  	// result: x
 13260  	for {
 13261  		x := v_0
 13262  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
 13263  			break
 13264  		}
 13265  		v.copyOf(x)
 13266  		return true
 13267  	}
 13268  	// match: (Lsh64x64 (Const64 [0]) _)
 13269  	// result: (Const64 [0])
 13270  	for {
 13271  		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 13272  			break
 13273  		}
 13274  		v.reset(OpConst64)
 13275  		v.AuxInt = int64ToAuxInt(0)
 13276  		return true
 13277  	}
 13278  	// match: (Lsh64x64 _ (Const64 [c]))
 13279  	// cond: uint64(c) >= 64
 13280  	// result: (Const64 [0])
 13281  	for {
 13282  		if v_1.Op != OpConst64 {
 13283  			break
 13284  		}
 13285  		c := auxIntToInt64(v_1.AuxInt)
 13286  		if !(uint64(c) >= 64) {
 13287  			break
 13288  		}
 13289  		v.reset(OpConst64)
 13290  		v.AuxInt = int64ToAuxInt(0)
 13291  		return true
 13292  	}
 13293  	// match: (Lsh64x64 <t> (Lsh64x64 x (Const64 [c])) (Const64 [d]))
 13294  	// cond: !uaddOvf(c,d)
 13295  	// result: (Lsh64x64 x (Const64 <t> [c+d]))
 13296  	for {
 13297  		t := v.Type
 13298  		if v_0.Op != OpLsh64x64 {
 13299  			break
 13300  		}
 13301  		_ = v_0.Args[1]
 13302  		x := v_0.Args[0]
 13303  		v_0_1 := v_0.Args[1]
 13304  		if v_0_1.Op != OpConst64 {
 13305  			break
 13306  		}
 13307  		c := auxIntToInt64(v_0_1.AuxInt)
 13308  		if v_1.Op != OpConst64 {
 13309  			break
 13310  		}
 13311  		d := auxIntToInt64(v_1.AuxInt)
 13312  		if !(!uaddOvf(c, d)) {
 13313  			break
 13314  		}
 13315  		v.reset(OpLsh64x64)
 13316  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 13317  		v0.AuxInt = int64ToAuxInt(c + d)
 13318  		v.AddArg2(x, v0)
 13319  		return true
 13320  	}
 13321  	// match: (Lsh64x64 i:(Rsh64x64 x (Const64 [c])) (Const64 [c]))
 13322  	// cond: c >= 0 && c < 64 && i.Uses == 1
 13323  	// result: (And64 x (Const64 <v.Type> [int64(-1) << c]))
 13324  	for {
 13325  		i := v_0
 13326  		if i.Op != OpRsh64x64 {
 13327  			break
 13328  		}
 13329  		_ = i.Args[1]
 13330  		x := i.Args[0]
 13331  		i_1 := i.Args[1]
 13332  		if i_1.Op != OpConst64 {
 13333  			break
 13334  		}
 13335  		c := auxIntToInt64(i_1.AuxInt)
 13336  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 64 && i.Uses == 1) {
 13337  			break
 13338  		}
 13339  		v.reset(OpAnd64)
 13340  		v0 := b.NewValue0(v.Pos, OpConst64, v.Type)
 13341  		v0.AuxInt = int64ToAuxInt(int64(-1) << c)
 13342  		v.AddArg2(x, v0)
 13343  		return true
 13344  	}
 13345  	// match: (Lsh64x64 i:(Rsh64Ux64 x (Const64 [c])) (Const64 [c]))
 13346  	// cond: c >= 0 && c < 64 && i.Uses == 1
 13347  	// result: (And64 x (Const64 <v.Type> [int64(-1) << c]))
 13348  	for {
 13349  		i := v_0
 13350  		if i.Op != OpRsh64Ux64 {
 13351  			break
 13352  		}
 13353  		_ = i.Args[1]
 13354  		x := i.Args[0]
 13355  		i_1 := i.Args[1]
 13356  		if i_1.Op != OpConst64 {
 13357  			break
 13358  		}
 13359  		c := auxIntToInt64(i_1.AuxInt)
 13360  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 64 && i.Uses == 1) {
 13361  			break
 13362  		}
 13363  		v.reset(OpAnd64)
 13364  		v0 := b.NewValue0(v.Pos, OpConst64, v.Type)
 13365  		v0.AuxInt = int64ToAuxInt(int64(-1) << c)
 13366  		v.AddArg2(x, v0)
 13367  		return true
 13368  	}
 13369  	// match: (Lsh64x64 (Rsh64Ux64 (Lsh64x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
 13370  	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
 13371  	// result: (Lsh64x64 x (Const64 <typ.UInt64> [c1-c2+c3]))
 13372  	for {
 13373  		if v_0.Op != OpRsh64Ux64 {
 13374  			break
 13375  		}
 13376  		_ = v_0.Args[1]
 13377  		v_0_0 := v_0.Args[0]
 13378  		if v_0_0.Op != OpLsh64x64 {
 13379  			break
 13380  		}
 13381  		_ = v_0_0.Args[1]
 13382  		x := v_0_0.Args[0]
 13383  		v_0_0_1 := v_0_0.Args[1]
 13384  		if v_0_0_1.Op != OpConst64 {
 13385  			break
 13386  		}
 13387  		c1 := auxIntToInt64(v_0_0_1.AuxInt)
 13388  		v_0_1 := v_0.Args[1]
 13389  		if v_0_1.Op != OpConst64 {
 13390  			break
 13391  		}
 13392  		c2 := auxIntToInt64(v_0_1.AuxInt)
 13393  		if v_1.Op != OpConst64 {
 13394  			break
 13395  		}
 13396  		c3 := auxIntToInt64(v_1.AuxInt)
 13397  		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
 13398  			break
 13399  		}
 13400  		v.reset(OpLsh64x64)
 13401  		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
 13402  		v0.AuxInt = int64ToAuxInt(c1 - c2 + c3)
 13403  		v.AddArg2(x, v0)
 13404  		return true
 13405  	}
 13406  	return false
 13407  }
 13408  func rewriteValuegeneric_OpLsh64x8(v *Value) bool {
 13409  	v_1 := v.Args[1]
 13410  	v_0 := v.Args[0]
 13411  	b := v.Block
 13412  	// match: (Lsh64x8 <t> x (Const8 [c]))
 13413  	// result: (Lsh64x64 x (Const64 <t> [int64(uint8(c))]))
 13414  	for {
 13415  		t := v.Type
 13416  		x := v_0
 13417  		if v_1.Op != OpConst8 {
 13418  			break
 13419  		}
 13420  		c := auxIntToInt8(v_1.AuxInt)
 13421  		v.reset(OpLsh64x64)
 13422  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 13423  		v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
 13424  		v.AddArg2(x, v0)
 13425  		return true
 13426  	}
 13427  	// match: (Lsh64x8 (Const64 [0]) _)
 13428  	// result: (Const64 [0])
 13429  	for {
 13430  		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 13431  			break
 13432  		}
 13433  		v.reset(OpConst64)
 13434  		v.AuxInt = int64ToAuxInt(0)
 13435  		return true
 13436  	}
 13437  	return false
 13438  }
 13439  func rewriteValuegeneric_OpLsh8x16(v *Value) bool {
 13440  	v_1 := v.Args[1]
 13441  	v_0 := v.Args[0]
 13442  	b := v.Block
 13443  	// match: (Lsh8x16 <t> x (Const16 [c]))
 13444  	// result: (Lsh8x64 x (Const64 <t> [int64(uint16(c))]))
 13445  	for {
 13446  		t := v.Type
 13447  		x := v_0
 13448  		if v_1.Op != OpConst16 {
 13449  			break
 13450  		}
 13451  		c := auxIntToInt16(v_1.AuxInt)
 13452  		v.reset(OpLsh8x64)
 13453  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 13454  		v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
 13455  		v.AddArg2(x, v0)
 13456  		return true
 13457  	}
 13458  	// match: (Lsh8x16 (Const8 [0]) _)
 13459  	// result: (Const8 [0])
 13460  	for {
 13461  		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
 13462  			break
 13463  		}
 13464  		v.reset(OpConst8)
 13465  		v.AuxInt = int8ToAuxInt(0)
 13466  		return true
 13467  	}
 13468  	return false
 13469  }
 13470  func rewriteValuegeneric_OpLsh8x32(v *Value) bool {
 13471  	v_1 := v.Args[1]
 13472  	v_0 := v.Args[0]
 13473  	b := v.Block
 13474  	// match: (Lsh8x32 <t> x (Const32 [c]))
 13475  	// result: (Lsh8x64 x (Const64 <t> [int64(uint32(c))]))
 13476  	for {
 13477  		t := v.Type
 13478  		x := v_0
 13479  		if v_1.Op != OpConst32 {
 13480  			break
 13481  		}
 13482  		c := auxIntToInt32(v_1.AuxInt)
 13483  		v.reset(OpLsh8x64)
 13484  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 13485  		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
 13486  		v.AddArg2(x, v0)
 13487  		return true
 13488  	}
 13489  	// match: (Lsh8x32 (Const8 [0]) _)
 13490  	// result: (Const8 [0])
 13491  	for {
 13492  		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
 13493  			break
 13494  		}
 13495  		v.reset(OpConst8)
 13496  		v.AuxInt = int8ToAuxInt(0)
 13497  		return true
 13498  	}
 13499  	return false
 13500  }
 13501  func rewriteValuegeneric_OpLsh8x64(v *Value) bool {
 13502  	v_1 := v.Args[1]
 13503  	v_0 := v.Args[0]
 13504  	b := v.Block
 13505  	typ := &b.Func.Config.Types
 13506  	// match: (Lsh8x64 (Const8 [c]) (Const64 [d]))
 13507  	// result: (Const8 [c << uint64(d)])
 13508  	for {
 13509  		if v_0.Op != OpConst8 {
 13510  			break
 13511  		}
 13512  		c := auxIntToInt8(v_0.AuxInt)
 13513  		if v_1.Op != OpConst64 {
 13514  			break
 13515  		}
 13516  		d := auxIntToInt64(v_1.AuxInt)
 13517  		v.reset(OpConst8)
 13518  		v.AuxInt = int8ToAuxInt(c << uint64(d))
 13519  		return true
 13520  	}
 13521  	// match: (Lsh8x64 x (Const64 [0]))
 13522  	// result: x
 13523  	for {
 13524  		x := v_0
 13525  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
 13526  			break
 13527  		}
 13528  		v.copyOf(x)
 13529  		return true
 13530  	}
 13531  	// match: (Lsh8x64 (Const8 [0]) _)
 13532  	// result: (Const8 [0])
 13533  	for {
 13534  		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
 13535  			break
 13536  		}
 13537  		v.reset(OpConst8)
 13538  		v.AuxInt = int8ToAuxInt(0)
 13539  		return true
 13540  	}
 13541  	// match: (Lsh8x64 _ (Const64 [c]))
 13542  	// cond: uint64(c) >= 8
 13543  	// result: (Const8 [0])
 13544  	for {
 13545  		if v_1.Op != OpConst64 {
 13546  			break
 13547  		}
 13548  		c := auxIntToInt64(v_1.AuxInt)
 13549  		if !(uint64(c) >= 8) {
 13550  			break
 13551  		}
 13552  		v.reset(OpConst8)
 13553  		v.AuxInt = int8ToAuxInt(0)
 13554  		return true
 13555  	}
 13556  	// match: (Lsh8x64 <t> (Lsh8x64 x (Const64 [c])) (Const64 [d]))
 13557  	// cond: !uaddOvf(c,d)
 13558  	// result: (Lsh8x64 x (Const64 <t> [c+d]))
 13559  	for {
 13560  		t := v.Type
 13561  		if v_0.Op != OpLsh8x64 {
 13562  			break
 13563  		}
 13564  		_ = v_0.Args[1]
 13565  		x := v_0.Args[0]
 13566  		v_0_1 := v_0.Args[1]
 13567  		if v_0_1.Op != OpConst64 {
 13568  			break
 13569  		}
 13570  		c := auxIntToInt64(v_0_1.AuxInt)
 13571  		if v_1.Op != OpConst64 {
 13572  			break
 13573  		}
 13574  		d := auxIntToInt64(v_1.AuxInt)
 13575  		if !(!uaddOvf(c, d)) {
 13576  			break
 13577  		}
 13578  		v.reset(OpLsh8x64)
 13579  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 13580  		v0.AuxInt = int64ToAuxInt(c + d)
 13581  		v.AddArg2(x, v0)
 13582  		return true
 13583  	}
 13584  	// match: (Lsh8x64 i:(Rsh8x64 x (Const64 [c])) (Const64 [c]))
 13585  	// cond: c >= 0 && c < 8 && i.Uses == 1
 13586  	// result: (And8 x (Const8 <v.Type> [int8(-1) << c]))
 13587  	for {
 13588  		i := v_0
 13589  		if i.Op != OpRsh8x64 {
 13590  			break
 13591  		}
 13592  		_ = i.Args[1]
 13593  		x := i.Args[0]
 13594  		i_1 := i.Args[1]
 13595  		if i_1.Op != OpConst64 {
 13596  			break
 13597  		}
 13598  		c := auxIntToInt64(i_1.AuxInt)
 13599  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 8 && i.Uses == 1) {
 13600  			break
 13601  		}
 13602  		v.reset(OpAnd8)
 13603  		v0 := b.NewValue0(v.Pos, OpConst8, v.Type)
 13604  		v0.AuxInt = int8ToAuxInt(int8(-1) << c)
 13605  		v.AddArg2(x, v0)
 13606  		return true
 13607  	}
 13608  	// match: (Lsh8x64 i:(Rsh8Ux64 x (Const64 [c])) (Const64 [c]))
 13609  	// cond: c >= 0 && c < 8 && i.Uses == 1
 13610  	// result: (And8 x (Const8 <v.Type> [int8(-1) << c]))
 13611  	for {
 13612  		i := v_0
 13613  		if i.Op != OpRsh8Ux64 {
 13614  			break
 13615  		}
 13616  		_ = i.Args[1]
 13617  		x := i.Args[0]
 13618  		i_1 := i.Args[1]
 13619  		if i_1.Op != OpConst64 {
 13620  			break
 13621  		}
 13622  		c := auxIntToInt64(i_1.AuxInt)
 13623  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 8 && i.Uses == 1) {
 13624  			break
 13625  		}
 13626  		v.reset(OpAnd8)
 13627  		v0 := b.NewValue0(v.Pos, OpConst8, v.Type)
 13628  		v0.AuxInt = int8ToAuxInt(int8(-1) << c)
 13629  		v.AddArg2(x, v0)
 13630  		return true
 13631  	}
 13632  	// match: (Lsh8x64 (Rsh8Ux64 (Lsh8x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
 13633  	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
 13634  	// result: (Lsh8x64 x (Const64 <typ.UInt64> [c1-c2+c3]))
 13635  	for {
 13636  		if v_0.Op != OpRsh8Ux64 {
 13637  			break
 13638  		}
 13639  		_ = v_0.Args[1]
 13640  		v_0_0 := v_0.Args[0]
 13641  		if v_0_0.Op != OpLsh8x64 {
 13642  			break
 13643  		}
 13644  		_ = v_0_0.Args[1]
 13645  		x := v_0_0.Args[0]
 13646  		v_0_0_1 := v_0_0.Args[1]
 13647  		if v_0_0_1.Op != OpConst64 {
 13648  			break
 13649  		}
 13650  		c1 := auxIntToInt64(v_0_0_1.AuxInt)
 13651  		v_0_1 := v_0.Args[1]
 13652  		if v_0_1.Op != OpConst64 {
 13653  			break
 13654  		}
 13655  		c2 := auxIntToInt64(v_0_1.AuxInt)
 13656  		if v_1.Op != OpConst64 {
 13657  			break
 13658  		}
 13659  		c3 := auxIntToInt64(v_1.AuxInt)
 13660  		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
 13661  			break
 13662  		}
 13663  		v.reset(OpLsh8x64)
 13664  		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
 13665  		v0.AuxInt = int64ToAuxInt(c1 - c2 + c3)
 13666  		v.AddArg2(x, v0)
 13667  		return true
 13668  	}
 13669  	return false
 13670  }
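// Explanatory note (added for readability; not part of the generated output).
// The Lsh8x64 rules above fold constant shifts, drop shifts by zero, turn a
// zero operand or a shift count of 8 or more into a zero constant, and merge
// nested left shifts when the combined count does not overflow. The
// Rsh-then-Lsh rules rely on the identity that shifting right and back left
// by the same in-range count simply clears the low bits; in plain Go, for any
// int8 x and 0 <= c < 8:
//
//	(x >> c) << c == x & (int8(-1) << c)
//
// e.g. with x = 0x5B and c = 3 both sides equal 0x58.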
 13671  func rewriteValuegeneric_OpLsh8x8(v *Value) bool {
 13672  	v_1 := v.Args[1]
 13673  	v_0 := v.Args[0]
 13674  	b := v.Block
 13675  	// match: (Lsh8x8 <t> x (Const8 [c]))
 13676  	// result: (Lsh8x64 x (Const64 <t> [int64(uint8(c))]))
 13677  	for {
 13678  		t := v.Type
 13679  		x := v_0
 13680  		if v_1.Op != OpConst8 {
 13681  			break
 13682  		}
 13683  		c := auxIntToInt8(v_1.AuxInt)
 13684  		v.reset(OpLsh8x64)
 13685  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 13686  		v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
 13687  		v.AddArg2(x, v0)
 13688  		return true
 13689  	}
 13690  	// match: (Lsh8x8 (Const8 [0]) _)
 13691  	// result: (Const8 [0])
 13692  	for {
 13693  		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
 13694  			break
 13695  		}
 13696  		v.reset(OpConst8)
 13697  		v.AuxInt = int8ToAuxInt(0)
 13698  		return true
 13699  	}
 13700  	return false
 13701  }
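// Explanatory note (added for readability; not part of the generated output).
// Lsh8x8 with a constant count is canonicalized to Lsh8x64 with the count
// widened through its unsigned value, so the richer Lsh8x64 rule set above
// can apply; a shift of a zero constant is folded directly. For example,
// (Lsh8x8 x (Const8 [3])) becomes (Lsh8x64 x (Const64 [3])).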
 13702  func rewriteValuegeneric_OpMod16(v *Value) bool {
 13703  	v_1 := v.Args[1]
 13704  	v_0 := v.Args[0]
 13705  	b := v.Block
 13706  	// match: (Mod16 (Const16 [c]) (Const16 [d]))
 13707  	// cond: d != 0
 13708  	// result: (Const16 [c % d])
 13709  	for {
 13710  		if v_0.Op != OpConst16 {
 13711  			break
 13712  		}
 13713  		c := auxIntToInt16(v_0.AuxInt)
 13714  		if v_1.Op != OpConst16 {
 13715  			break
 13716  		}
 13717  		d := auxIntToInt16(v_1.AuxInt)
 13718  		if !(d != 0) {
 13719  			break
 13720  		}
 13721  		v.reset(OpConst16)
 13722  		v.AuxInt = int16ToAuxInt(c % d)
 13723  		return true
 13724  	}
 13725  	// match: (Mod16 <t> n (Const16 [c]))
 13726  	// cond: isNonNegative(n) && isPowerOfTwo16(c)
 13727  	// result: (And16 n (Const16 <t> [c-1]))
 13728  	for {
 13729  		t := v.Type
 13730  		n := v_0
 13731  		if v_1.Op != OpConst16 {
 13732  			break
 13733  		}
 13734  		c := auxIntToInt16(v_1.AuxInt)
 13735  		if !(isNonNegative(n) && isPowerOfTwo16(c)) {
 13736  			break
 13737  		}
 13738  		v.reset(OpAnd16)
 13739  		v0 := b.NewValue0(v.Pos, OpConst16, t)
 13740  		v0.AuxInt = int16ToAuxInt(c - 1)
 13741  		v.AddArg2(n, v0)
 13742  		return true
 13743  	}
 13744  	// match: (Mod16 <t> n (Const16 [c]))
 13745  	// cond: c < 0 && c != -1<<15
 13746  	// result: (Mod16 <t> n (Const16 <t> [-c]))
 13747  	for {
 13748  		t := v.Type
 13749  		n := v_0
 13750  		if v_1.Op != OpConst16 {
 13751  			break
 13752  		}
 13753  		c := auxIntToInt16(v_1.AuxInt)
 13754  		if !(c < 0 && c != -1<<15) {
 13755  			break
 13756  		}
 13757  		v.reset(OpMod16)
 13758  		v.Type = t
 13759  		v0 := b.NewValue0(v.Pos, OpConst16, t)
 13760  		v0.AuxInt = int16ToAuxInt(-c)
 13761  		v.AddArg2(n, v0)
 13762  		return true
 13763  	}
 13764  	// match: (Mod16 <t> x (Const16 [c]))
 13765  	// cond: x.Op != OpConst16 && (c > 0 || c == -1<<15)
 13766  	// result: (Sub16 x (Mul16 <t> (Div16 <t> x (Const16 <t> [c])) (Const16 <t> [c])))
 13767  	for {
 13768  		t := v.Type
 13769  		x := v_0
 13770  		if v_1.Op != OpConst16 {
 13771  			break
 13772  		}
 13773  		c := auxIntToInt16(v_1.AuxInt)
 13774  		if !(x.Op != OpConst16 && (c > 0 || c == -1<<15)) {
 13775  			break
 13776  		}
 13777  		v.reset(OpSub16)
 13778  		v0 := b.NewValue0(v.Pos, OpMul16, t)
 13779  		v1 := b.NewValue0(v.Pos, OpDiv16, t)
 13780  		v2 := b.NewValue0(v.Pos, OpConst16, t)
 13781  		v2.AuxInt = int16ToAuxInt(c)
 13782  		v1.AddArg2(x, v2)
 13783  		v0.AddArg2(v1, v2)
 13784  		v.AddArg2(x, v0)
 13785  		return true
 13786  	}
 13787  	return false
 13788  }
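// Explanatory note (added for readability; not part of the generated output).
// The signed Mod16 rules fold constant operands, reduce a modulus by a power
// of two to a mask when the dividend is known non-negative, flip a negative
// divisor (other than -1<<15) to its positive value (in Go the sign of x % y
// follows the dividend, so the divisor's sign does not matter), and otherwise
// expand x % c into x - (x/c)*c so the division can be strength-reduced on
// its own. In plain Go, assuming n >= 0:
//
//	var n int16 = 100
//	_ = n % 8       // what the rule matches
//	_ = n & (8 - 1) // what it produces; both are 4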
 13789  func rewriteValuegeneric_OpMod16u(v *Value) bool {
 13790  	v_1 := v.Args[1]
 13791  	v_0 := v.Args[0]
 13792  	b := v.Block
 13793  	// match: (Mod16u (Const16 [c]) (Const16 [d]))
 13794  	// cond: d != 0
 13795  	// result: (Const16 [int16(uint16(c) % uint16(d))])
 13796  	for {
 13797  		if v_0.Op != OpConst16 {
 13798  			break
 13799  		}
 13800  		c := auxIntToInt16(v_0.AuxInt)
 13801  		if v_1.Op != OpConst16 {
 13802  			break
 13803  		}
 13804  		d := auxIntToInt16(v_1.AuxInt)
 13805  		if !(d != 0) {
 13806  			break
 13807  		}
 13808  		v.reset(OpConst16)
 13809  		v.AuxInt = int16ToAuxInt(int16(uint16(c) % uint16(d)))
 13810  		return true
 13811  	}
 13812  	// match: (Mod16u <t> n (Const16 [c]))
 13813  	// cond: isPowerOfTwo16(c)
 13814  	// result: (And16 n (Const16 <t> [c-1]))
 13815  	for {
 13816  		t := v.Type
 13817  		n := v_0
 13818  		if v_1.Op != OpConst16 {
 13819  			break
 13820  		}
 13821  		c := auxIntToInt16(v_1.AuxInt)
 13822  		if !(isPowerOfTwo16(c)) {
 13823  			break
 13824  		}
 13825  		v.reset(OpAnd16)
 13826  		v0 := b.NewValue0(v.Pos, OpConst16, t)
 13827  		v0.AuxInt = int16ToAuxInt(c - 1)
 13828  		v.AddArg2(n, v0)
 13829  		return true
 13830  	}
 13831  	// match: (Mod16u <t> x (Const16 [c]))
 13832  	// cond: x.Op != OpConst16 && c > 0 && umagicOK16(c)
 13833  	// result: (Sub16 x (Mul16 <t> (Div16u <t> x (Const16 <t> [c])) (Const16 <t> [c])))
 13834  	for {
 13835  		t := v.Type
 13836  		x := v_0
 13837  		if v_1.Op != OpConst16 {
 13838  			break
 13839  		}
 13840  		c := auxIntToInt16(v_1.AuxInt)
 13841  		if !(x.Op != OpConst16 && c > 0 && umagicOK16(c)) {
 13842  			break
 13843  		}
 13844  		v.reset(OpSub16)
 13845  		v0 := b.NewValue0(v.Pos, OpMul16, t)
 13846  		v1 := b.NewValue0(v.Pos, OpDiv16u, t)
 13847  		v2 := b.NewValue0(v.Pos, OpConst16, t)
 13848  		v2.AuxInt = int16ToAuxInt(c)
 13849  		v1.AddArg2(x, v2)
 13850  		v0.AddArg2(v1, v2)
 13851  		v.AddArg2(x, v0)
 13852  		return true
 13853  	}
 13854  	return false
 13855  }
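// Explanatory note (added for readability; not part of the generated output).
// Unsigned Mod16u needs no non-negativity check: any power-of-two divisor
// becomes a mask (for any uint16 x, x % 16 == x & 15), and the remaining
// constant-divisor case is expanded into x - (x/c)*c, with umagicOK16
// restricting the expansion to divisors for which the companion Div16u rules
// can replace the division with a multiply by a magic constant.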
 13856  func rewriteValuegeneric_OpMod32(v *Value) bool {
 13857  	v_1 := v.Args[1]
 13858  	v_0 := v.Args[0]
 13859  	b := v.Block
 13860  	// match: (Mod32 (Const32 [c]) (Const32 [d]))
 13861  	// cond: d != 0
 13862  	// result: (Const32 [c % d])
 13863  	for {
 13864  		if v_0.Op != OpConst32 {
 13865  			break
 13866  		}
 13867  		c := auxIntToInt32(v_0.AuxInt)
 13868  		if v_1.Op != OpConst32 {
 13869  			break
 13870  		}
 13871  		d := auxIntToInt32(v_1.AuxInt)
 13872  		if !(d != 0) {
 13873  			break
 13874  		}
 13875  		v.reset(OpConst32)
 13876  		v.AuxInt = int32ToAuxInt(c % d)
 13877  		return true
 13878  	}
 13879  	// match: (Mod32 <t> n (Const32 [c]))
 13880  	// cond: isNonNegative(n) && isPowerOfTwo32(c)
 13881  	// result: (And32 n (Const32 <t> [c-1]))
 13882  	for {
 13883  		t := v.Type
 13884  		n := v_0
 13885  		if v_1.Op != OpConst32 {
 13886  			break
 13887  		}
 13888  		c := auxIntToInt32(v_1.AuxInt)
 13889  		if !(isNonNegative(n) && isPowerOfTwo32(c)) {
 13890  			break
 13891  		}
 13892  		v.reset(OpAnd32)
 13893  		v0 := b.NewValue0(v.Pos, OpConst32, t)
 13894  		v0.AuxInt = int32ToAuxInt(c - 1)
 13895  		v.AddArg2(n, v0)
 13896  		return true
 13897  	}
 13898  	// match: (Mod32 <t> n (Const32 [c]))
 13899  	// cond: c < 0 && c != -1<<31
 13900  	// result: (Mod32 <t> n (Const32 <t> [-c]))
 13901  	for {
 13902  		t := v.Type
 13903  		n := v_0
 13904  		if v_1.Op != OpConst32 {
 13905  			break
 13906  		}
 13907  		c := auxIntToInt32(v_1.AuxInt)
 13908  		if !(c < 0 && c != -1<<31) {
 13909  			break
 13910  		}
 13911  		v.reset(OpMod32)
 13912  		v.Type = t
 13913  		v0 := b.NewValue0(v.Pos, OpConst32, t)
 13914  		v0.AuxInt = int32ToAuxInt(-c)
 13915  		v.AddArg2(n, v0)
 13916  		return true
 13917  	}
 13918  	// match: (Mod32 <t> x (Const32 [c]))
 13919  	// cond: x.Op != OpConst32 && (c > 0 || c == -1<<31)
 13920  	// result: (Sub32 x (Mul32 <t> (Div32 <t> x (Const32 <t> [c])) (Const32 <t> [c])))
 13921  	for {
 13922  		t := v.Type
 13923  		x := v_0
 13924  		if v_1.Op != OpConst32 {
 13925  			break
 13926  		}
 13927  		c := auxIntToInt32(v_1.AuxInt)
 13928  		if !(x.Op != OpConst32 && (c > 0 || c == -1<<31)) {
 13929  			break
 13930  		}
 13931  		v.reset(OpSub32)
 13932  		v0 := b.NewValue0(v.Pos, OpMul32, t)
 13933  		v1 := b.NewValue0(v.Pos, OpDiv32, t)
 13934  		v2 := b.NewValue0(v.Pos, OpConst32, t)
 13935  		v2.AuxInt = int32ToAuxInt(c)
 13936  		v1.AddArg2(x, v2)
 13937  		v0.AddArg2(v1, v2)
 13938  		v.AddArg2(x, v0)
 13939  		return true
 13940  	}
 13941  	return false
 13942  }
 13943  func rewriteValuegeneric_OpMod32u(v *Value) bool {
 13944  	v_1 := v.Args[1]
 13945  	v_0 := v.Args[0]
 13946  	b := v.Block
 13947  	// match: (Mod32u (Const32 [c]) (Const32 [d]))
 13948  	// cond: d != 0
 13949  	// result: (Const32 [int32(uint32(c) % uint32(d))])
 13950  	for {
 13951  		if v_0.Op != OpConst32 {
 13952  			break
 13953  		}
 13954  		c := auxIntToInt32(v_0.AuxInt)
 13955  		if v_1.Op != OpConst32 {
 13956  			break
 13957  		}
 13958  		d := auxIntToInt32(v_1.AuxInt)
 13959  		if !(d != 0) {
 13960  			break
 13961  		}
 13962  		v.reset(OpConst32)
 13963  		v.AuxInt = int32ToAuxInt(int32(uint32(c) % uint32(d)))
 13964  		return true
 13965  	}
 13966  	// match: (Mod32u <t> n (Const32 [c]))
 13967  	// cond: isPowerOfTwo32(c)
 13968  	// result: (And32 n (Const32 <t> [c-1]))
 13969  	for {
 13970  		t := v.Type
 13971  		n := v_0
 13972  		if v_1.Op != OpConst32 {
 13973  			break
 13974  		}
 13975  		c := auxIntToInt32(v_1.AuxInt)
 13976  		if !(isPowerOfTwo32(c)) {
 13977  			break
 13978  		}
 13979  		v.reset(OpAnd32)
 13980  		v0 := b.NewValue0(v.Pos, OpConst32, t)
 13981  		v0.AuxInt = int32ToAuxInt(c - 1)
 13982  		v.AddArg2(n, v0)
 13983  		return true
 13984  	}
 13985  	// match: (Mod32u <t> x (Const32 [c]))
 13986  	// cond: x.Op != OpConst32 && c > 0 && umagicOK32(c)
 13987  	// result: (Sub32 x (Mul32 <t> (Div32u <t> x (Const32 <t> [c])) (Const32 <t> [c])))
 13988  	for {
 13989  		t := v.Type
 13990  		x := v_0
 13991  		if v_1.Op != OpConst32 {
 13992  			break
 13993  		}
 13994  		c := auxIntToInt32(v_1.AuxInt)
 13995  		if !(x.Op != OpConst32 && c > 0 && umagicOK32(c)) {
 13996  			break
 13997  		}
 13998  		v.reset(OpSub32)
 13999  		v0 := b.NewValue0(v.Pos, OpMul32, t)
 14000  		v1 := b.NewValue0(v.Pos, OpDiv32u, t)
 14001  		v2 := b.NewValue0(v.Pos, OpConst32, t)
 14002  		v2.AuxInt = int32ToAuxInt(c)
 14003  		v1.AddArg2(x, v2)
 14004  		v0.AddArg2(v1, v2)
 14005  		v.AddArg2(x, v0)
 14006  		return true
 14007  	}
 14008  	return false
 14009  }
 14010  func rewriteValuegeneric_OpMod64(v *Value) bool {
 14011  	v_1 := v.Args[1]
 14012  	v_0 := v.Args[0]
 14013  	b := v.Block
 14014  	// match: (Mod64 (Const64 [c]) (Const64 [d]))
 14015  	// cond: d != 0
 14016  	// result: (Const64 [c % d])
 14017  	for {
 14018  		if v_0.Op != OpConst64 {
 14019  			break
 14020  		}
 14021  		c := auxIntToInt64(v_0.AuxInt)
 14022  		if v_1.Op != OpConst64 {
 14023  			break
 14024  		}
 14025  		d := auxIntToInt64(v_1.AuxInt)
 14026  		if !(d != 0) {
 14027  			break
 14028  		}
 14029  		v.reset(OpConst64)
 14030  		v.AuxInt = int64ToAuxInt(c % d)
 14031  		return true
 14032  	}
 14033  	// match: (Mod64 <t> n (Const64 [c]))
 14034  	// cond: isNonNegative(n) && isPowerOfTwo64(c)
 14035  	// result: (And64 n (Const64 <t> [c-1]))
 14036  	for {
 14037  		t := v.Type
 14038  		n := v_0
 14039  		if v_1.Op != OpConst64 {
 14040  			break
 14041  		}
 14042  		c := auxIntToInt64(v_1.AuxInt)
 14043  		if !(isNonNegative(n) && isPowerOfTwo64(c)) {
 14044  			break
 14045  		}
 14046  		v.reset(OpAnd64)
 14047  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 14048  		v0.AuxInt = int64ToAuxInt(c - 1)
 14049  		v.AddArg2(n, v0)
 14050  		return true
 14051  	}
 14052  	// match: (Mod64 n (Const64 [-1<<63]))
 14053  	// cond: isNonNegative(n)
 14054  	// result: n
 14055  	for {
 14056  		n := v_0
 14057  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != -1<<63 || !(isNonNegative(n)) {
 14058  			break
 14059  		}
 14060  		v.copyOf(n)
 14061  		return true
 14062  	}
 14063  	// match: (Mod64 <t> n (Const64 [c]))
 14064  	// cond: c < 0 && c != -1<<63
 14065  	// result: (Mod64 <t> n (Const64 <t> [-c]))
 14066  	for {
 14067  		t := v.Type
 14068  		n := v_0
 14069  		if v_1.Op != OpConst64 {
 14070  			break
 14071  		}
 14072  		c := auxIntToInt64(v_1.AuxInt)
 14073  		if !(c < 0 && c != -1<<63) {
 14074  			break
 14075  		}
 14076  		v.reset(OpMod64)
 14077  		v.Type = t
 14078  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 14079  		v0.AuxInt = int64ToAuxInt(-c)
 14080  		v.AddArg2(n, v0)
 14081  		return true
 14082  	}
 14083  	// match: (Mod64 <t> x (Const64 [c]))
 14084  	// cond: x.Op != OpConst64 && (c > 0 || c == -1<<63)
 14085  	// result: (Sub64 x (Mul64 <t> (Div64 <t> x (Const64 <t> [c])) (Const64 <t> [c])))
 14086  	for {
 14087  		t := v.Type
 14088  		x := v_0
 14089  		if v_1.Op != OpConst64 {
 14090  			break
 14091  		}
 14092  		c := auxIntToInt64(v_1.AuxInt)
 14093  		if !(x.Op != OpConst64 && (c > 0 || c == -1<<63)) {
 14094  			break
 14095  		}
 14096  		v.reset(OpSub64)
 14097  		v0 := b.NewValue0(v.Pos, OpMul64, t)
 14098  		v1 := b.NewValue0(v.Pos, OpDiv64, t)
 14099  		v2 := b.NewValue0(v.Pos, OpConst64, t)
 14100  		v2.AuxInt = int64ToAuxInt(c)
 14101  		v1.AddArg2(x, v2)
 14102  		v0.AddArg2(v1, v2)
 14103  		v.AddArg2(x, v0)
 14104  		return true
 14105  	}
 14106  	return false
 14107  }
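// Explanatory note (added for readability; not part of the generated output).
// Mod64 follows the same pattern as the narrower signed cases, plus one extra
// rule: a non-negative dividend taken modulo -1<<63 is returned unchanged,
// because a non-negative int64 is strictly less than 1<<63 in magnitude, so
// the truncated quotient is zero and n % (-1<<63) == n.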
 14108  func rewriteValuegeneric_OpMod64u(v *Value) bool {
 14109  	v_1 := v.Args[1]
 14110  	v_0 := v.Args[0]
 14111  	b := v.Block
 14112  	// match: (Mod64u (Const64 [c]) (Const64 [d]))
 14113  	// cond: d != 0
 14114  	// result: (Const64 [int64(uint64(c) % uint64(d))])
 14115  	for {
 14116  		if v_0.Op != OpConst64 {
 14117  			break
 14118  		}
 14119  		c := auxIntToInt64(v_0.AuxInt)
 14120  		if v_1.Op != OpConst64 {
 14121  			break
 14122  		}
 14123  		d := auxIntToInt64(v_1.AuxInt)
 14124  		if !(d != 0) {
 14125  			break
 14126  		}
 14127  		v.reset(OpConst64)
 14128  		v.AuxInt = int64ToAuxInt(int64(uint64(c) % uint64(d)))
 14129  		return true
 14130  	}
 14131  	// match: (Mod64u <t> n (Const64 [c]))
 14132  	// cond: isPowerOfTwo64(c)
 14133  	// result: (And64 n (Const64 <t> [c-1]))
 14134  	for {
 14135  		t := v.Type
 14136  		n := v_0
 14137  		if v_1.Op != OpConst64 {
 14138  			break
 14139  		}
 14140  		c := auxIntToInt64(v_1.AuxInt)
 14141  		if !(isPowerOfTwo64(c)) {
 14142  			break
 14143  		}
 14144  		v.reset(OpAnd64)
 14145  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 14146  		v0.AuxInt = int64ToAuxInt(c - 1)
 14147  		v.AddArg2(n, v0)
 14148  		return true
 14149  	}
 14150  	// match: (Mod64u <t> n (Const64 [-1<<63]))
 14151  	// result: (And64 n (Const64 <t> [1<<63-1]))
 14152  	for {
 14153  		t := v.Type
 14154  		n := v_0
 14155  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != -1<<63 {
 14156  			break
 14157  		}
 14158  		v.reset(OpAnd64)
 14159  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 14160  		v0.AuxInt = int64ToAuxInt(1<<63 - 1)
 14161  		v.AddArg2(n, v0)
 14162  		return true
 14163  	}
 14164  	// match: (Mod64u <t> x (Const64 [c]))
 14165  	// cond: x.Op != OpConst64 && c > 0 && umagicOK64(c)
 14166  	// result: (Sub64 x (Mul64 <t> (Div64u <t> x (Const64 <t> [c])) (Const64 <t> [c])))
 14167  	for {
 14168  		t := v.Type
 14169  		x := v_0
 14170  		if v_1.Op != OpConst64 {
 14171  			break
 14172  		}
 14173  		c := auxIntToInt64(v_1.AuxInt)
 14174  		if !(x.Op != OpConst64 && c > 0 && umagicOK64(c)) {
 14175  			break
 14176  		}
 14177  		v.reset(OpSub64)
 14178  		v0 := b.NewValue0(v.Pos, OpMul64, t)
 14179  		v1 := b.NewValue0(v.Pos, OpDiv64u, t)
 14180  		v2 := b.NewValue0(v.Pos, OpConst64, t)
 14181  		v2.AuxInt = int64ToAuxInt(c)
 14182  		v1.AddArg2(x, v2)
 14183  		v0.AddArg2(v1, v2)
 14184  		v.AddArg2(x, v0)
 14185  		return true
 14186  	}
 14187  	return false
 14188  }
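// Explanatory note (added for readability; not part of the generated output).
// For Mod64u the divisor -1<<63 gets its own rule: interpreted as an unsigned
// value it is 1<<63, a power of two, so the modulus is the mask
// n & (1<<63 - 1), i.e. the low 63 bits of n.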
 14189  func rewriteValuegeneric_OpMod8(v *Value) bool {
 14190  	v_1 := v.Args[1]
 14191  	v_0 := v.Args[0]
 14192  	b := v.Block
 14193  	// match: (Mod8 (Const8 [c]) (Const8 [d]))
 14194  	// cond: d != 0
 14195  	// result: (Const8 [c % d])
 14196  	for {
 14197  		if v_0.Op != OpConst8 {
 14198  			break
 14199  		}
 14200  		c := auxIntToInt8(v_0.AuxInt)
 14201  		if v_1.Op != OpConst8 {
 14202  			break
 14203  		}
 14204  		d := auxIntToInt8(v_1.AuxInt)
 14205  		if !(d != 0) {
 14206  			break
 14207  		}
 14208  		v.reset(OpConst8)
 14209  		v.AuxInt = int8ToAuxInt(c % d)
 14210  		return true
 14211  	}
 14212  	// match: (Mod8 <t> n (Const8 [c]))
 14213  	// cond: isNonNegative(n) && isPowerOfTwo8(c)
 14214  	// result: (And8 n (Const8 <t> [c-1]))
 14215  	for {
 14216  		t := v.Type
 14217  		n := v_0
 14218  		if v_1.Op != OpConst8 {
 14219  			break
 14220  		}
 14221  		c := auxIntToInt8(v_1.AuxInt)
 14222  		if !(isNonNegative(n) && isPowerOfTwo8(c)) {
 14223  			break
 14224  		}
 14225  		v.reset(OpAnd8)
 14226  		v0 := b.NewValue0(v.Pos, OpConst8, t)
 14227  		v0.AuxInt = int8ToAuxInt(c - 1)
 14228  		v.AddArg2(n, v0)
 14229  		return true
 14230  	}
 14231  	// match: (Mod8 <t> n (Const8 [c]))
 14232  	// cond: c < 0 && c != -1<<7
 14233  	// result: (Mod8 <t> n (Const8 <t> [-c]))
 14234  	for {
 14235  		t := v.Type
 14236  		n := v_0
 14237  		if v_1.Op != OpConst8 {
 14238  			break
 14239  		}
 14240  		c := auxIntToInt8(v_1.AuxInt)
 14241  		if !(c < 0 && c != -1<<7) {
 14242  			break
 14243  		}
 14244  		v.reset(OpMod8)
 14245  		v.Type = t
 14246  		v0 := b.NewValue0(v.Pos, OpConst8, t)
 14247  		v0.AuxInt = int8ToAuxInt(-c)
 14248  		v.AddArg2(n, v0)
 14249  		return true
 14250  	}
 14251  	// match: (Mod8 <t> x (Const8 [c]))
 14252  	// cond: x.Op != OpConst8 && (c > 0 || c == -1<<7)
 14253  	// result: (Sub8 x (Mul8 <t> (Div8 <t> x (Const8 <t> [c])) (Const8 <t> [c])))
 14254  	for {
 14255  		t := v.Type
 14256  		x := v_0
 14257  		if v_1.Op != OpConst8 {
 14258  			break
 14259  		}
 14260  		c := auxIntToInt8(v_1.AuxInt)
 14261  		if !(x.Op != OpConst8 && (c > 0 || c == -1<<7)) {
 14262  			break
 14263  		}
 14264  		v.reset(OpSub8)
 14265  		v0 := b.NewValue0(v.Pos, OpMul8, t)
 14266  		v1 := b.NewValue0(v.Pos, OpDiv8, t)
 14267  		v2 := b.NewValue0(v.Pos, OpConst8, t)
 14268  		v2.AuxInt = int8ToAuxInt(c)
 14269  		v1.AddArg2(x, v2)
 14270  		v0.AddArg2(v1, v2)
 14271  		v.AddArg2(x, v0)
 14272  		return true
 14273  	}
 14274  	return false
 14275  }
 14276  func rewriteValuegeneric_OpMod8u(v *Value) bool {
 14277  	v_1 := v.Args[1]
 14278  	v_0 := v.Args[0]
 14279  	b := v.Block
 14280  	// match: (Mod8u (Const8 [c]) (Const8 [d]))
 14281  	// cond: d != 0
 14282  	// result: (Const8 [int8(uint8(c) % uint8(d))])
 14283  	for {
 14284  		if v_0.Op != OpConst8 {
 14285  			break
 14286  		}
 14287  		c := auxIntToInt8(v_0.AuxInt)
 14288  		if v_1.Op != OpConst8 {
 14289  			break
 14290  		}
 14291  		d := auxIntToInt8(v_1.AuxInt)
 14292  		if !(d != 0) {
 14293  			break
 14294  		}
 14295  		v.reset(OpConst8)
 14296  		v.AuxInt = int8ToAuxInt(int8(uint8(c) % uint8(d)))
 14297  		return true
 14298  	}
 14299  	// match: (Mod8u <t> n (Const8 [c]))
 14300  	// cond: isPowerOfTwo8(c)
 14301  	// result: (And8 n (Const8 <t> [c-1]))
 14302  	for {
 14303  		t := v.Type
 14304  		n := v_0
 14305  		if v_1.Op != OpConst8 {
 14306  			break
 14307  		}
 14308  		c := auxIntToInt8(v_1.AuxInt)
 14309  		if !(isPowerOfTwo8(c)) {
 14310  			break
 14311  		}
 14312  		v.reset(OpAnd8)
 14313  		v0 := b.NewValue0(v.Pos, OpConst8, t)
 14314  		v0.AuxInt = int8ToAuxInt(c - 1)
 14315  		v.AddArg2(n, v0)
 14316  		return true
 14317  	}
 14318  	// match: (Mod8u <t> x (Const8 [c]))
14319  	// cond: x.Op != OpConst8 && c > 0 && umagicOK8(c)
 14320  	// result: (Sub8 x (Mul8 <t> (Div8u <t> x (Const8 <t> [c])) (Const8 <t> [c])))
 14321  	for {
 14322  		t := v.Type
 14323  		x := v_0
 14324  		if v_1.Op != OpConst8 {
 14325  			break
 14326  		}
 14327  		c := auxIntToInt8(v_1.AuxInt)
 14328  		if !(x.Op != OpConst8 && c > 0 && umagicOK8(c)) {
 14329  			break
 14330  		}
 14331  		v.reset(OpSub8)
 14332  		v0 := b.NewValue0(v.Pos, OpMul8, t)
 14333  		v1 := b.NewValue0(v.Pos, OpDiv8u, t)
 14334  		v2 := b.NewValue0(v.Pos, OpConst8, t)
 14335  		v2.AuxInt = int8ToAuxInt(c)
 14336  		v1.AddArg2(x, v2)
 14337  		v0.AddArg2(v1, v2)
 14338  		v.AddArg2(x, v0)
 14339  		return true
 14340  	}
 14341  	return false
 14342  }
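// Explanatory note (added for readability; not part of the generated output).
// The Mod32, Mod32u, Mod8, and Mod8u functions apply the same rule set as the
// 16-bit forms at their respective widths; only the 64-bit forms add the
// extra minimum-value rules noted above.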
 14343  func rewriteValuegeneric_OpMove(v *Value) bool {
 14344  	v_2 := v.Args[2]
 14345  	v_1 := v.Args[1]
 14346  	v_0 := v.Args[0]
 14347  	b := v.Block
 14348  	config := b.Func.Config
 14349  	// match: (Move {t} [n] dst1 src mem:(Zero {t} [n] dst2 _))
 14350  	// cond: isSamePtr(src, dst2)
 14351  	// result: (Zero {t} [n] dst1 mem)
 14352  	for {
 14353  		n := auxIntToInt64(v.AuxInt)
 14354  		t := auxToType(v.Aux)
 14355  		dst1 := v_0
 14356  		src := v_1
 14357  		mem := v_2
 14358  		if mem.Op != OpZero || auxIntToInt64(mem.AuxInt) != n || auxToType(mem.Aux) != t {
 14359  			break
 14360  		}
 14361  		dst2 := mem.Args[0]
 14362  		if !(isSamePtr(src, dst2)) {
 14363  			break
 14364  		}
 14365  		v.reset(OpZero)
 14366  		v.AuxInt = int64ToAuxInt(n)
 14367  		v.Aux = typeToAux(t)
 14368  		v.AddArg2(dst1, mem)
 14369  		return true
 14370  	}
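	// Explanatory note (added for readability; not part of the generated output).
	// The rule above forwards a copy out of freshly zeroed memory: if the n
	// bytes being moved were just written by (Zero [n] src), copying them to
	// dst1 is the same as zeroing dst1 directly, so the Move becomes
	// (Zero [n] dst1) over the same memory state.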
 14371  	// match: (Move {t} [n] dst1 src mem:(VarDef (Zero {t} [n] dst0 _)))
 14372  	// cond: isSamePtr(src, dst0)
 14373  	// result: (Zero {t} [n] dst1 mem)
 14374  	for {
 14375  		n := auxIntToInt64(v.AuxInt)
 14376  		t := auxToType(v.Aux)
 14377  		dst1 := v_0
 14378  		src := v_1
 14379  		mem := v_2
 14380  		if mem.Op != OpVarDef {
 14381  			break
 14382  		}
 14383  		mem_0 := mem.Args[0]
 14384  		if mem_0.Op != OpZero || auxIntToInt64(mem_0.AuxInt) != n || auxToType(mem_0.Aux) != t {
 14385  			break
 14386  		}
 14387  		dst0 := mem_0.Args[0]
 14388  		if !(isSamePtr(src, dst0)) {
 14389  			break
 14390  		}
 14391  		v.reset(OpZero)
 14392  		v.AuxInt = int64ToAuxInt(n)
 14393  		v.Aux = typeToAux(t)
 14394  		v.AddArg2(dst1, mem)
 14395  		return true
 14396  	}
 14397  	// match: (Move {t} [n] dst (Addr {sym} (SB)) mem)
 14398  	// cond: symIsROZero(sym)
 14399  	// result: (Zero {t} [n] dst mem)
 14400  	for {
 14401  		n := auxIntToInt64(v.AuxInt)
 14402  		t := auxToType(v.Aux)
 14403  		dst := v_0
 14404  		if v_1.Op != OpAddr {
 14405  			break
 14406  		}
 14407  		sym := auxToSym(v_1.Aux)
 14408  		v_1_0 := v_1.Args[0]
 14409  		if v_1_0.Op != OpSB {
 14410  			break
 14411  		}
 14412  		mem := v_2
 14413  		if !(symIsROZero(sym)) {
 14414  			break
 14415  		}
 14416  		v.reset(OpZero)
 14417  		v.AuxInt = int64ToAuxInt(n)
 14418  		v.Aux = typeToAux(t)
 14419  		v.AddArg2(dst, mem)
 14420  		return true
 14421  	}
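	// Explanatory note (added for readability; not part of the generated output).
	// The preceding VarDef variant matches the same zeroed-source pattern with
	// an intervening VarDef, and the rule just above covers a source that is a
	// read-only symbol known to contain only zero bytes (symIsROZero); in all
	// three cases the copy is replaced by a Zero of the destination.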
 14422  	// match: (Move {t1} [n] dst1 src1 store:(Store {t2} op:(OffPtr [o2] dst2) _ mem))
 14423  	// cond: isSamePtr(dst1, dst2) && store.Uses == 1 && n >= o2 + t2.Size() && disjoint(src1, n, op, t2.Size()) && clobber(store)
 14424  	// result: (Move {t1} [n] dst1 src1 mem)
 14425  	for {
 14426  		n := auxIntToInt64(v.AuxInt)
 14427  		t1 := auxToType(v.Aux)
 14428  		dst1 := v_0
 14429  		src1 := v_1
 14430  		store := v_2
 14431  		if store.Op != OpStore {
 14432  			break
 14433  		}
 14434  		t2 := auxToType(store.Aux)
 14435  		mem := store.Args[2]
 14436  		op := store.Args[0]
 14437  		if op.Op != OpOffPtr {
 14438  			break
 14439  		}
 14440  		o2 := auxIntToInt64(op.AuxInt)
 14441  		dst2 := op.Args[0]
 14442  		if !(isSamePtr(dst1, dst2) && store.Uses == 1 && n >= o2+t2.Size() && disjoint(src1, n, op, t2.Size()) && clobber(store)) {
 14443  			break
 14444  		}
 14445  		v.reset(OpMove)
 14446  		v.AuxInt = int64ToAuxInt(n)
 14447  		v.Aux = typeToAux(t1)
 14448  		v.AddArg3(dst1, src1, mem)
 14449  		return true
 14450  	}
 14451  	// match: (Move {t} [n] dst1 src1 move:(Move {t} [n] dst2 _ mem))
 14452  	// cond: move.Uses == 1 && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n) && clobber(move)
 14453  	// result: (Move {t} [n] dst1 src1 mem)
 14454  	for {
 14455  		n := auxIntToInt64(v.AuxInt)
 14456  		t := auxToType(v.Aux)
 14457  		dst1 := v_0
 14458  		src1 := v_1
 14459  		move := v_2
 14460  		if move.Op != OpMove || auxIntToInt64(move.AuxInt) != n || auxToType(move.Aux) != t {
 14461  			break
 14462  		}
 14463  		mem := move.Args[2]
 14464  		dst2 := move.Args[0]
 14465  		if !(move.Uses == 1 && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n) && clobber(move)) {
 14466  			break
 14467  		}
 14468  		v.reset(OpMove)
 14469  		v.AuxInt = int64ToAuxInt(n)
 14470  		v.Aux = typeToAux(t)
 14471  		v.AddArg3(dst1, src1, mem)
 14472  		return true
 14473  	}
 14474  	// match: (Move {t} [n] dst1 src1 vardef:(VarDef {x} move:(Move {t} [n] dst2 _ mem)))
 14475  	// cond: move.Uses == 1 && vardef.Uses == 1 && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n) && clobber(move, vardef)
 14476  	// result: (Move {t} [n] dst1 src1 (VarDef {x} mem))
 14477  	for {
 14478  		n := auxIntToInt64(v.AuxInt)
 14479  		t := auxToType(v.Aux)
 14480  		dst1 := v_0
 14481  		src1 := v_1
 14482  		vardef := v_2
 14483  		if vardef.Op != OpVarDef {
 14484  			break
 14485  		}
 14486  		x := auxToSym(vardef.Aux)
 14487  		move := vardef.Args[0]
 14488  		if move.Op != OpMove || auxIntToInt64(move.AuxInt) != n || auxToType(move.Aux) != t {
 14489  			break
 14490  		}
 14491  		mem := move.Args[2]
 14492  		dst2 := move.Args[0]
 14493  		if !(move.Uses == 1 && vardef.Uses == 1 && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n) && clobber(move, vardef)) {
 14494  			break
 14495  		}
 14496  		v.reset(OpMove)
 14497  		v.AuxInt = int64ToAuxInt(n)
 14498  		v.Aux = typeToAux(t)
 14499  		v0 := b.NewValue0(v.Pos, OpVarDef, types.TypeMem)
 14500  		v0.Aux = symToAux(x)
 14501  		v0.AddArg(mem)
 14502  		v.AddArg3(dst1, src1, v0)
 14503  		return true
 14504  	}
 14505  	// match: (Move {t} [n] dst1 src1 zero:(Zero {t} [n] dst2 mem))
 14506  	// cond: zero.Uses == 1 && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n) && clobber(zero)
 14507  	// result: (Move {t} [n] dst1 src1 mem)
 14508  	for {
 14509  		n := auxIntToInt64(v.AuxInt)
 14510  		t := auxToType(v.Aux)
 14511  		dst1 := v_0
 14512  		src1 := v_1
 14513  		zero := v_2
 14514  		if zero.Op != OpZero || auxIntToInt64(zero.AuxInt) != n || auxToType(zero.Aux) != t {
 14515  			break
 14516  		}
 14517  		mem := zero.Args[1]
 14518  		dst2 := zero.Args[0]
 14519  		if !(zero.Uses == 1 && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n) && clobber(zero)) {
 14520  			break
 14521  		}
 14522  		v.reset(OpMove)
 14523  		v.AuxInt = int64ToAuxInt(n)
 14524  		v.Aux = typeToAux(t)
 14525  		v.AddArg3(dst1, src1, mem)
 14526  		return true
 14527  	}
 14528  	// match: (Move {t} [n] dst1 src1 vardef:(VarDef {x} zero:(Zero {t} [n] dst2 mem)))
 14529  	// cond: zero.Uses == 1 && vardef.Uses == 1 && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n) && clobber(zero, vardef)
 14530  	// result: (Move {t} [n] dst1 src1 (VarDef {x} mem))
 14531  	for {
 14532  		n := auxIntToInt64(v.AuxInt)
 14533  		t := auxToType(v.Aux)
 14534  		dst1 := v_0
 14535  		src1 := v_1
 14536  		vardef := v_2
 14537  		if vardef.Op != OpVarDef {
 14538  			break
 14539  		}
 14540  		x := auxToSym(vardef.Aux)
 14541  		zero := vardef.Args[0]
 14542  		if zero.Op != OpZero || auxIntToInt64(zero.AuxInt) != n || auxToType(zero.Aux) != t {
 14543  			break
 14544  		}
 14545  		mem := zero.Args[1]
 14546  		dst2 := zero.Args[0]
 14547  		if !(zero.Uses == 1 && vardef.Uses == 1 && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n) && clobber(zero, vardef)) {
 14548  			break
 14549  		}
 14550  		v.reset(OpMove)
 14551  		v.AuxInt = int64ToAuxInt(n)
 14552  		v.Aux = typeToAux(t)
 14553  		v0 := b.NewValue0(v.Pos, OpVarDef, types.TypeMem)
 14554  		v0.Aux = symToAux(x)
 14555  		v0.AddArg(mem)
 14556  		v.AddArg3(dst1, src1, v0)
 14557  		return true
 14558  	}
 14559  	// match: (Move {t1} [n] dst p1 mem:(Store {t2} op2:(OffPtr <tt2> [o2] p2) d1 (Store {t3} op3:(OffPtr <tt3> [0] p3) d2 _)))
 14560  	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && o2 == t3.Size() && n == t2.Size() + t3.Size()
 14561  	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [0] dst) d2 mem))
 14562  	for {
 14563  		n := auxIntToInt64(v.AuxInt)
 14564  		t1 := auxToType(v.Aux)
 14565  		dst := v_0
 14566  		p1 := v_1
 14567  		mem := v_2
 14568  		if mem.Op != OpStore {
 14569  			break
 14570  		}
 14571  		t2 := auxToType(mem.Aux)
 14572  		_ = mem.Args[2]
 14573  		op2 := mem.Args[0]
 14574  		if op2.Op != OpOffPtr {
 14575  			break
 14576  		}
 14577  		tt2 := op2.Type
 14578  		o2 := auxIntToInt64(op2.AuxInt)
 14579  		p2 := op2.Args[0]
 14580  		d1 := mem.Args[1]
 14581  		mem_2 := mem.Args[2]
 14582  		if mem_2.Op != OpStore {
 14583  			break
 14584  		}
 14585  		t3 := auxToType(mem_2.Aux)
 14586  		d2 := mem_2.Args[1]
 14587  		op3 := mem_2.Args[0]
 14588  		if op3.Op != OpOffPtr {
 14589  			break
 14590  		}
 14591  		tt3 := op3.Type
 14592  		if auxIntToInt64(op3.AuxInt) != 0 {
 14593  			break
 14594  		}
 14595  		p3 := op3.Args[0]
 14596  		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && o2 == t3.Size() && n == t2.Size()+t3.Size()) {
 14597  			break
 14598  		}
 14599  		v.reset(OpStore)
 14600  		v.Aux = typeToAux(t2)
 14601  		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
 14602  		v0.AuxInt = int64ToAuxInt(o2)
 14603  		v0.AddArg(dst)
 14604  		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 14605  		v1.Aux = typeToAux(t3)
 14606  		v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
 14607  		v2.AuxInt = int64ToAuxInt(0)
 14608  		v2.AddArg(dst)
 14609  		v1.AddArg3(v2, d2, mem)
 14610  		v.AddArg3(v0, d1, v1)
 14611  		return true
 14612  	}
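	// Explanatory note (added for readability; not part of the generated output).
	// The rule above (and the wider three- and four-store variants that follow)
	// recognizes a Move whose source was fully assembled by a chain of scalar
	// Stores at known offsets into the same pointer. When the store sizes and
	// offsets tile the n bytes exactly and each piece is registerizable, the
	// copy is re-expressed as the same chain of Stores aimed at the
	// destination, avoiding the memory-to-memory Move entirely.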
 14613  	// match: (Move {t1} [n] dst p1 mem:(Store {t2} op2:(OffPtr <tt2> [o2] p2) d1 (Store {t3} op3:(OffPtr <tt3> [o3] p3) d2 (Store {t4} op4:(OffPtr <tt4> [0] p4) d3 _))))
 14614  	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && o3 == t4.Size() && o2-o3 == t3.Size() && n == t2.Size() + t3.Size() + t4.Size()
 14615  	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Store {t4} (OffPtr <tt4> [0] dst) d3 mem)))
 14616  	for {
 14617  		n := auxIntToInt64(v.AuxInt)
 14618  		t1 := auxToType(v.Aux)
 14619  		dst := v_0
 14620  		p1 := v_1
 14621  		mem := v_2
 14622  		if mem.Op != OpStore {
 14623  			break
 14624  		}
 14625  		t2 := auxToType(mem.Aux)
 14626  		_ = mem.Args[2]
 14627  		op2 := mem.Args[0]
 14628  		if op2.Op != OpOffPtr {
 14629  			break
 14630  		}
 14631  		tt2 := op2.Type
 14632  		o2 := auxIntToInt64(op2.AuxInt)
 14633  		p2 := op2.Args[0]
 14634  		d1 := mem.Args[1]
 14635  		mem_2 := mem.Args[2]
 14636  		if mem_2.Op != OpStore {
 14637  			break
 14638  		}
 14639  		t3 := auxToType(mem_2.Aux)
 14640  		_ = mem_2.Args[2]
 14641  		op3 := mem_2.Args[0]
 14642  		if op3.Op != OpOffPtr {
 14643  			break
 14644  		}
 14645  		tt3 := op3.Type
 14646  		o3 := auxIntToInt64(op3.AuxInt)
 14647  		p3 := op3.Args[0]
 14648  		d2 := mem_2.Args[1]
 14649  		mem_2_2 := mem_2.Args[2]
 14650  		if mem_2_2.Op != OpStore {
 14651  			break
 14652  		}
 14653  		t4 := auxToType(mem_2_2.Aux)
 14654  		d3 := mem_2_2.Args[1]
 14655  		op4 := mem_2_2.Args[0]
 14656  		if op4.Op != OpOffPtr {
 14657  			break
 14658  		}
 14659  		tt4 := op4.Type
 14660  		if auxIntToInt64(op4.AuxInt) != 0 {
 14661  			break
 14662  		}
 14663  		p4 := op4.Args[0]
 14664  		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && o3 == t4.Size() && o2-o3 == t3.Size() && n == t2.Size()+t3.Size()+t4.Size()) {
 14665  			break
 14666  		}
 14667  		v.reset(OpStore)
 14668  		v.Aux = typeToAux(t2)
 14669  		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
 14670  		v0.AuxInt = int64ToAuxInt(o2)
 14671  		v0.AddArg(dst)
 14672  		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 14673  		v1.Aux = typeToAux(t3)
 14674  		v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
 14675  		v2.AuxInt = int64ToAuxInt(o3)
 14676  		v2.AddArg(dst)
 14677  		v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 14678  		v3.Aux = typeToAux(t4)
 14679  		v4 := b.NewValue0(v.Pos, OpOffPtr, tt4)
 14680  		v4.AuxInt = int64ToAuxInt(0)
 14681  		v4.AddArg(dst)
 14682  		v3.AddArg3(v4, d3, mem)
 14683  		v1.AddArg3(v2, d2, v3)
 14684  		v.AddArg3(v0, d1, v1)
 14685  		return true
 14686  	}
 14687  	// match: (Move {t1} [n] dst p1 mem:(Store {t2} op2:(OffPtr <tt2> [o2] p2) d1 (Store {t3} op3:(OffPtr <tt3> [o3] p3) d2 (Store {t4} op4:(OffPtr <tt4> [o4] p4) d3 (Store {t5} op5:(OffPtr <tt5> [0] p5) d4 _)))))
 14688  	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && registerizable(b, t5) && o4 == t5.Size() && o3-o4 == t4.Size() && o2-o3 == t3.Size() && n == t2.Size() + t3.Size() + t4.Size() + t5.Size()
 14689  	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Store {t4} (OffPtr <tt4> [o4] dst) d3 (Store {t5} (OffPtr <tt5> [0] dst) d4 mem))))
 14690  	for {
 14691  		n := auxIntToInt64(v.AuxInt)
 14692  		t1 := auxToType(v.Aux)
 14693  		dst := v_0
 14694  		p1 := v_1
 14695  		mem := v_2
 14696  		if mem.Op != OpStore {
 14697  			break
 14698  		}
 14699  		t2 := auxToType(mem.Aux)
 14700  		_ = mem.Args[2]
 14701  		op2 := mem.Args[0]
 14702  		if op2.Op != OpOffPtr {
 14703  			break
 14704  		}
 14705  		tt2 := op2.Type
 14706  		o2 := auxIntToInt64(op2.AuxInt)
 14707  		p2 := op2.Args[0]
 14708  		d1 := mem.Args[1]
 14709  		mem_2 := mem.Args[2]
 14710  		if mem_2.Op != OpStore {
 14711  			break
 14712  		}
 14713  		t3 := auxToType(mem_2.Aux)
 14714  		_ = mem_2.Args[2]
 14715  		op3 := mem_2.Args[0]
 14716  		if op3.Op != OpOffPtr {
 14717  			break
 14718  		}
 14719  		tt3 := op3.Type
 14720  		o3 := auxIntToInt64(op3.AuxInt)
 14721  		p3 := op3.Args[0]
 14722  		d2 := mem_2.Args[1]
 14723  		mem_2_2 := mem_2.Args[2]
 14724  		if mem_2_2.Op != OpStore {
 14725  			break
 14726  		}
 14727  		t4 := auxToType(mem_2_2.Aux)
 14728  		_ = mem_2_2.Args[2]
 14729  		op4 := mem_2_2.Args[0]
 14730  		if op4.Op != OpOffPtr {
 14731  			break
 14732  		}
 14733  		tt4 := op4.Type
 14734  		o4 := auxIntToInt64(op4.AuxInt)
 14735  		p4 := op4.Args[0]
 14736  		d3 := mem_2_2.Args[1]
 14737  		mem_2_2_2 := mem_2_2.Args[2]
 14738  		if mem_2_2_2.Op != OpStore {
 14739  			break
 14740  		}
 14741  		t5 := auxToType(mem_2_2_2.Aux)
 14742  		d4 := mem_2_2_2.Args[1]
 14743  		op5 := mem_2_2_2.Args[0]
 14744  		if op5.Op != OpOffPtr {
 14745  			break
 14746  		}
 14747  		tt5 := op5.Type
 14748  		if auxIntToInt64(op5.AuxInt) != 0 {
 14749  			break
 14750  		}
 14751  		p5 := op5.Args[0]
 14752  		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && registerizable(b, t5) && o4 == t5.Size() && o3-o4 == t4.Size() && o2-o3 == t3.Size() && n == t2.Size()+t3.Size()+t4.Size()+t5.Size()) {
 14753  			break
 14754  		}
 14755  		v.reset(OpStore)
 14756  		v.Aux = typeToAux(t2)
 14757  		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
 14758  		v0.AuxInt = int64ToAuxInt(o2)
 14759  		v0.AddArg(dst)
 14760  		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 14761  		v1.Aux = typeToAux(t3)
 14762  		v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
 14763  		v2.AuxInt = int64ToAuxInt(o3)
 14764  		v2.AddArg(dst)
 14765  		v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 14766  		v3.Aux = typeToAux(t4)
 14767  		v4 := b.NewValue0(v.Pos, OpOffPtr, tt4)
 14768  		v4.AuxInt = int64ToAuxInt(o4)
 14769  		v4.AddArg(dst)
 14770  		v5 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 14771  		v5.Aux = typeToAux(t5)
 14772  		v6 := b.NewValue0(v.Pos, OpOffPtr, tt5)
 14773  		v6.AuxInt = int64ToAuxInt(0)
 14774  		v6.AddArg(dst)
 14775  		v5.AddArg3(v6, d4, mem)
 14776  		v3.AddArg3(v4, d3, v5)
 14777  		v1.AddArg3(v2, d2, v3)
 14778  		v.AddArg3(v0, d1, v1)
 14779  		return true
 14780  	}
 14781  	// match: (Move {t1} [n] dst p1 mem:(VarDef (Store {t2} op2:(OffPtr <tt2> [o2] p2) d1 (Store {t3} op3:(OffPtr <tt3> [0] p3) d2 _))))
 14782  	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && o2 == t3.Size() && n == t2.Size() + t3.Size()
 14783  	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [0] dst) d2 mem))
 14784  	for {
 14785  		n := auxIntToInt64(v.AuxInt)
 14786  		t1 := auxToType(v.Aux)
 14787  		dst := v_0
 14788  		p1 := v_1
 14789  		mem := v_2
 14790  		if mem.Op != OpVarDef {
 14791  			break
 14792  		}
 14793  		mem_0 := mem.Args[0]
 14794  		if mem_0.Op != OpStore {
 14795  			break
 14796  		}
 14797  		t2 := auxToType(mem_0.Aux)
 14798  		_ = mem_0.Args[2]
 14799  		op2 := mem_0.Args[0]
 14800  		if op2.Op != OpOffPtr {
 14801  			break
 14802  		}
 14803  		tt2 := op2.Type
 14804  		o2 := auxIntToInt64(op2.AuxInt)
 14805  		p2 := op2.Args[0]
 14806  		d1 := mem_0.Args[1]
 14807  		mem_0_2 := mem_0.Args[2]
 14808  		if mem_0_2.Op != OpStore {
 14809  			break
 14810  		}
 14811  		t3 := auxToType(mem_0_2.Aux)
 14812  		d2 := mem_0_2.Args[1]
 14813  		op3 := mem_0_2.Args[0]
 14814  		if op3.Op != OpOffPtr {
 14815  			break
 14816  		}
 14817  		tt3 := op3.Type
 14818  		if auxIntToInt64(op3.AuxInt) != 0 {
 14819  			break
 14820  		}
 14821  		p3 := op3.Args[0]
 14822  		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && o2 == t3.Size() && n == t2.Size()+t3.Size()) {
 14823  			break
 14824  		}
 14825  		v.reset(OpStore)
 14826  		v.Aux = typeToAux(t2)
 14827  		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
 14828  		v0.AuxInt = int64ToAuxInt(o2)
 14829  		v0.AddArg(dst)
 14830  		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 14831  		v1.Aux = typeToAux(t3)
 14832  		v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
 14833  		v2.AuxInt = int64ToAuxInt(0)
 14834  		v2.AddArg(dst)
 14835  		v1.AddArg3(v2, d2, mem)
 14836  		v.AddArg3(v0, d1, v1)
 14837  		return true
 14838  	}
 14839  	// match: (Move {t1} [n] dst p1 mem:(VarDef (Store {t2} op2:(OffPtr <tt2> [o2] p2) d1 (Store {t3} op3:(OffPtr <tt3> [o3] p3) d2 (Store {t4} op4:(OffPtr <tt4> [0] p4) d3 _)))))
 14840  	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && o3 == t4.Size() && o2-o3 == t3.Size() && n == t2.Size() + t3.Size() + t4.Size()
 14841  	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Store {t4} (OffPtr <tt4> [0] dst) d3 mem)))
 14842  	for {
 14843  		n := auxIntToInt64(v.AuxInt)
 14844  		t1 := auxToType(v.Aux)
 14845  		dst := v_0
 14846  		p1 := v_1
 14847  		mem := v_2
 14848  		if mem.Op != OpVarDef {
 14849  			break
 14850  		}
 14851  		mem_0 := mem.Args[0]
 14852  		if mem_0.Op != OpStore {
 14853  			break
 14854  		}
 14855  		t2 := auxToType(mem_0.Aux)
 14856  		_ = mem_0.Args[2]
 14857  		op2 := mem_0.Args[0]
 14858  		if op2.Op != OpOffPtr {
 14859  			break
 14860  		}
 14861  		tt2 := op2.Type
 14862  		o2 := auxIntToInt64(op2.AuxInt)
 14863  		p2 := op2.Args[0]
 14864  		d1 := mem_0.Args[1]
 14865  		mem_0_2 := mem_0.Args[2]
 14866  		if mem_0_2.Op != OpStore {
 14867  			break
 14868  		}
 14869  		t3 := auxToType(mem_0_2.Aux)
 14870  		_ = mem_0_2.Args[2]
 14871  		op3 := mem_0_2.Args[0]
 14872  		if op3.Op != OpOffPtr {
 14873  			break
 14874  		}
 14875  		tt3 := op3.Type
 14876  		o3 := auxIntToInt64(op3.AuxInt)
 14877  		p3 := op3.Args[0]
 14878  		d2 := mem_0_2.Args[1]
 14879  		mem_0_2_2 := mem_0_2.Args[2]
 14880  		if mem_0_2_2.Op != OpStore {
 14881  			break
 14882  		}
 14883  		t4 := auxToType(mem_0_2_2.Aux)
 14884  		d3 := mem_0_2_2.Args[1]
 14885  		op4 := mem_0_2_2.Args[0]
 14886  		if op4.Op != OpOffPtr {
 14887  			break
 14888  		}
 14889  		tt4 := op4.Type
 14890  		if auxIntToInt64(op4.AuxInt) != 0 {
 14891  			break
 14892  		}
 14893  		p4 := op4.Args[0]
 14894  		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && o3 == t4.Size() && o2-o3 == t3.Size() && n == t2.Size()+t3.Size()+t4.Size()) {
 14895  			break
 14896  		}
 14897  		v.reset(OpStore)
 14898  		v.Aux = typeToAux(t2)
 14899  		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
 14900  		v0.AuxInt = int64ToAuxInt(o2)
 14901  		v0.AddArg(dst)
 14902  		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 14903  		v1.Aux = typeToAux(t3)
 14904  		v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
 14905  		v2.AuxInt = int64ToAuxInt(o3)
 14906  		v2.AddArg(dst)
 14907  		v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 14908  		v3.Aux = typeToAux(t4)
 14909  		v4 := b.NewValue0(v.Pos, OpOffPtr, tt4)
 14910  		v4.AuxInt = int64ToAuxInt(0)
 14911  		v4.AddArg(dst)
 14912  		v3.AddArg3(v4, d3, mem)
 14913  		v1.AddArg3(v2, d2, v3)
 14914  		v.AddArg3(v0, d1, v1)
 14915  		return true
 14916  	}
 14917  	// match: (Move {t1} [n] dst p1 mem:(VarDef (Store {t2} op2:(OffPtr <tt2> [o2] p2) d1 (Store {t3} op3:(OffPtr <tt3> [o3] p3) d2 (Store {t4} op4:(OffPtr <tt4> [o4] p4) d3 (Store {t5} op5:(OffPtr <tt5> [0] p5) d4 _))))))
 14918  	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && registerizable(b, t5) && o4 == t5.Size() && o3-o4 == t4.Size() && o2-o3 == t3.Size() && n == t2.Size() + t3.Size() + t4.Size() + t5.Size()
 14919  	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Store {t4} (OffPtr <tt4> [o4] dst) d3 (Store {t5} (OffPtr <tt5> [0] dst) d4 mem))))
 14920  	for {
 14921  		n := auxIntToInt64(v.AuxInt)
 14922  		t1 := auxToType(v.Aux)
 14923  		dst := v_0
 14924  		p1 := v_1
 14925  		mem := v_2
 14926  		if mem.Op != OpVarDef {
 14927  			break
 14928  		}
 14929  		mem_0 := mem.Args[0]
 14930  		if mem_0.Op != OpStore {
 14931  			break
 14932  		}
 14933  		t2 := auxToType(mem_0.Aux)
 14934  		_ = mem_0.Args[2]
 14935  		op2 := mem_0.Args[0]
 14936  		if op2.Op != OpOffPtr {
 14937  			break
 14938  		}
 14939  		tt2 := op2.Type
 14940  		o2 := auxIntToInt64(op2.AuxInt)
 14941  		p2 := op2.Args[0]
 14942  		d1 := mem_0.Args[1]
 14943  		mem_0_2 := mem_0.Args[2]
 14944  		if mem_0_2.Op != OpStore {
 14945  			break
 14946  		}
 14947  		t3 := auxToType(mem_0_2.Aux)
 14948  		_ = mem_0_2.Args[2]
 14949  		op3 := mem_0_2.Args[0]
 14950  		if op3.Op != OpOffPtr {
 14951  			break
 14952  		}
 14953  		tt3 := op3.Type
 14954  		o3 := auxIntToInt64(op3.AuxInt)
 14955  		p3 := op3.Args[0]
 14956  		d2 := mem_0_2.Args[1]
 14957  		mem_0_2_2 := mem_0_2.Args[2]
 14958  		if mem_0_2_2.Op != OpStore {
 14959  			break
 14960  		}
 14961  		t4 := auxToType(mem_0_2_2.Aux)
 14962  		_ = mem_0_2_2.Args[2]
 14963  		op4 := mem_0_2_2.Args[0]
 14964  		if op4.Op != OpOffPtr {
 14965  			break
 14966  		}
 14967  		tt4 := op4.Type
 14968  		o4 := auxIntToInt64(op4.AuxInt)
 14969  		p4 := op4.Args[0]
 14970  		d3 := mem_0_2_2.Args[1]
 14971  		mem_0_2_2_2 := mem_0_2_2.Args[2]
 14972  		if mem_0_2_2_2.Op != OpStore {
 14973  			break
 14974  		}
 14975  		t5 := auxToType(mem_0_2_2_2.Aux)
 14976  		d4 := mem_0_2_2_2.Args[1]
 14977  		op5 := mem_0_2_2_2.Args[0]
 14978  		if op5.Op != OpOffPtr {
 14979  			break
 14980  		}
 14981  		tt5 := op5.Type
 14982  		if auxIntToInt64(op5.AuxInt) != 0 {
 14983  			break
 14984  		}
 14985  		p5 := op5.Args[0]
 14986  		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && registerizable(b, t5) && o4 == t5.Size() && o3-o4 == t4.Size() && o2-o3 == t3.Size() && n == t2.Size()+t3.Size()+t4.Size()+t5.Size()) {
 14987  			break
 14988  		}
 14989  		v.reset(OpStore)
 14990  		v.Aux = typeToAux(t2)
 14991  		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
 14992  		v0.AuxInt = int64ToAuxInt(o2)
 14993  		v0.AddArg(dst)
 14994  		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 14995  		v1.Aux = typeToAux(t3)
 14996  		v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
 14997  		v2.AuxInt = int64ToAuxInt(o3)
 14998  		v2.AddArg(dst)
 14999  		v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 15000  		v3.Aux = typeToAux(t4)
 15001  		v4 := b.NewValue0(v.Pos, OpOffPtr, tt4)
 15002  		v4.AuxInt = int64ToAuxInt(o4)
 15003  		v4.AddArg(dst)
 15004  		v5 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 15005  		v5.Aux = typeToAux(t5)
 15006  		v6 := b.NewValue0(v.Pos, OpOffPtr, tt5)
 15007  		v6.AuxInt = int64ToAuxInt(0)
 15008  		v6.AddArg(dst)
 15009  		v5.AddArg3(v6, d4, mem)
 15010  		v3.AddArg3(v4, d3, v5)
 15011  		v1.AddArg3(v2, d2, v3)
 15012  		v.AddArg3(v0, d1, v1)
 15013  		return true
 15014  	}
 15015  	// match: (Move {t1} [n] dst p1 mem:(Store {t2} op2:(OffPtr <tt2> [o2] p2) d1 (Zero {t3} [n] p3 _)))
 15016  	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && registerizable(b, t2) && n >= o2 + t2.Size()
 15017  	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Zero {t1} [n] dst mem))
 15018  	for {
 15019  		n := auxIntToInt64(v.AuxInt)
 15020  		t1 := auxToType(v.Aux)
 15021  		dst := v_0
 15022  		p1 := v_1
 15023  		mem := v_2
 15024  		if mem.Op != OpStore {
 15025  			break
 15026  		}
 15027  		t2 := auxToType(mem.Aux)
 15028  		_ = mem.Args[2]
 15029  		op2 := mem.Args[0]
 15030  		if op2.Op != OpOffPtr {
 15031  			break
 15032  		}
 15033  		tt2 := op2.Type
 15034  		o2 := auxIntToInt64(op2.AuxInt)
 15035  		p2 := op2.Args[0]
 15036  		d1 := mem.Args[1]
 15037  		mem_2 := mem.Args[2]
 15038  		if mem_2.Op != OpZero || auxIntToInt64(mem_2.AuxInt) != n {
 15039  			break
 15040  		}
 15041  		t3 := auxToType(mem_2.Aux)
 15042  		p3 := mem_2.Args[0]
 15043  		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && registerizable(b, t2) && n >= o2+t2.Size()) {
 15044  			break
 15045  		}
 15046  		v.reset(OpStore)
 15047  		v.Aux = typeToAux(t2)
 15048  		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
 15049  		v0.AuxInt = int64ToAuxInt(o2)
 15050  		v0.AddArg(dst)
 15051  		v1 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
 15052  		v1.AuxInt = int64ToAuxInt(n)
 15053  		v1.Aux = typeToAux(t1)
 15054  		v1.AddArg2(dst, mem)
 15055  		v.AddArg3(v0, d1, v1)
 15056  		return true
 15057  	}
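	// Explanatory note (added for readability; not part of the generated output).
	// This rule and the longer Store-chain variants below handle a source that
	// was built as "zero everything, then store a few fields": the Move is
	// replaced by a Zero of the destination with the individual Stores replayed
	// on top of it, so only the explicitly written fields are copied by value.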
 15058  	// match: (Move {t1} [n] dst p1 mem:(Store {t2} (OffPtr <tt2> [o2] p2) d1 (Store {t3} (OffPtr <tt3> [o3] p3) d2 (Zero {t4} [n] p4 _))))
 15059  	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && n >= o2 + t2.Size() && n >= o3 + t3.Size()
 15060  	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Zero {t1} [n] dst mem)))
 15061  	for {
 15062  		n := auxIntToInt64(v.AuxInt)
 15063  		t1 := auxToType(v.Aux)
 15064  		dst := v_0
 15065  		p1 := v_1
 15066  		mem := v_2
 15067  		if mem.Op != OpStore {
 15068  			break
 15069  		}
 15070  		t2 := auxToType(mem.Aux)
 15071  		_ = mem.Args[2]
 15072  		mem_0 := mem.Args[0]
 15073  		if mem_0.Op != OpOffPtr {
 15074  			break
 15075  		}
 15076  		tt2 := mem_0.Type
 15077  		o2 := auxIntToInt64(mem_0.AuxInt)
 15078  		p2 := mem_0.Args[0]
 15079  		d1 := mem.Args[1]
 15080  		mem_2 := mem.Args[2]
 15081  		if mem_2.Op != OpStore {
 15082  			break
 15083  		}
 15084  		t3 := auxToType(mem_2.Aux)
 15085  		_ = mem_2.Args[2]
 15086  		mem_2_0 := mem_2.Args[0]
 15087  		if mem_2_0.Op != OpOffPtr {
 15088  			break
 15089  		}
 15090  		tt3 := mem_2_0.Type
 15091  		o3 := auxIntToInt64(mem_2_0.AuxInt)
 15092  		p3 := mem_2_0.Args[0]
 15093  		d2 := mem_2.Args[1]
 15094  		mem_2_2 := mem_2.Args[2]
 15095  		if mem_2_2.Op != OpZero || auxIntToInt64(mem_2_2.AuxInt) != n {
 15096  			break
 15097  		}
 15098  		t4 := auxToType(mem_2_2.Aux)
 15099  		p4 := mem_2_2.Args[0]
 15100  		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && n >= o2+t2.Size() && n >= o3+t3.Size()) {
 15101  			break
 15102  		}
 15103  		v.reset(OpStore)
 15104  		v.Aux = typeToAux(t2)
 15105  		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
 15106  		v0.AuxInt = int64ToAuxInt(o2)
 15107  		v0.AddArg(dst)
 15108  		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 15109  		v1.Aux = typeToAux(t3)
 15110  		v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
 15111  		v2.AuxInt = int64ToAuxInt(o3)
 15112  		v2.AddArg(dst)
 15113  		v3 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
 15114  		v3.AuxInt = int64ToAuxInt(n)
 15115  		v3.Aux = typeToAux(t1)
 15116  		v3.AddArg2(dst, mem)
 15117  		v1.AddArg3(v2, d2, v3)
 15118  		v.AddArg3(v0, d1, v1)
 15119  		return true
 15120  	}
 15121  	// match: (Move {t1} [n] dst p1 mem:(Store {t2} (OffPtr <tt2> [o2] p2) d1 (Store {t3} (OffPtr <tt3> [o3] p3) d2 (Store {t4} (OffPtr <tt4> [o4] p4) d3 (Zero {t5} [n] p5 _)))))
 15122  	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && n >= o2 + t2.Size() && n >= o3 + t3.Size() && n >= o4 + t4.Size()
 15123  	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Store {t4} (OffPtr <tt4> [o4] dst) d3 (Zero {t1} [n] dst mem))))
 15124  	for {
 15125  		n := auxIntToInt64(v.AuxInt)
 15126  		t1 := auxToType(v.Aux)
 15127  		dst := v_0
 15128  		p1 := v_1
 15129  		mem := v_2
 15130  		if mem.Op != OpStore {
 15131  			break
 15132  		}
 15133  		t2 := auxToType(mem.Aux)
 15134  		_ = mem.Args[2]
 15135  		mem_0 := mem.Args[0]
 15136  		if mem_0.Op != OpOffPtr {
 15137  			break
 15138  		}
 15139  		tt2 := mem_0.Type
 15140  		o2 := auxIntToInt64(mem_0.AuxInt)
 15141  		p2 := mem_0.Args[0]
 15142  		d1 := mem.Args[1]
 15143  		mem_2 := mem.Args[2]
 15144  		if mem_2.Op != OpStore {
 15145  			break
 15146  		}
 15147  		t3 := auxToType(mem_2.Aux)
 15148  		_ = mem_2.Args[2]
 15149  		mem_2_0 := mem_2.Args[0]
 15150  		if mem_2_0.Op != OpOffPtr {
 15151  			break
 15152  		}
 15153  		tt3 := mem_2_0.Type
 15154  		o3 := auxIntToInt64(mem_2_0.AuxInt)
 15155  		p3 := mem_2_0.Args[0]
 15156  		d2 := mem_2.Args[1]
 15157  		mem_2_2 := mem_2.Args[2]
 15158  		if mem_2_2.Op != OpStore {
 15159  			break
 15160  		}
 15161  		t4 := auxToType(mem_2_2.Aux)
 15162  		_ = mem_2_2.Args[2]
 15163  		mem_2_2_0 := mem_2_2.Args[0]
 15164  		if mem_2_2_0.Op != OpOffPtr {
 15165  			break
 15166  		}
 15167  		tt4 := mem_2_2_0.Type
 15168  		o4 := auxIntToInt64(mem_2_2_0.AuxInt)
 15169  		p4 := mem_2_2_0.Args[0]
 15170  		d3 := mem_2_2.Args[1]
 15171  		mem_2_2_2 := mem_2_2.Args[2]
 15172  		if mem_2_2_2.Op != OpZero || auxIntToInt64(mem_2_2_2.AuxInt) != n {
 15173  			break
 15174  		}
 15175  		t5 := auxToType(mem_2_2_2.Aux)
 15176  		p5 := mem_2_2_2.Args[0]
 15177  		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && n >= o2+t2.Size() && n >= o3+t3.Size() && n >= o4+t4.Size()) {
 15178  			break
 15179  		}
 15180  		v.reset(OpStore)
 15181  		v.Aux = typeToAux(t2)
 15182  		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
 15183  		v0.AuxInt = int64ToAuxInt(o2)
 15184  		v0.AddArg(dst)
 15185  		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 15186  		v1.Aux = typeToAux(t3)
 15187  		v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
 15188  		v2.AuxInt = int64ToAuxInt(o3)
 15189  		v2.AddArg(dst)
 15190  		v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 15191  		v3.Aux = typeToAux(t4)
 15192  		v4 := b.NewValue0(v.Pos, OpOffPtr, tt4)
 15193  		v4.AuxInt = int64ToAuxInt(o4)
 15194  		v4.AddArg(dst)
 15195  		v5 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
 15196  		v5.AuxInt = int64ToAuxInt(n)
 15197  		v5.Aux = typeToAux(t1)
 15198  		v5.AddArg2(dst, mem)
 15199  		v3.AddArg3(v4, d3, v5)
 15200  		v1.AddArg3(v2, d2, v3)
 15201  		v.AddArg3(v0, d1, v1)
 15202  		return true
 15203  	}
 15204  	// match: (Move {t1} [n] dst p1 mem:(Store {t2} (OffPtr <tt2> [o2] p2) d1 (Store {t3} (OffPtr <tt3> [o3] p3) d2 (Store {t4} (OffPtr <tt4> [o4] p4) d3 (Store {t5} (OffPtr <tt5> [o5] p5) d4 (Zero {t6} [n] p6 _))))))
 15205  	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && isSamePtr(p5, p6) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && t6.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && registerizable(b, t5) && n >= o2 + t2.Size() && n >= o3 + t3.Size() && n >= o4 + t4.Size() && n >= o5 + t5.Size()
 15206  	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Store {t4} (OffPtr <tt4> [o4] dst) d3 (Store {t5} (OffPtr <tt5> [o5] dst) d4 (Zero {t1} [n] dst mem)))))
 15207  	for {
 15208  		n := auxIntToInt64(v.AuxInt)
 15209  		t1 := auxToType(v.Aux)
 15210  		dst := v_0
 15211  		p1 := v_1
 15212  		mem := v_2
 15213  		if mem.Op != OpStore {
 15214  			break
 15215  		}
 15216  		t2 := auxToType(mem.Aux)
 15217  		_ = mem.Args[2]
 15218  		mem_0 := mem.Args[0]
 15219  		if mem_0.Op != OpOffPtr {
 15220  			break
 15221  		}
 15222  		tt2 := mem_0.Type
 15223  		o2 := auxIntToInt64(mem_0.AuxInt)
 15224  		p2 := mem_0.Args[0]
 15225  		d1 := mem.Args[1]
 15226  		mem_2 := mem.Args[2]
 15227  		if mem_2.Op != OpStore {
 15228  			break
 15229  		}
 15230  		t3 := auxToType(mem_2.Aux)
 15231  		_ = mem_2.Args[2]
 15232  		mem_2_0 := mem_2.Args[0]
 15233  		if mem_2_0.Op != OpOffPtr {
 15234  			break
 15235  		}
 15236  		tt3 := mem_2_0.Type
 15237  		o3 := auxIntToInt64(mem_2_0.AuxInt)
 15238  		p3 := mem_2_0.Args[0]
 15239  		d2 := mem_2.Args[1]
 15240  		mem_2_2 := mem_2.Args[2]
 15241  		if mem_2_2.Op != OpStore {
 15242  			break
 15243  		}
 15244  		t4 := auxToType(mem_2_2.Aux)
 15245  		_ = mem_2_2.Args[2]
 15246  		mem_2_2_0 := mem_2_2.Args[0]
 15247  		if mem_2_2_0.Op != OpOffPtr {
 15248  			break
 15249  		}
 15250  		tt4 := mem_2_2_0.Type
 15251  		o4 := auxIntToInt64(mem_2_2_0.AuxInt)
 15252  		p4 := mem_2_2_0.Args[0]
 15253  		d3 := mem_2_2.Args[1]
 15254  		mem_2_2_2 := mem_2_2.Args[2]
 15255  		if mem_2_2_2.Op != OpStore {
 15256  			break
 15257  		}
 15258  		t5 := auxToType(mem_2_2_2.Aux)
 15259  		_ = mem_2_2_2.Args[2]
 15260  		mem_2_2_2_0 := mem_2_2_2.Args[0]
 15261  		if mem_2_2_2_0.Op != OpOffPtr {
 15262  			break
 15263  		}
 15264  		tt5 := mem_2_2_2_0.Type
 15265  		o5 := auxIntToInt64(mem_2_2_2_0.AuxInt)
 15266  		p5 := mem_2_2_2_0.Args[0]
 15267  		d4 := mem_2_2_2.Args[1]
 15268  		mem_2_2_2_2 := mem_2_2_2.Args[2]
 15269  		if mem_2_2_2_2.Op != OpZero || auxIntToInt64(mem_2_2_2_2.AuxInt) != n {
 15270  			break
 15271  		}
 15272  		t6 := auxToType(mem_2_2_2_2.Aux)
 15273  		p6 := mem_2_2_2_2.Args[0]
 15274  		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && isSamePtr(p5, p6) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && t6.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && registerizable(b, t5) && n >= o2+t2.Size() && n >= o3+t3.Size() && n >= o4+t4.Size() && n >= o5+t5.Size()) {
 15275  			break
 15276  		}
 15277  		v.reset(OpStore)
 15278  		v.Aux = typeToAux(t2)
 15279  		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
 15280  		v0.AuxInt = int64ToAuxInt(o2)
 15281  		v0.AddArg(dst)
 15282  		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 15283  		v1.Aux = typeToAux(t3)
 15284  		v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
 15285  		v2.AuxInt = int64ToAuxInt(o3)
 15286  		v2.AddArg(dst)
 15287  		v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 15288  		v3.Aux = typeToAux(t4)
 15289  		v4 := b.NewValue0(v.Pos, OpOffPtr, tt4)
 15290  		v4.AuxInt = int64ToAuxInt(o4)
 15291  		v4.AddArg(dst)
 15292  		v5 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 15293  		v5.Aux = typeToAux(t5)
 15294  		v6 := b.NewValue0(v.Pos, OpOffPtr, tt5)
 15295  		v6.AuxInt = int64ToAuxInt(o5)
 15296  		v6.AddArg(dst)
 15297  		v7 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
 15298  		v7.AuxInt = int64ToAuxInt(n)
 15299  		v7.Aux = typeToAux(t1)
 15300  		v7.AddArg2(dst, mem)
 15301  		v5.AddArg3(v6, d4, v7)
 15302  		v3.AddArg3(v4, d3, v5)
 15303  		v1.AddArg3(v2, d2, v3)
 15304  		v.AddArg3(v0, d1, v1)
 15305  		return true
 15306  	}
 15307  	// match: (Move {t1} [n] dst p1 mem:(VarDef (Store {t2} op2:(OffPtr <tt2> [o2] p2) d1 (Zero {t3} [n] p3 _))))
 15308  	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && registerizable(b, t2) && n >= o2 + t2.Size()
 15309  	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Zero {t1} [n] dst mem))
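	// This and the following variants repeat the Store-over-Zero rules above,
	// but look through an OpVarDef wrapping the store chain (typically emitted
	// when the source is a freshly defined local); the VarDef'd memory is kept
	// as the input to the rebuilt Zero.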
 15310  	for {
 15311  		n := auxIntToInt64(v.AuxInt)
 15312  		t1 := auxToType(v.Aux)
 15313  		dst := v_0
 15314  		p1 := v_1
 15315  		mem := v_2
 15316  		if mem.Op != OpVarDef {
 15317  			break
 15318  		}
 15319  		mem_0 := mem.Args[0]
 15320  		if mem_0.Op != OpStore {
 15321  			break
 15322  		}
 15323  		t2 := auxToType(mem_0.Aux)
 15324  		_ = mem_0.Args[2]
 15325  		op2 := mem_0.Args[0]
 15326  		if op2.Op != OpOffPtr {
 15327  			break
 15328  		}
 15329  		tt2 := op2.Type
 15330  		o2 := auxIntToInt64(op2.AuxInt)
 15331  		p2 := op2.Args[0]
 15332  		d1 := mem_0.Args[1]
 15333  		mem_0_2 := mem_0.Args[2]
 15334  		if mem_0_2.Op != OpZero || auxIntToInt64(mem_0_2.AuxInt) != n {
 15335  			break
 15336  		}
 15337  		t3 := auxToType(mem_0_2.Aux)
 15338  		p3 := mem_0_2.Args[0]
 15339  		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && registerizable(b, t2) && n >= o2+t2.Size()) {
 15340  			break
 15341  		}
 15342  		v.reset(OpStore)
 15343  		v.Aux = typeToAux(t2)
 15344  		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
 15345  		v0.AuxInt = int64ToAuxInt(o2)
 15346  		v0.AddArg(dst)
 15347  		v1 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
 15348  		v1.AuxInt = int64ToAuxInt(n)
 15349  		v1.Aux = typeToAux(t1)
 15350  		v1.AddArg2(dst, mem)
 15351  		v.AddArg3(v0, d1, v1)
 15352  		return true
 15353  	}
 15354  	// match: (Move {t1} [n] dst p1 mem:(VarDef (Store {t2} (OffPtr <tt2> [o2] p2) d1 (Store {t3} (OffPtr <tt3> [o3] p3) d2 (Zero {t4} [n] p4 _)))))
 15355  	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && n >= o2 + t2.Size() && n >= o3 + t3.Size()
 15356  	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Zero {t1} [n] dst mem)))
 15357  	for {
 15358  		n := auxIntToInt64(v.AuxInt)
 15359  		t1 := auxToType(v.Aux)
 15360  		dst := v_0
 15361  		p1 := v_1
 15362  		mem := v_2
 15363  		if mem.Op != OpVarDef {
 15364  			break
 15365  		}
 15366  		mem_0 := mem.Args[0]
 15367  		if mem_0.Op != OpStore {
 15368  			break
 15369  		}
 15370  		t2 := auxToType(mem_0.Aux)
 15371  		_ = mem_0.Args[2]
 15372  		mem_0_0 := mem_0.Args[0]
 15373  		if mem_0_0.Op != OpOffPtr {
 15374  			break
 15375  		}
 15376  		tt2 := mem_0_0.Type
 15377  		o2 := auxIntToInt64(mem_0_0.AuxInt)
 15378  		p2 := mem_0_0.Args[0]
 15379  		d1 := mem_0.Args[1]
 15380  		mem_0_2 := mem_0.Args[2]
 15381  		if mem_0_2.Op != OpStore {
 15382  			break
 15383  		}
 15384  		t3 := auxToType(mem_0_2.Aux)
 15385  		_ = mem_0_2.Args[2]
 15386  		mem_0_2_0 := mem_0_2.Args[0]
 15387  		if mem_0_2_0.Op != OpOffPtr {
 15388  			break
 15389  		}
 15390  		tt3 := mem_0_2_0.Type
 15391  		o3 := auxIntToInt64(mem_0_2_0.AuxInt)
 15392  		p3 := mem_0_2_0.Args[0]
 15393  		d2 := mem_0_2.Args[1]
 15394  		mem_0_2_2 := mem_0_2.Args[2]
 15395  		if mem_0_2_2.Op != OpZero || auxIntToInt64(mem_0_2_2.AuxInt) != n {
 15396  			break
 15397  		}
 15398  		t4 := auxToType(mem_0_2_2.Aux)
 15399  		p4 := mem_0_2_2.Args[0]
 15400  		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && n >= o2+t2.Size() && n >= o3+t3.Size()) {
 15401  			break
 15402  		}
 15403  		v.reset(OpStore)
 15404  		v.Aux = typeToAux(t2)
 15405  		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
 15406  		v0.AuxInt = int64ToAuxInt(o2)
 15407  		v0.AddArg(dst)
 15408  		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 15409  		v1.Aux = typeToAux(t3)
 15410  		v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
 15411  		v2.AuxInt = int64ToAuxInt(o3)
 15412  		v2.AddArg(dst)
 15413  		v3 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
 15414  		v3.AuxInt = int64ToAuxInt(n)
 15415  		v3.Aux = typeToAux(t1)
 15416  		v3.AddArg2(dst, mem)
 15417  		v1.AddArg3(v2, d2, v3)
 15418  		v.AddArg3(v0, d1, v1)
 15419  		return true
 15420  	}
 15421  	// match: (Move {t1} [n] dst p1 mem:(VarDef (Store {t2} (OffPtr <tt2> [o2] p2) d1 (Store {t3} (OffPtr <tt3> [o3] p3) d2 (Store {t4} (OffPtr <tt4> [o4] p4) d3 (Zero {t5} [n] p5 _))))))
 15422  	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && n >= o2 + t2.Size() && n >= o3 + t3.Size() && n >= o4 + t4.Size()
 15423  	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Store {t4} (OffPtr <tt4> [o4] dst) d3 (Zero {t1} [n] dst mem))))
 15424  	for {
 15425  		n := auxIntToInt64(v.AuxInt)
 15426  		t1 := auxToType(v.Aux)
 15427  		dst := v_0
 15428  		p1 := v_1
 15429  		mem := v_2
 15430  		if mem.Op != OpVarDef {
 15431  			break
 15432  		}
 15433  		mem_0 := mem.Args[0]
 15434  		if mem_0.Op != OpStore {
 15435  			break
 15436  		}
 15437  		t2 := auxToType(mem_0.Aux)
 15438  		_ = mem_0.Args[2]
 15439  		mem_0_0 := mem_0.Args[0]
 15440  		if mem_0_0.Op != OpOffPtr {
 15441  			break
 15442  		}
 15443  		tt2 := mem_0_0.Type
 15444  		o2 := auxIntToInt64(mem_0_0.AuxInt)
 15445  		p2 := mem_0_0.Args[0]
 15446  		d1 := mem_0.Args[1]
 15447  		mem_0_2 := mem_0.Args[2]
 15448  		if mem_0_2.Op != OpStore {
 15449  			break
 15450  		}
 15451  		t3 := auxToType(mem_0_2.Aux)
 15452  		_ = mem_0_2.Args[2]
 15453  		mem_0_2_0 := mem_0_2.Args[0]
 15454  		if mem_0_2_0.Op != OpOffPtr {
 15455  			break
 15456  		}
 15457  		tt3 := mem_0_2_0.Type
 15458  		o3 := auxIntToInt64(mem_0_2_0.AuxInt)
 15459  		p3 := mem_0_2_0.Args[0]
 15460  		d2 := mem_0_2.Args[1]
 15461  		mem_0_2_2 := mem_0_2.Args[2]
 15462  		if mem_0_2_2.Op != OpStore {
 15463  			break
 15464  		}
 15465  		t4 := auxToType(mem_0_2_2.Aux)
 15466  		_ = mem_0_2_2.Args[2]
 15467  		mem_0_2_2_0 := mem_0_2_2.Args[0]
 15468  		if mem_0_2_2_0.Op != OpOffPtr {
 15469  			break
 15470  		}
 15471  		tt4 := mem_0_2_2_0.Type
 15472  		o4 := auxIntToInt64(mem_0_2_2_0.AuxInt)
 15473  		p4 := mem_0_2_2_0.Args[0]
 15474  		d3 := mem_0_2_2.Args[1]
 15475  		mem_0_2_2_2 := mem_0_2_2.Args[2]
 15476  		if mem_0_2_2_2.Op != OpZero || auxIntToInt64(mem_0_2_2_2.AuxInt) != n {
 15477  			break
 15478  		}
 15479  		t5 := auxToType(mem_0_2_2_2.Aux)
 15480  		p5 := mem_0_2_2_2.Args[0]
 15481  		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && n >= o2+t2.Size() && n >= o3+t3.Size() && n >= o4+t4.Size()) {
 15482  			break
 15483  		}
 15484  		v.reset(OpStore)
 15485  		v.Aux = typeToAux(t2)
 15486  		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
 15487  		v0.AuxInt = int64ToAuxInt(o2)
 15488  		v0.AddArg(dst)
 15489  		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 15490  		v1.Aux = typeToAux(t3)
 15491  		v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
 15492  		v2.AuxInt = int64ToAuxInt(o3)
 15493  		v2.AddArg(dst)
 15494  		v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 15495  		v3.Aux = typeToAux(t4)
 15496  		v4 := b.NewValue0(v.Pos, OpOffPtr, tt4)
 15497  		v4.AuxInt = int64ToAuxInt(o4)
 15498  		v4.AddArg(dst)
 15499  		v5 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
 15500  		v5.AuxInt = int64ToAuxInt(n)
 15501  		v5.Aux = typeToAux(t1)
 15502  		v5.AddArg2(dst, mem)
 15503  		v3.AddArg3(v4, d3, v5)
 15504  		v1.AddArg3(v2, d2, v3)
 15505  		v.AddArg3(v0, d1, v1)
 15506  		return true
 15507  	}
 15508  	// match: (Move {t1} [n] dst p1 mem:(VarDef (Store {t2} (OffPtr <tt2> [o2] p2) d1 (Store {t3} (OffPtr <tt3> [o3] p3) d2 (Store {t4} (OffPtr <tt4> [o4] p4) d3 (Store {t5} (OffPtr <tt5> [o5] p5) d4 (Zero {t6} [n] p6 _)))))))
 15509  	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && isSamePtr(p5, p6) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && t6.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && registerizable(b, t5) && n >= o2 + t2.Size() && n >= o3 + t3.Size() && n >= o4 + t4.Size() && n >= o5 + t5.Size()
 15510  	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Store {t4} (OffPtr <tt4> [o4] dst) d3 (Store {t5} (OffPtr <tt5> [o5] dst) d4 (Zero {t1} [n] dst mem)))))
 15511  	for {
 15512  		n := auxIntToInt64(v.AuxInt)
 15513  		t1 := auxToType(v.Aux)
 15514  		dst := v_0
 15515  		p1 := v_1
 15516  		mem := v_2
 15517  		if mem.Op != OpVarDef {
 15518  			break
 15519  		}
 15520  		mem_0 := mem.Args[0]
 15521  		if mem_0.Op != OpStore {
 15522  			break
 15523  		}
 15524  		t2 := auxToType(mem_0.Aux)
 15525  		_ = mem_0.Args[2]
 15526  		mem_0_0 := mem_0.Args[0]
 15527  		if mem_0_0.Op != OpOffPtr {
 15528  			break
 15529  		}
 15530  		tt2 := mem_0_0.Type
 15531  		o2 := auxIntToInt64(mem_0_0.AuxInt)
 15532  		p2 := mem_0_0.Args[0]
 15533  		d1 := mem_0.Args[1]
 15534  		mem_0_2 := mem_0.Args[2]
 15535  		if mem_0_2.Op != OpStore {
 15536  			break
 15537  		}
 15538  		t3 := auxToType(mem_0_2.Aux)
 15539  		_ = mem_0_2.Args[2]
 15540  		mem_0_2_0 := mem_0_2.Args[0]
 15541  		if mem_0_2_0.Op != OpOffPtr {
 15542  			break
 15543  		}
 15544  		tt3 := mem_0_2_0.Type
 15545  		o3 := auxIntToInt64(mem_0_2_0.AuxInt)
 15546  		p3 := mem_0_2_0.Args[0]
 15547  		d2 := mem_0_2.Args[1]
 15548  		mem_0_2_2 := mem_0_2.Args[2]
 15549  		if mem_0_2_2.Op != OpStore {
 15550  			break
 15551  		}
 15552  		t4 := auxToType(mem_0_2_2.Aux)
 15553  		_ = mem_0_2_2.Args[2]
 15554  		mem_0_2_2_0 := mem_0_2_2.Args[0]
 15555  		if mem_0_2_2_0.Op != OpOffPtr {
 15556  			break
 15557  		}
 15558  		tt4 := mem_0_2_2_0.Type
 15559  		o4 := auxIntToInt64(mem_0_2_2_0.AuxInt)
 15560  		p4 := mem_0_2_2_0.Args[0]
 15561  		d3 := mem_0_2_2.Args[1]
 15562  		mem_0_2_2_2 := mem_0_2_2.Args[2]
 15563  		if mem_0_2_2_2.Op != OpStore {
 15564  			break
 15565  		}
 15566  		t5 := auxToType(mem_0_2_2_2.Aux)
 15567  		_ = mem_0_2_2_2.Args[2]
 15568  		mem_0_2_2_2_0 := mem_0_2_2_2.Args[0]
 15569  		if mem_0_2_2_2_0.Op != OpOffPtr {
 15570  			break
 15571  		}
 15572  		tt5 := mem_0_2_2_2_0.Type
 15573  		o5 := auxIntToInt64(mem_0_2_2_2_0.AuxInt)
 15574  		p5 := mem_0_2_2_2_0.Args[0]
 15575  		d4 := mem_0_2_2_2.Args[1]
 15576  		mem_0_2_2_2_2 := mem_0_2_2_2.Args[2]
 15577  		if mem_0_2_2_2_2.Op != OpZero || auxIntToInt64(mem_0_2_2_2_2.AuxInt) != n {
 15578  			break
 15579  		}
 15580  		t6 := auxToType(mem_0_2_2_2_2.Aux)
 15581  		p6 := mem_0_2_2_2_2.Args[0]
 15582  		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && isSamePtr(p5, p6) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && t6.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && registerizable(b, t5) && n >= o2+t2.Size() && n >= o3+t3.Size() && n >= o4+t4.Size() && n >= o5+t5.Size()) {
 15583  			break
 15584  		}
 15585  		v.reset(OpStore)
 15586  		v.Aux = typeToAux(t2)
 15587  		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
 15588  		v0.AuxInt = int64ToAuxInt(o2)
 15589  		v0.AddArg(dst)
 15590  		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 15591  		v1.Aux = typeToAux(t3)
 15592  		v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
 15593  		v2.AuxInt = int64ToAuxInt(o3)
 15594  		v2.AddArg(dst)
 15595  		v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 15596  		v3.Aux = typeToAux(t4)
 15597  		v4 := b.NewValue0(v.Pos, OpOffPtr, tt4)
 15598  		v4.AuxInt = int64ToAuxInt(o4)
 15599  		v4.AddArg(dst)
 15600  		v5 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 15601  		v5.Aux = typeToAux(t5)
 15602  		v6 := b.NewValue0(v.Pos, OpOffPtr, tt5)
 15603  		v6.AuxInt = int64ToAuxInt(o5)
 15604  		v6.AddArg(dst)
 15605  		v7 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
 15606  		v7.AuxInt = int64ToAuxInt(n)
 15607  		v7.Aux = typeToAux(t1)
 15608  		v7.AddArg2(dst, mem)
 15609  		v5.AddArg3(v6, d4, v7)
 15610  		v3.AddArg3(v4, d3, v5)
 15611  		v1.AddArg3(v2, d2, v3)
 15612  		v.AddArg3(v0, d1, v1)
 15613  		return true
 15614  	}
 15615  	// match: (Move {t1} [s] dst tmp1 midmem:(Move {t2} [s] tmp2 src _))
 15616  	// cond: t1.Compare(t2) == types.CMPeq && isSamePtr(tmp1, tmp2) && isStackPtr(src) && !isVolatile(src) && disjoint(src, s, tmp2, s) && (disjoint(src, s, dst, s) || isInlinableMemmove(dst, src, s, config))
 15617  	// result: (Move {t1} [s] dst src midmem)
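	// A Move out of a stack temporary that was itself just filled by a Move
	// from src can copy directly from src, eliminating the temporary, provided
	// src is a non-volatile stack location and any overlap is safe (the
	// regions are disjoint, or the move can be lowered as an inlined memmove).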
 15618  	for {
 15619  		s := auxIntToInt64(v.AuxInt)
 15620  		t1 := auxToType(v.Aux)
 15621  		dst := v_0
 15622  		tmp1 := v_1
 15623  		midmem := v_2
 15624  		if midmem.Op != OpMove || auxIntToInt64(midmem.AuxInt) != s {
 15625  			break
 15626  		}
 15627  		t2 := auxToType(midmem.Aux)
 15628  		src := midmem.Args[1]
 15629  		tmp2 := midmem.Args[0]
 15630  		if !(t1.Compare(t2) == types.CMPeq && isSamePtr(tmp1, tmp2) && isStackPtr(src) && !isVolatile(src) && disjoint(src, s, tmp2, s) && (disjoint(src, s, dst, s) || isInlinableMemmove(dst, src, s, config))) {
 15631  			break
 15632  		}
 15633  		v.reset(OpMove)
 15634  		v.AuxInt = int64ToAuxInt(s)
 15635  		v.Aux = typeToAux(t1)
 15636  		v.AddArg3(dst, src, midmem)
 15637  		return true
 15638  	}
 15639  	// match: (Move {t1} [s] dst tmp1 midmem:(VarDef (Move {t2} [s] tmp2 src _)))
 15640  	// cond: t1.Compare(t2) == types.CMPeq && isSamePtr(tmp1, tmp2) && isStackPtr(src) && !isVolatile(src) && disjoint(src, s, tmp2, s) && (disjoint(src, s, dst, s) || isInlinableMemmove(dst, src, s, config))
 15641  	// result: (Move {t1} [s] dst src midmem)
 15642  	for {
 15643  		s := auxIntToInt64(v.AuxInt)
 15644  		t1 := auxToType(v.Aux)
 15645  		dst := v_0
 15646  		tmp1 := v_1
 15647  		midmem := v_2
 15648  		if midmem.Op != OpVarDef {
 15649  			break
 15650  		}
 15651  		midmem_0 := midmem.Args[0]
 15652  		if midmem_0.Op != OpMove || auxIntToInt64(midmem_0.AuxInt) != s {
 15653  			break
 15654  		}
 15655  		t2 := auxToType(midmem_0.Aux)
 15656  		src := midmem_0.Args[1]
 15657  		tmp2 := midmem_0.Args[0]
 15658  		if !(t1.Compare(t2) == types.CMPeq && isSamePtr(tmp1, tmp2) && isStackPtr(src) && !isVolatile(src) && disjoint(src, s, tmp2, s) && (disjoint(src, s, dst, s) || isInlinableMemmove(dst, src, s, config))) {
 15659  			break
 15660  		}
 15661  		v.reset(OpMove)
 15662  		v.AuxInt = int64ToAuxInt(s)
 15663  		v.Aux = typeToAux(t1)
 15664  		v.AddArg3(dst, src, midmem)
 15665  		return true
 15666  	}
 15667  	// match: (Move dst src mem)
 15668  	// cond: isSamePtr(dst, src)
 15669  	// result: mem
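	// Copying a region onto itself is a no-op, so the Move reduces to its
	// memory argument.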
 15670  	for {
 15671  		dst := v_0
 15672  		src := v_1
 15673  		mem := v_2
 15674  		if !(isSamePtr(dst, src)) {
 15675  			break
 15676  		}
 15677  		v.copyOf(mem)
 15678  		return true
 15679  	}
 15680  	return false
 15681  }
 15682  func rewriteValuegeneric_OpMul16(v *Value) bool {
 15683  	v_1 := v.Args[1]
 15684  	v_0 := v.Args[0]
 15685  	b := v.Block
 15686  	typ := &b.Func.Config.Types
 15687  	// match: (Mul16 (Const16 [c]) (Const16 [d]))
 15688  	// result: (Const16 [c*d])
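	// Constant folding; the product is computed in int16, so it wraps exactly
	// as it would at run time (e.g. 300*300 wraps to 24464).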
 15689  	for {
 15690  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 15691  			if v_0.Op != OpConst16 {
 15692  				continue
 15693  			}
 15694  			c := auxIntToInt16(v_0.AuxInt)
 15695  			if v_1.Op != OpConst16 {
 15696  				continue
 15697  			}
 15698  			d := auxIntToInt16(v_1.AuxInt)
 15699  			v.reset(OpConst16)
 15700  			v.AuxInt = int16ToAuxInt(c * d)
 15701  			return true
 15702  		}
 15703  		break
 15704  	}
 15705  	// match: (Mul16 (Const16 [1]) x)
 15706  	// result: x
 15707  	for {
 15708  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 15709  			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 1 {
 15710  				continue
 15711  			}
 15712  			x := v_1
 15713  			v.copyOf(x)
 15714  			return true
 15715  		}
 15716  		break
 15717  	}
 15718  	// match: (Mul16 (Const16 [-1]) x)
 15719  	// result: (Neg16 x)
 15720  	for {
 15721  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 15722  			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != -1 {
 15723  				continue
 15724  			}
 15725  			x := v_1
 15726  			v.reset(OpNeg16)
 15727  			v.AddArg(x)
 15728  			return true
 15729  		}
 15730  		break
 15731  	}
 15732  	// match: (Mul16 <t> n (Const16 [c]))
 15733  	// cond: isPowerOfTwo16(c)
 15734  	// result: (Lsh16x64 <t> n (Const64 <typ.UInt64> [log16(c)]))
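	// Strength reduction: multiplying by a positive power of two becomes a
	// left shift, e.g. n*8 => n<<3. The next rule handles negative powers of
	// two for signed types by shifting and then negating.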
 15735  	for {
 15736  		t := v.Type
 15737  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 15738  			n := v_0
 15739  			if v_1.Op != OpConst16 {
 15740  				continue
 15741  			}
 15742  			c := auxIntToInt16(v_1.AuxInt)
 15743  			if !(isPowerOfTwo16(c)) {
 15744  				continue
 15745  			}
 15746  			v.reset(OpLsh16x64)
 15747  			v.Type = t
 15748  			v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
 15749  			v0.AuxInt = int64ToAuxInt(log16(c))
 15750  			v.AddArg2(n, v0)
 15751  			return true
 15752  		}
 15753  		break
 15754  	}
 15755  	// match: (Mul16 <t> n (Const16 [c]))
 15756  	// cond: t.IsSigned() && isPowerOfTwo16(-c)
 15757  	// result: (Neg16 (Lsh16x64 <t> n (Const64 <typ.UInt64> [log16(-c)])))
 15758  	for {
 15759  		t := v.Type
 15760  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 15761  			n := v_0
 15762  			if v_1.Op != OpConst16 {
 15763  				continue
 15764  			}
 15765  			c := auxIntToInt16(v_1.AuxInt)
 15766  			if !(t.IsSigned() && isPowerOfTwo16(-c)) {
 15767  				continue
 15768  			}
 15769  			v.reset(OpNeg16)
 15770  			v0 := b.NewValue0(v.Pos, OpLsh16x64, t)
 15771  			v1 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
 15772  			v1.AuxInt = int64ToAuxInt(log16(-c))
 15773  			v0.AddArg2(n, v1)
 15774  			v.AddArg(v0)
 15775  			return true
 15776  		}
 15777  		break
 15778  	}
 15779  	// match: (Mul16 (Const16 [0]) _)
 15780  	// result: (Const16 [0])
 15781  	for {
 15782  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 15783  			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
 15784  				continue
 15785  			}
 15786  			v.reset(OpConst16)
 15787  			v.AuxInt = int16ToAuxInt(0)
 15788  			return true
 15789  		}
 15790  		break
 15791  	}
 15792  	// match: (Mul16 (Mul16 i:(Const16 <t>) z) x)
 15793  	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
 15794  	// result: (Mul16 i (Mul16 <t> x z))
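	// Reassociation: hoist the constant factor toward the root of the
	// expression so it can combine with other constants (the rule below then
	// folds two such constants into one).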
 15795  	for {
 15796  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 15797  			if v_0.Op != OpMul16 {
 15798  				continue
 15799  			}
 15800  			_ = v_0.Args[1]
 15801  			v_0_0 := v_0.Args[0]
 15802  			v_0_1 := v_0.Args[1]
 15803  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
 15804  				i := v_0_0
 15805  				if i.Op != OpConst16 {
 15806  					continue
 15807  				}
 15808  				t := i.Type
 15809  				z := v_0_1
 15810  				x := v_1
 15811  				if !(z.Op != OpConst16 && x.Op != OpConst16) {
 15812  					continue
 15813  				}
 15814  				v.reset(OpMul16)
 15815  				v0 := b.NewValue0(v.Pos, OpMul16, t)
 15816  				v0.AddArg2(x, z)
 15817  				v.AddArg2(i, v0)
 15818  				return true
 15819  			}
 15820  		}
 15821  		break
 15822  	}
 15823  	// match: (Mul16 (Const16 <t> [c]) (Mul16 (Const16 <t> [d]) x))
 15824  	// result: (Mul16 (Const16 <t> [c*d]) x)
 15825  	for {
 15826  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 15827  			if v_0.Op != OpConst16 {
 15828  				continue
 15829  			}
 15830  			t := v_0.Type
 15831  			c := auxIntToInt16(v_0.AuxInt)
 15832  			if v_1.Op != OpMul16 {
 15833  				continue
 15834  			}
 15835  			_ = v_1.Args[1]
 15836  			v_1_0 := v_1.Args[0]
 15837  			v_1_1 := v_1.Args[1]
 15838  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 15839  				if v_1_0.Op != OpConst16 || v_1_0.Type != t {
 15840  					continue
 15841  				}
 15842  				d := auxIntToInt16(v_1_0.AuxInt)
 15843  				x := v_1_1
 15844  				v.reset(OpMul16)
 15845  				v0 := b.NewValue0(v.Pos, OpConst16, t)
 15846  				v0.AuxInt = int16ToAuxInt(c * d)
 15847  				v.AddArg2(v0, x)
 15848  				return true
 15849  			}
 15850  		}
 15851  		break
 15852  	}
 15853  	return false
 15854  }
 15855  func rewriteValuegeneric_OpMul32(v *Value) bool {
 15856  	v_1 := v.Args[1]
 15857  	v_0 := v.Args[0]
 15858  	b := v.Block
 15859  	typ := &b.Func.Config.Types
 15860  	// match: (Mul32 (Const32 [c]) (Const32 [d]))
 15861  	// result: (Const32 [c*d])
 15862  	for {
 15863  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 15864  			if v_0.Op != OpConst32 {
 15865  				continue
 15866  			}
 15867  			c := auxIntToInt32(v_0.AuxInt)
 15868  			if v_1.Op != OpConst32 {
 15869  				continue
 15870  			}
 15871  			d := auxIntToInt32(v_1.AuxInt)
 15872  			v.reset(OpConst32)
 15873  			v.AuxInt = int32ToAuxInt(c * d)
 15874  			return true
 15875  		}
 15876  		break
 15877  	}
 15878  	// match: (Mul32 (Const32 [1]) x)
 15879  	// result: x
 15880  	for {
 15881  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 15882  			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 1 {
 15883  				continue
 15884  			}
 15885  			x := v_1
 15886  			v.copyOf(x)
 15887  			return true
 15888  		}
 15889  		break
 15890  	}
 15891  	// match: (Mul32 (Const32 [-1]) x)
 15892  	// result: (Neg32 x)
 15893  	for {
 15894  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 15895  			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != -1 {
 15896  				continue
 15897  			}
 15898  			x := v_1
 15899  			v.reset(OpNeg32)
 15900  			v.AddArg(x)
 15901  			return true
 15902  		}
 15903  		break
 15904  	}
 15905  	// match: (Mul32 <t> n (Const32 [c]))
 15906  	// cond: isPowerOfTwo32(c)
 15907  	// result: (Lsh32x64 <t> n (Const64 <typ.UInt64> [log32(c)]))
 15908  	for {
 15909  		t := v.Type
 15910  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 15911  			n := v_0
 15912  			if v_1.Op != OpConst32 {
 15913  				continue
 15914  			}
 15915  			c := auxIntToInt32(v_1.AuxInt)
 15916  			if !(isPowerOfTwo32(c)) {
 15917  				continue
 15918  			}
 15919  			v.reset(OpLsh32x64)
 15920  			v.Type = t
 15921  			v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
 15922  			v0.AuxInt = int64ToAuxInt(log32(c))
 15923  			v.AddArg2(n, v0)
 15924  			return true
 15925  		}
 15926  		break
 15927  	}
 15928  	// match: (Mul32 <t> n (Const32 [c]))
 15929  	// cond: t.IsSigned() && isPowerOfTwo32(-c)
 15930  	// result: (Neg32 (Lsh32x64 <t> n (Const64 <typ.UInt64> [log32(-c)])))
 15931  	for {
 15932  		t := v.Type
 15933  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 15934  			n := v_0
 15935  			if v_1.Op != OpConst32 {
 15936  				continue
 15937  			}
 15938  			c := auxIntToInt32(v_1.AuxInt)
 15939  			if !(t.IsSigned() && isPowerOfTwo32(-c)) {
 15940  				continue
 15941  			}
 15942  			v.reset(OpNeg32)
 15943  			v0 := b.NewValue0(v.Pos, OpLsh32x64, t)
 15944  			v1 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
 15945  			v1.AuxInt = int64ToAuxInt(log32(-c))
 15946  			v0.AddArg2(n, v1)
 15947  			v.AddArg(v0)
 15948  			return true
 15949  		}
 15950  		break
 15951  	}
 15952  	// match: (Mul32 (Const32 <t> [c]) (Add32 <t> (Const32 <t> [d]) x))
 15953  	// result: (Add32 (Const32 <t> [c*d]) (Mul32 <t> (Const32 <t> [c]) x))
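	// Distribute a constant multiplier over an addition that contains a
	// constant so the two constants fold, e.g. 3*(x+4) => 3*x + 12.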
 15954  	for {
 15955  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 15956  			if v_0.Op != OpConst32 {
 15957  				continue
 15958  			}
 15959  			t := v_0.Type
 15960  			c := auxIntToInt32(v_0.AuxInt)
 15961  			if v_1.Op != OpAdd32 || v_1.Type != t {
 15962  				continue
 15963  			}
 15964  			_ = v_1.Args[1]
 15965  			v_1_0 := v_1.Args[0]
 15966  			v_1_1 := v_1.Args[1]
 15967  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 15968  				if v_1_0.Op != OpConst32 || v_1_0.Type != t {
 15969  					continue
 15970  				}
 15971  				d := auxIntToInt32(v_1_0.AuxInt)
 15972  				x := v_1_1
 15973  				v.reset(OpAdd32)
 15974  				v0 := b.NewValue0(v.Pos, OpConst32, t)
 15975  				v0.AuxInt = int32ToAuxInt(c * d)
 15976  				v1 := b.NewValue0(v.Pos, OpMul32, t)
 15977  				v2 := b.NewValue0(v.Pos, OpConst32, t)
 15978  				v2.AuxInt = int32ToAuxInt(c)
 15979  				v1.AddArg2(v2, x)
 15980  				v.AddArg2(v0, v1)
 15981  				return true
 15982  			}
 15983  		}
 15984  		break
 15985  	}
 15986  	// match: (Mul32 (Const32 [0]) _)
 15987  	// result: (Const32 [0])
 15988  	for {
 15989  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 15990  			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 15991  				continue
 15992  			}
 15993  			v.reset(OpConst32)
 15994  			v.AuxInt = int32ToAuxInt(0)
 15995  			return true
 15996  		}
 15997  		break
 15998  	}
 15999  	// match: (Mul32 (Mul32 i:(Const32 <t>) z) x)
 16000  	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
 16001  	// result: (Mul32 i (Mul32 <t> x z))
 16002  	for {
 16003  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 16004  			if v_0.Op != OpMul32 {
 16005  				continue
 16006  			}
 16007  			_ = v_0.Args[1]
 16008  			v_0_0 := v_0.Args[0]
 16009  			v_0_1 := v_0.Args[1]
 16010  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
 16011  				i := v_0_0
 16012  				if i.Op != OpConst32 {
 16013  					continue
 16014  				}
 16015  				t := i.Type
 16016  				z := v_0_1
 16017  				x := v_1
 16018  				if !(z.Op != OpConst32 && x.Op != OpConst32) {
 16019  					continue
 16020  				}
 16021  				v.reset(OpMul32)
 16022  				v0 := b.NewValue0(v.Pos, OpMul32, t)
 16023  				v0.AddArg2(x, z)
 16024  				v.AddArg2(i, v0)
 16025  				return true
 16026  			}
 16027  		}
 16028  		break
 16029  	}
 16030  	// match: (Mul32 (Const32 <t> [c]) (Mul32 (Const32 <t> [d]) x))
 16031  	// result: (Mul32 (Const32 <t> [c*d]) x)
 16032  	for {
 16033  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 16034  			if v_0.Op != OpConst32 {
 16035  				continue
 16036  			}
 16037  			t := v_0.Type
 16038  			c := auxIntToInt32(v_0.AuxInt)
 16039  			if v_1.Op != OpMul32 {
 16040  				continue
 16041  			}
 16042  			_ = v_1.Args[1]
 16043  			v_1_0 := v_1.Args[0]
 16044  			v_1_1 := v_1.Args[1]
 16045  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 16046  				if v_1_0.Op != OpConst32 || v_1_0.Type != t {
 16047  					continue
 16048  				}
 16049  				d := auxIntToInt32(v_1_0.AuxInt)
 16050  				x := v_1_1
 16051  				v.reset(OpMul32)
 16052  				v0 := b.NewValue0(v.Pos, OpConst32, t)
 16053  				v0.AuxInt = int32ToAuxInt(c * d)
 16054  				v.AddArg2(v0, x)
 16055  				return true
 16056  			}
 16057  		}
 16058  		break
 16059  	}
 16060  	return false
 16061  }
 16062  func rewriteValuegeneric_OpMul32F(v *Value) bool {
 16063  	v_1 := v.Args[1]
 16064  	v_0 := v.Args[0]
 16065  	// match: (Mul32F (Const32F [c]) (Const32F [d]))
 16066  	// cond: c*d == c*d
 16067  	// result: (Const32F [c*d])
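	// Constant folding for float32 products. The guard c*d == c*d fails only
	// when the product is NaN; NaN-producing products are left to run time,
	// presumably so the compiler does not bake a particular NaN into the
	// binary.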
 16068  	for {
 16069  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 16070  			if v_0.Op != OpConst32F {
 16071  				continue
 16072  			}
 16073  			c := auxIntToFloat32(v_0.AuxInt)
 16074  			if v_1.Op != OpConst32F {
 16075  				continue
 16076  			}
 16077  			d := auxIntToFloat32(v_1.AuxInt)
 16078  			if !(c*d == c*d) {
 16079  				continue
 16080  			}
 16081  			v.reset(OpConst32F)
 16082  			v.AuxInt = float32ToAuxInt(c * d)
 16083  			return true
 16084  		}
 16085  		break
 16086  	}
 16087  	// match: (Mul32F x (Const32F [1]))
 16088  	// result: x
 16089  	for {
 16090  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 16091  			x := v_0
 16092  			if v_1.Op != OpConst32F || auxIntToFloat32(v_1.AuxInt) != 1 {
 16093  				continue
 16094  			}
 16095  			v.copyOf(x)
 16096  			return true
 16097  		}
 16098  		break
 16099  	}
 16100  	// match: (Mul32F x (Const32F [-1]))
 16101  	// result: (Neg32F x)
 16102  	for {
 16103  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 16104  			x := v_0
 16105  			if v_1.Op != OpConst32F || auxIntToFloat32(v_1.AuxInt) != -1 {
 16106  				continue
 16107  			}
 16108  			v.reset(OpNeg32F)
 16109  			v.AddArg(x)
 16110  			return true
 16111  		}
 16112  		break
 16113  	}
 16114  	// match: (Mul32F x (Const32F [2]))
 16115  	// result: (Add32F x x)
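	// Multiplying by 2 is exact in binary floating point, so x*2 can be
	// rewritten as the (typically cheaper) addition x+x.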
 16116  	for {
 16117  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 16118  			x := v_0
 16119  			if v_1.Op != OpConst32F || auxIntToFloat32(v_1.AuxInt) != 2 {
 16120  				continue
 16121  			}
 16122  			v.reset(OpAdd32F)
 16123  			v.AddArg2(x, x)
 16124  			return true
 16125  		}
 16126  		break
 16127  	}
 16128  	return false
 16129  }
 16130  func rewriteValuegeneric_OpMul64(v *Value) bool {
 16131  	v_1 := v.Args[1]
 16132  	v_0 := v.Args[0]
 16133  	b := v.Block
 16134  	typ := &b.Func.Config.Types
 16135  	// match: (Mul64 (Const64 [c]) (Const64 [d]))
 16136  	// result: (Const64 [c*d])
 16137  	for {
 16138  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 16139  			if v_0.Op != OpConst64 {
 16140  				continue
 16141  			}
 16142  			c := auxIntToInt64(v_0.AuxInt)
 16143  			if v_1.Op != OpConst64 {
 16144  				continue
 16145  			}
 16146  			d := auxIntToInt64(v_1.AuxInt)
 16147  			v.reset(OpConst64)
 16148  			v.AuxInt = int64ToAuxInt(c * d)
 16149  			return true
 16150  		}
 16151  		break
 16152  	}
 16153  	// match: (Mul64 (Const64 [1]) x)
 16154  	// result: x
 16155  	for {
 16156  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 16157  			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 1 {
 16158  				continue
 16159  			}
 16160  			x := v_1
 16161  			v.copyOf(x)
 16162  			return true
 16163  		}
 16164  		break
 16165  	}
 16166  	// match: (Mul64 (Const64 [-1]) x)
 16167  	// result: (Neg64 x)
 16168  	for {
 16169  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 16170  			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != -1 {
 16171  				continue
 16172  			}
 16173  			x := v_1
 16174  			v.reset(OpNeg64)
 16175  			v.AddArg(x)
 16176  			return true
 16177  		}
 16178  		break
 16179  	}
 16180  	// match: (Mul64 <t> n (Const64 [c]))
 16181  	// cond: isPowerOfTwo64(c)
 16182  	// result: (Lsh64x64 <t> n (Const64 <typ.UInt64> [log64(c)]))
 16183  	for {
 16184  		t := v.Type
 16185  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 16186  			n := v_0
 16187  			if v_1.Op != OpConst64 {
 16188  				continue
 16189  			}
 16190  			c := auxIntToInt64(v_1.AuxInt)
 16191  			if !(isPowerOfTwo64(c)) {
 16192  				continue
 16193  			}
 16194  			v.reset(OpLsh64x64)
 16195  			v.Type = t
 16196  			v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
 16197  			v0.AuxInt = int64ToAuxInt(log64(c))
 16198  			v.AddArg2(n, v0)
 16199  			return true
 16200  		}
 16201  		break
 16202  	}
 16203  	// match: (Mul64 <t> n (Const64 [c]))
 16204  	// cond: t.IsSigned() && isPowerOfTwo64(-c)
 16205  	// result: (Neg64 (Lsh64x64 <t> n (Const64 <typ.UInt64> [log64(-c)])))
 16206  	for {
 16207  		t := v.Type
 16208  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 16209  			n := v_0
 16210  			if v_1.Op != OpConst64 {
 16211  				continue
 16212  			}
 16213  			c := auxIntToInt64(v_1.AuxInt)
 16214  			if !(t.IsSigned() && isPowerOfTwo64(-c)) {
 16215  				continue
 16216  			}
 16217  			v.reset(OpNeg64)
 16218  			v0 := b.NewValue0(v.Pos, OpLsh64x64, t)
 16219  			v1 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
 16220  			v1.AuxInt = int64ToAuxInt(log64(-c))
 16221  			v0.AddArg2(n, v1)
 16222  			v.AddArg(v0)
 16223  			return true
 16224  		}
 16225  		break
 16226  	}
 16227  	// match: (Mul64 (Const64 <t> [c]) (Add64 <t> (Const64 <t> [d]) x))
 16228  	// result: (Add64 (Const64 <t> [c*d]) (Mul64 <t> (Const64 <t> [c]) x))
 16229  	for {
 16230  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 16231  			if v_0.Op != OpConst64 {
 16232  				continue
 16233  			}
 16234  			t := v_0.Type
 16235  			c := auxIntToInt64(v_0.AuxInt)
 16236  			if v_1.Op != OpAdd64 || v_1.Type != t {
 16237  				continue
 16238  			}
 16239  			_ = v_1.Args[1]
 16240  			v_1_0 := v_1.Args[0]
 16241  			v_1_1 := v_1.Args[1]
 16242  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 16243  				if v_1_0.Op != OpConst64 || v_1_0.Type != t {
 16244  					continue
 16245  				}
 16246  				d := auxIntToInt64(v_1_0.AuxInt)
 16247  				x := v_1_1
 16248  				v.reset(OpAdd64)
 16249  				v0 := b.NewValue0(v.Pos, OpConst64, t)
 16250  				v0.AuxInt = int64ToAuxInt(c * d)
 16251  				v1 := b.NewValue0(v.Pos, OpMul64, t)
 16252  				v2 := b.NewValue0(v.Pos, OpConst64, t)
 16253  				v2.AuxInt = int64ToAuxInt(c)
 16254  				v1.AddArg2(v2, x)
 16255  				v.AddArg2(v0, v1)
 16256  				return true
 16257  			}
 16258  		}
 16259  		break
 16260  	}
 16261  	// match: (Mul64 (Const64 [0]) _)
 16262  	// result: (Const64 [0])
 16263  	for {
 16264  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 16265  			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 16266  				continue
 16267  			}
 16268  			v.reset(OpConst64)
 16269  			v.AuxInt = int64ToAuxInt(0)
 16270  			return true
 16271  		}
 16272  		break
 16273  	}
 16274  	// match: (Mul64 (Mul64 i:(Const64 <t>) z) x)
 16275  	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
 16276  	// result: (Mul64 i (Mul64 <t> x z))
 16277  	for {
 16278  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 16279  			if v_0.Op != OpMul64 {
 16280  				continue
 16281  			}
 16282  			_ = v_0.Args[1]
 16283  			v_0_0 := v_0.Args[0]
 16284  			v_0_1 := v_0.Args[1]
 16285  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
 16286  				i := v_0_0
 16287  				if i.Op != OpConst64 {
 16288  					continue
 16289  				}
 16290  				t := i.Type
 16291  				z := v_0_1
 16292  				x := v_1
 16293  				if !(z.Op != OpConst64 && x.Op != OpConst64) {
 16294  					continue
 16295  				}
 16296  				v.reset(OpMul64)
 16297  				v0 := b.NewValue0(v.Pos, OpMul64, t)
 16298  				v0.AddArg2(x, z)
 16299  				v.AddArg2(i, v0)
 16300  				return true
 16301  			}
 16302  		}
 16303  		break
 16304  	}
 16305  	// match: (Mul64 (Const64 <t> [c]) (Mul64 (Const64 <t> [d]) x))
 16306  	// result: (Mul64 (Const64 <t> [c*d]) x)
 16307  	for {
 16308  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 16309  			if v_0.Op != OpConst64 {
 16310  				continue
 16311  			}
 16312  			t := v_0.Type
 16313  			c := auxIntToInt64(v_0.AuxInt)
 16314  			if v_1.Op != OpMul64 {
 16315  				continue
 16316  			}
 16317  			_ = v_1.Args[1]
 16318  			v_1_0 := v_1.Args[0]
 16319  			v_1_1 := v_1.Args[1]
 16320  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 16321  				if v_1_0.Op != OpConst64 || v_1_0.Type != t {
 16322  					continue
 16323  				}
 16324  				d := auxIntToInt64(v_1_0.AuxInt)
 16325  				x := v_1_1
 16326  				v.reset(OpMul64)
 16327  				v0 := b.NewValue0(v.Pos, OpConst64, t)
 16328  				v0.AuxInt = int64ToAuxInt(c * d)
 16329  				v.AddArg2(v0, x)
 16330  				return true
 16331  			}
 16332  		}
 16333  		break
 16334  	}
 16335  	return false
 16336  }
 16337  func rewriteValuegeneric_OpMul64F(v *Value) bool {
 16338  	v_1 := v.Args[1]
 16339  	v_0 := v.Args[0]
 16340  	// match: (Mul64F (Const64F [c]) (Const64F [d]))
 16341  	// cond: c*d == c*d
 16342  	// result: (Const64F [c*d])
 16343  	for {
 16344  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 16345  			if v_0.Op != OpConst64F {
 16346  				continue
 16347  			}
 16348  			c := auxIntToFloat64(v_0.AuxInt)
 16349  			if v_1.Op != OpConst64F {
 16350  				continue
 16351  			}
 16352  			d := auxIntToFloat64(v_1.AuxInt)
 16353  			if !(c*d == c*d) {
 16354  				continue
 16355  			}
 16356  			v.reset(OpConst64F)
 16357  			v.AuxInt = float64ToAuxInt(c * d)
 16358  			return true
 16359  		}
 16360  		break
 16361  	}
 16362  	// match: (Mul64F x (Const64F [1]))
 16363  	// result: x
 16364  	for {
 16365  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 16366  			x := v_0
 16367  			if v_1.Op != OpConst64F || auxIntToFloat64(v_1.AuxInt) != 1 {
 16368  				continue
 16369  			}
 16370  			v.copyOf(x)
 16371  			return true
 16372  		}
 16373  		break
 16374  	}
 16375  	// match: (Mul64F x (Const64F [-1]))
 16376  	// result: (Neg64F x)
 16377  	for {
 16378  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 16379  			x := v_0
 16380  			if v_1.Op != OpConst64F || auxIntToFloat64(v_1.AuxInt) != -1 {
 16381  				continue
 16382  			}
 16383  			v.reset(OpNeg64F)
 16384  			v.AddArg(x)
 16385  			return true
 16386  		}
 16387  		break
 16388  	}
 16389  	// match: (Mul64F x (Const64F [2]))
 16390  	// result: (Add64F x x)
 16391  	for {
 16392  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 16393  			x := v_0
 16394  			if v_1.Op != OpConst64F || auxIntToFloat64(v_1.AuxInt) != 2 {
 16395  				continue
 16396  			}
 16397  			v.reset(OpAdd64F)
 16398  			v.AddArg2(x, x)
 16399  			return true
 16400  		}
 16401  		break
 16402  	}
 16403  	return false
 16404  }
 16405  func rewriteValuegeneric_OpMul8(v *Value) bool {
 16406  	v_1 := v.Args[1]
 16407  	v_0 := v.Args[0]
 16408  	b := v.Block
 16409  	typ := &b.Func.Config.Types
 16410  	// match: (Mul8 (Const8 [c]) (Const8 [d]))
 16411  	// result: (Const8 [c*d])
 16412  	for {
 16413  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 16414  			if v_0.Op != OpConst8 {
 16415  				continue
 16416  			}
 16417  			c := auxIntToInt8(v_0.AuxInt)
 16418  			if v_1.Op != OpConst8 {
 16419  				continue
 16420  			}
 16421  			d := auxIntToInt8(v_1.AuxInt)
 16422  			v.reset(OpConst8)
 16423  			v.AuxInt = int8ToAuxInt(c * d)
 16424  			return true
 16425  		}
 16426  		break
 16427  	}
 16428  	// match: (Mul8 (Const8 [1]) x)
 16429  	// result: x
 16430  	for {
 16431  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 16432  			if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 1 {
 16433  				continue
 16434  			}
 16435  			x := v_1
 16436  			v.copyOf(x)
 16437  			return true
 16438  		}
 16439  		break
 16440  	}
 16441  	// match: (Mul8 (Const8 [-1]) x)
 16442  	// result: (Neg8 x)
 16443  	for {
 16444  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 16445  			if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != -1 {
 16446  				continue
 16447  			}
 16448  			x := v_1
 16449  			v.reset(OpNeg8)
 16450  			v.AddArg(x)
 16451  			return true
 16452  		}
 16453  		break
 16454  	}
 16455  	// match: (Mul8 <t> n (Const8 [c]))
 16456  	// cond: isPowerOfTwo8(c)
 16457  	// result: (Lsh8x64 <t> n (Const64 <typ.UInt64> [log8(c)]))
 16458  	for {
 16459  		t := v.Type
 16460  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 16461  			n := v_0
 16462  			if v_1.Op != OpConst8 {
 16463  				continue
 16464  			}
 16465  			c := auxIntToInt8(v_1.AuxInt)
 16466  			if !(isPowerOfTwo8(c)) {
 16467  				continue
 16468  			}
 16469  			v.reset(OpLsh8x64)
 16470  			v.Type = t
 16471  			v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
 16472  			v0.AuxInt = int64ToAuxInt(log8(c))
 16473  			v.AddArg2(n, v0)
 16474  			return true
 16475  		}
 16476  		break
 16477  	}
 16478  	// match: (Mul8 <t> n (Const8 [c]))
 16479  	// cond: t.IsSigned() && isPowerOfTwo8(-c)
 16480  	// result: (Neg8 (Lsh8x64 <t> n (Const64 <typ.UInt64> [log8(-c)])))
 16481  	for {
 16482  		t := v.Type
 16483  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 16484  			n := v_0
 16485  			if v_1.Op != OpConst8 {
 16486  				continue
 16487  			}
 16488  			c := auxIntToInt8(v_1.AuxInt)
 16489  			if !(t.IsSigned() && isPowerOfTwo8(-c)) {
 16490  				continue
 16491  			}
 16492  			v.reset(OpNeg8)
 16493  			v0 := b.NewValue0(v.Pos, OpLsh8x64, t)
 16494  			v1 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
 16495  			v1.AuxInt = int64ToAuxInt(log8(-c))
 16496  			v0.AddArg2(n, v1)
 16497  			v.AddArg(v0)
 16498  			return true
 16499  		}
 16500  		break
 16501  	}
 16502  	// match: (Mul8 (Const8 [0]) _)
 16503  	// result: (Const8 [0])
 16504  	for {
 16505  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 16506  			if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
 16507  				continue
 16508  			}
 16509  			v.reset(OpConst8)
 16510  			v.AuxInt = int8ToAuxInt(0)
 16511  			return true
 16512  		}
 16513  		break
 16514  	}
 16515  	// match: (Mul8 (Mul8 i:(Const8 <t>) z) x)
 16516  	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
 16517  	// result: (Mul8 i (Mul8 <t> x z))
 16518  	for {
 16519  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 16520  			if v_0.Op != OpMul8 {
 16521  				continue
 16522  			}
 16523  			_ = v_0.Args[1]
 16524  			v_0_0 := v_0.Args[0]
 16525  			v_0_1 := v_0.Args[1]
 16526  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
 16527  				i := v_0_0
 16528  				if i.Op != OpConst8 {
 16529  					continue
 16530  				}
 16531  				t := i.Type
 16532  				z := v_0_1
 16533  				x := v_1
 16534  				if !(z.Op != OpConst8 && x.Op != OpConst8) {
 16535  					continue
 16536  				}
 16537  				v.reset(OpMul8)
 16538  				v0 := b.NewValue0(v.Pos, OpMul8, t)
 16539  				v0.AddArg2(x, z)
 16540  				v.AddArg2(i, v0)
 16541  				return true
 16542  			}
 16543  		}
 16544  		break
 16545  	}
 16546  	// match: (Mul8 (Const8 <t> [c]) (Mul8 (Const8 <t> [d]) x))
 16547  	// result: (Mul8 (Const8 <t> [c*d]) x)
 16548  	for {
 16549  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 16550  			if v_0.Op != OpConst8 {
 16551  				continue
 16552  			}
 16553  			t := v_0.Type
 16554  			c := auxIntToInt8(v_0.AuxInt)
 16555  			if v_1.Op != OpMul8 {
 16556  				continue
 16557  			}
 16558  			_ = v_1.Args[1]
 16559  			v_1_0 := v_1.Args[0]
 16560  			v_1_1 := v_1.Args[1]
 16561  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 16562  				if v_1_0.Op != OpConst8 || v_1_0.Type != t {
 16563  					continue
 16564  				}
 16565  				d := auxIntToInt8(v_1_0.AuxInt)
 16566  				x := v_1_1
 16567  				v.reset(OpMul8)
 16568  				v0 := b.NewValue0(v.Pos, OpConst8, t)
 16569  				v0.AuxInt = int8ToAuxInt(c * d)
 16570  				v.AddArg2(v0, x)
 16571  				return true
 16572  			}
 16573  		}
 16574  		break
 16575  	}
 16576  	return false
 16577  }
 16578  func rewriteValuegeneric_OpNeg16(v *Value) bool {
 16579  	v_0 := v.Args[0]
 16580  	b := v.Block
 16581  	// match: (Neg16 (Const16 [c]))
 16582  	// result: (Const16 [-c])
 16583  	for {
 16584  		if v_0.Op != OpConst16 {
 16585  			break
 16586  		}
 16587  		c := auxIntToInt16(v_0.AuxInt)
 16588  		v.reset(OpConst16)
 16589  		v.AuxInt = int16ToAuxInt(-c)
 16590  		return true
 16591  	}
 16592  	// match: (Neg16 (Sub16 x y))
 16593  	// result: (Sub16 y x)
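	// In two's-complement arithmetic -(x-y) == y-x, even when the subtraction
	// wraps, so the negation is absorbed by swapping the operands.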
 16594  	for {
 16595  		if v_0.Op != OpSub16 {
 16596  			break
 16597  		}
 16598  		y := v_0.Args[1]
 16599  		x := v_0.Args[0]
 16600  		v.reset(OpSub16)
 16601  		v.AddArg2(y, x)
 16602  		return true
 16603  	}
 16604  	// match: (Neg16 (Neg16 x))
 16605  	// result: x
 16606  	for {
 16607  		if v_0.Op != OpNeg16 {
 16608  			break
 16609  		}
 16610  		x := v_0.Args[0]
 16611  		v.copyOf(x)
 16612  		return true
 16613  	}
 16614  	// match: (Neg16 <t> (Com16 x))
 16615  	// result: (Add16 (Const16 <t> [1]) x)
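	// Two's-complement identity: -(^x) == x+1, so the negation of a complement
	// becomes an add of the constant 1.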
 16616  	for {
 16617  		t := v.Type
 16618  		if v_0.Op != OpCom16 {
 16619  			break
 16620  		}
 16621  		x := v_0.Args[0]
 16622  		v.reset(OpAdd16)
 16623  		v0 := b.NewValue0(v.Pos, OpConst16, t)
 16624  		v0.AuxInt = int16ToAuxInt(1)
 16625  		v.AddArg2(v0, x)
 16626  		return true
 16627  	}
 16628  	return false
 16629  }
 16630  func rewriteValuegeneric_OpNeg32(v *Value) bool {
 16631  	v_0 := v.Args[0]
 16632  	b := v.Block
 16633  	// match: (Neg32 (Const32 [c]))
 16634  	// result: (Const32 [-c])
 16635  	for {
 16636  		if v_0.Op != OpConst32 {
 16637  			break
 16638  		}
 16639  		c := auxIntToInt32(v_0.AuxInt)
 16640  		v.reset(OpConst32)
 16641  		v.AuxInt = int32ToAuxInt(-c)
 16642  		return true
 16643  	}
 16644  	// match: (Neg32 (Sub32 x y))
 16645  	// result: (Sub32 y x)
 16646  	for {
 16647  		if v_0.Op != OpSub32 {
 16648  			break
 16649  		}
 16650  		y := v_0.Args[1]
 16651  		x := v_0.Args[0]
 16652  		v.reset(OpSub32)
 16653  		v.AddArg2(y, x)
 16654  		return true
 16655  	}
 16656  	// match: (Neg32 (Neg32 x))
 16657  	// result: x
 16658  	for {
 16659  		if v_0.Op != OpNeg32 {
 16660  			break
 16661  		}
 16662  		x := v_0.Args[0]
 16663  		v.copyOf(x)
 16664  		return true
 16665  	}
 16666  	// match: (Neg32 <t> (Com32 x))
 16667  	// result: (Add32 (Const32 <t> [1]) x)
 16668  	for {
 16669  		t := v.Type
 16670  		if v_0.Op != OpCom32 {
 16671  			break
 16672  		}
 16673  		x := v_0.Args[0]
 16674  		v.reset(OpAdd32)
 16675  		v0 := b.NewValue0(v.Pos, OpConst32, t)
 16676  		v0.AuxInt = int32ToAuxInt(1)
 16677  		v.AddArg2(v0, x)
 16678  		return true
 16679  	}
 16680  	return false
 16681  }
 16682  func rewriteValuegeneric_OpNeg32F(v *Value) bool {
 16683  	v_0 := v.Args[0]
 16684  	// match: (Neg32F (Const32F [c]))
 16685  	// cond: c != 0
 16686  	// result: (Const32F [-c])
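	// Fold negation of a nonzero float constant. Zero is excluded because
	// negating 0.0 yields -0.0, which compares equal to 0.0 but has a
	// different sign bit, so the rule conservatively leaves zero alone.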
 16687  	for {
 16688  		if v_0.Op != OpConst32F {
 16689  			break
 16690  		}
 16691  		c := auxIntToFloat32(v_0.AuxInt)
 16692  		if !(c != 0) {
 16693  			break
 16694  		}
 16695  		v.reset(OpConst32F)
 16696  		v.AuxInt = float32ToAuxInt(-c)
 16697  		return true
 16698  	}
 16699  	return false
 16700  }
 16701  func rewriteValuegeneric_OpNeg64(v *Value) bool {
 16702  	v_0 := v.Args[0]
 16703  	b := v.Block
 16704  	// match: (Neg64 (Const64 [c]))
 16705  	// result: (Const64 [-c])
 16706  	for {
 16707  		if v_0.Op != OpConst64 {
 16708  			break
 16709  		}
 16710  		c := auxIntToInt64(v_0.AuxInt)
 16711  		v.reset(OpConst64)
 16712  		v.AuxInt = int64ToAuxInt(-c)
 16713  		return true
 16714  	}
 16715  	// match: (Neg64 (Sub64 x y))
 16716  	// result: (Sub64 y x)
 16717  	for {
 16718  		if v_0.Op != OpSub64 {
 16719  			break
 16720  		}
 16721  		y := v_0.Args[1]
 16722  		x := v_0.Args[0]
 16723  		v.reset(OpSub64)
 16724  		v.AddArg2(y, x)
 16725  		return true
 16726  	}
 16727  	// match: (Neg64 (Neg64 x))
 16728  	// result: x
 16729  	for {
 16730  		if v_0.Op != OpNeg64 {
 16731  			break
 16732  		}
 16733  		x := v_0.Args[0]
 16734  		v.copyOf(x)
 16735  		return true
 16736  	}
 16737  	// match: (Neg64 <t> (Com64 x))
 16738  	// result: (Add64 (Const64 <t> [1]) x)
 16739  	for {
 16740  		t := v.Type
 16741  		if v_0.Op != OpCom64 {
 16742  			break
 16743  		}
 16744  		x := v_0.Args[0]
 16745  		v.reset(OpAdd64)
 16746  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 16747  		v0.AuxInt = int64ToAuxInt(1)
 16748  		v.AddArg2(v0, x)
 16749  		return true
 16750  	}
 16751  	return false
 16752  }
 16753  func rewriteValuegeneric_OpNeg64F(v *Value) bool {
 16754  	v_0 := v.Args[0]
 16755  	// match: (Neg64F (Const64F [c]))
 16756  	// cond: c != 0
 16757  	// result: (Const64F [-c])
 16758  	for {
 16759  		if v_0.Op != OpConst64F {
 16760  			break
 16761  		}
 16762  		c := auxIntToFloat64(v_0.AuxInt)
 16763  		if !(c != 0) {
 16764  			break
 16765  		}
 16766  		v.reset(OpConst64F)
 16767  		v.AuxInt = float64ToAuxInt(-c)
 16768  		return true
 16769  	}
 16770  	return false
 16771  }
 16772  func rewriteValuegeneric_OpNeg8(v *Value) bool {
 16773  	v_0 := v.Args[0]
 16774  	b := v.Block
 16775  	// match: (Neg8 (Const8 [c]))
 16776  	// result: (Const8 [-c])
 16777  	for {
 16778  		if v_0.Op != OpConst8 {
 16779  			break
 16780  		}
 16781  		c := auxIntToInt8(v_0.AuxInt)
 16782  		v.reset(OpConst8)
 16783  		v.AuxInt = int8ToAuxInt(-c)
 16784  		return true
 16785  	}
 16786  	// match: (Neg8 (Sub8 x y))
 16787  	// result: (Sub8 y x)
 16788  	for {
 16789  		if v_0.Op != OpSub8 {
 16790  			break
 16791  		}
 16792  		y := v_0.Args[1]
 16793  		x := v_0.Args[0]
 16794  		v.reset(OpSub8)
 16795  		v.AddArg2(y, x)
 16796  		return true
 16797  	}
 16798  	// match: (Neg8 (Neg8 x))
 16799  	// result: x
 16800  	for {
 16801  		if v_0.Op != OpNeg8 {
 16802  			break
 16803  		}
 16804  		x := v_0.Args[0]
 16805  		v.copyOf(x)
 16806  		return true
 16807  	}
 16808  	// match: (Neg8 <t> (Com8 x))
 16809  	// result: (Add8 (Const8 <t> [1]) x)
 16810  	for {
 16811  		t := v.Type
 16812  		if v_0.Op != OpCom8 {
 16813  			break
 16814  		}
 16815  		x := v_0.Args[0]
 16816  		v.reset(OpAdd8)
 16817  		v0 := b.NewValue0(v.Pos, OpConst8, t)
 16818  		v0.AuxInt = int8ToAuxInt(1)
 16819  		v.AddArg2(v0, x)
 16820  		return true
 16821  	}
 16822  	return false
 16823  }
 16824  func rewriteValuegeneric_OpNeq16(v *Value) bool {
 16825  	v_1 := v.Args[1]
 16826  	v_0 := v.Args[0]
 16827  	b := v.Block
 16828  	typ := &b.Func.Config.Types
 16829  	// match: (Neq16 x x)
 16830  	// result: (ConstBool [false])
 16831  	for {
 16832  		x := v_0
 16833  		if x != v_1 {
 16834  			break
 16835  		}
 16836  		v.reset(OpConstBool)
 16837  		v.AuxInt = boolToAuxInt(false)
 16838  		return true
 16839  	}
 16840  	// match: (Neq16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x))
 16841  	// result: (Neq16 (Const16 <t> [c-d]) x)
 16842  	for {
 16843  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 16844  			if v_0.Op != OpConst16 {
 16845  				continue
 16846  			}
 16847  			t := v_0.Type
 16848  			c := auxIntToInt16(v_0.AuxInt)
 16849  			if v_1.Op != OpAdd16 {
 16850  				continue
 16851  			}
 16852  			_ = v_1.Args[1]
 16853  			v_1_0 := v_1.Args[0]
 16854  			v_1_1 := v_1.Args[1]
 16855  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 16856  				if v_1_0.Op != OpConst16 || v_1_0.Type != t {
 16857  					continue
 16858  				}
 16859  				d := auxIntToInt16(v_1_0.AuxInt)
 16860  				x := v_1_1
 16861  				v.reset(OpNeq16)
 16862  				v0 := b.NewValue0(v.Pos, OpConst16, t)
 16863  				v0.AuxInt = int16ToAuxInt(c - d)
 16864  				v.AddArg2(v0, x)
 16865  				return true
 16866  			}
 16867  		}
 16868  		break
 16869  	}
 16870  	// match: (Neq16 (Const16 [c]) (Const16 [d]))
 16871  	// result: (ConstBool [c != d])
 16872  	for {
 16873  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 16874  			if v_0.Op != OpConst16 {
 16875  				continue
 16876  			}
 16877  			c := auxIntToInt16(v_0.AuxInt)
 16878  			if v_1.Op != OpConst16 {
 16879  				continue
 16880  			}
 16881  			d := auxIntToInt16(v_1.AuxInt)
 16882  			v.reset(OpConstBool)
 16883  			v.AuxInt = boolToAuxInt(c != d)
 16884  			return true
 16885  		}
 16886  		break
 16887  	}
 16888  	// match: (Neq16 n (Lsh16x64 (Rsh16x64 (Add16 <t> n (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) )
 16889  	// cond: k > 0 && k < 15 && kbar == 16 - k
 16890  	// result: (Neq16 (And16 <t> n (Const16 <t> [1<<uint(k)-1])) (Const16 <t> [0]))
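	// This matches the expanded form of a signed "divisible by 1<<k" test: the
	// inner expression rounds n toward zero to a multiple of 1<<k (adding a
	// bias of (1<<k)-1 when n is negative, then shifting right and left by k),
	// and n differs from that rounding exactly when n is not a multiple of
	// 1<<k. That is equivalent to the low k bits being nonzero, so the whole
	// comparison becomes n & (1<<k - 1) != 0.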
 16891  	for {
 16892  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 16893  			n := v_0
 16894  			if v_1.Op != OpLsh16x64 {
 16895  				continue
 16896  			}
 16897  			_ = v_1.Args[1]
 16898  			v_1_0 := v_1.Args[0]
 16899  			if v_1_0.Op != OpRsh16x64 {
 16900  				continue
 16901  			}
 16902  			_ = v_1_0.Args[1]
 16903  			v_1_0_0 := v_1_0.Args[0]
 16904  			if v_1_0_0.Op != OpAdd16 {
 16905  				continue
 16906  			}
 16907  			t := v_1_0_0.Type
 16908  			_ = v_1_0_0.Args[1]
 16909  			v_1_0_0_0 := v_1_0_0.Args[0]
 16910  			v_1_0_0_1 := v_1_0_0.Args[1]
 16911  			for _i1 := 0; _i1 <= 1; _i1, v_1_0_0_0, v_1_0_0_1 = _i1+1, v_1_0_0_1, v_1_0_0_0 {
 16912  				if n != v_1_0_0_0 || v_1_0_0_1.Op != OpRsh16Ux64 || v_1_0_0_1.Type != t {
 16913  					continue
 16914  				}
 16915  				_ = v_1_0_0_1.Args[1]
 16916  				v_1_0_0_1_0 := v_1_0_0_1.Args[0]
 16917  				if v_1_0_0_1_0.Op != OpRsh16x64 || v_1_0_0_1_0.Type != t {
 16918  					continue
 16919  				}
 16920  				_ = v_1_0_0_1_0.Args[1]
 16921  				if n != v_1_0_0_1_0.Args[0] {
 16922  					continue
 16923  				}
 16924  				v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
 16925  				if v_1_0_0_1_0_1.Op != OpConst64 || v_1_0_0_1_0_1.Type != typ.UInt64 || auxIntToInt64(v_1_0_0_1_0_1.AuxInt) != 15 {
 16926  					continue
 16927  				}
 16928  				v_1_0_0_1_1 := v_1_0_0_1.Args[1]
 16929  				if v_1_0_0_1_1.Op != OpConst64 || v_1_0_0_1_1.Type != typ.UInt64 {
 16930  					continue
 16931  				}
 16932  				kbar := auxIntToInt64(v_1_0_0_1_1.AuxInt)
 16933  				v_1_0_1 := v_1_0.Args[1]
 16934  				if v_1_0_1.Op != OpConst64 || v_1_0_1.Type != typ.UInt64 {
 16935  					continue
 16936  				}
 16937  				k := auxIntToInt64(v_1_0_1.AuxInt)
 16938  				v_1_1 := v_1.Args[1]
 16939  				if v_1_1.Op != OpConst64 || v_1_1.Type != typ.UInt64 || auxIntToInt64(v_1_1.AuxInt) != k || !(k > 0 && k < 15 && kbar == 16-k) {
 16940  					continue
 16941  				}
 16942  				v.reset(OpNeq16)
 16943  				v0 := b.NewValue0(v.Pos, OpAnd16, t)
 16944  				v1 := b.NewValue0(v.Pos, OpConst16, t)
 16945  				v1.AuxInt = int16ToAuxInt(1<<uint(k) - 1)
 16946  				v0.AddArg2(n, v1)
 16947  				v2 := b.NewValue0(v.Pos, OpConst16, t)
 16948  				v2.AuxInt = int16ToAuxInt(0)
 16949  				v.AddArg2(v0, v2)
 16950  				return true
 16951  			}
 16952  		}
 16953  		break
 16954  	}
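        	// Illustrative reading: the matched tree is the strength-reduced form of a
        	// signed remainder check, roughly n % (1<<k) != 0, where
        	// ((n + ((n>>15) >>> (16-k))) >> k) << k reconstructs n rounded toward zero
        	// to a multiple of 1<<k. Whatever the sign of n, that remainder is nonzero
        	// exactly when the low k bits of n are nonzero, so the rule rewrites the
        	// check to the mask test n & (1<<k - 1) != 0. The 32-, 64-, and 8-bit rules
        	// below follow the same shape with 31, 63, and 7 in place of 15.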
 16955  	// match: (Neq16 s:(Sub16 x y) (Const16 [0]))
 16956  	// cond: s.Uses == 1
 16957  	// result: (Neq16 x y)
 16958  	for {
 16959  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 16960  			s := v_0
 16961  			if s.Op != OpSub16 {
 16962  				continue
 16963  			}
 16964  			y := s.Args[1]
 16965  			x := s.Args[0]
 16966  			if v_1.Op != OpConst16 || auxIntToInt16(v_1.AuxInt) != 0 || !(s.Uses == 1) {
 16967  				continue
 16968  			}
 16969  			v.reset(OpNeq16)
 16970  			v.AddArg2(x, y)
 16971  			return true
 16972  		}
 16973  		break
 16974  	}
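        	// Illustrative reading: when the subtraction has no other uses,
        	//	x - y != 0
        	// is the same test as
        	//	x != y
        	// so the Sub16 is elided rather than materialized just to compare with zero.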
 16975  	// match: (Neq16 (And16 <t> x (Const16 <t> [y])) (Const16 <t> [y]))
 16976  	// cond: oneBit16(y)
 16977  	// result: (Eq16 (And16 <t> x (Const16 <t> [y])) (Const16 <t> [0]))
 16978  	for {
 16979  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 16980  			if v_0.Op != OpAnd16 {
 16981  				continue
 16982  			}
 16983  			t := v_0.Type
 16984  			_ = v_0.Args[1]
 16985  			v_0_0 := v_0.Args[0]
 16986  			v_0_1 := v_0.Args[1]
 16987  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
 16988  				x := v_0_0
 16989  				if v_0_1.Op != OpConst16 || v_0_1.Type != t {
 16990  					continue
 16991  				}
 16992  				y := auxIntToInt16(v_0_1.AuxInt)
 16993  				if v_1.Op != OpConst16 || v_1.Type != t || auxIntToInt16(v_1.AuxInt) != y || !(oneBit16(y)) {
 16994  					continue
 16995  				}
 16996  				v.reset(OpEq16)
 16997  				v0 := b.NewValue0(v.Pos, OpAnd16, t)
 16998  				v1 := b.NewValue0(v.Pos, OpConst16, t)
 16999  				v1.AuxInt = int16ToAuxInt(y)
 17000  				v0.AddArg2(x, v1)
 17001  				v2 := b.NewValue0(v.Pos, OpConst16, t)
 17002  				v2.AuxInt = int16ToAuxInt(0)
 17003  				v.AddArg2(v0, v2)
 17004  				return true
 17005  			}
 17006  		}
 17007  		break
 17008  	}
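        	// Illustrative reading: when y has exactly one bit set, x & y is either 0
        	// or y, so (x & y) != y is equivalent to (x & y) == 0, and the rule prefers
        	// the comparison against zero.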
 17009  	return false
 17010  }
 17011  func rewriteValuegeneric_OpNeq32(v *Value) bool {
 17012  	v_1 := v.Args[1]
 17013  	v_0 := v.Args[0]
 17014  	b := v.Block
 17015  	typ := &b.Func.Config.Types
 17016  	// match: (Neq32 x x)
 17017  	// result: (ConstBool [false])
 17018  	for {
 17019  		x := v_0
 17020  		if x != v_1 {
 17021  			break
 17022  		}
 17023  		v.reset(OpConstBool)
 17024  		v.AuxInt = boolToAuxInt(false)
 17025  		return true
 17026  	}
 17027  	// match: (Neq32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x))
 17028  	// result: (Neq32 (Const32 <t> [c-d]) x)
 17029  	for {
 17030  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17031  			if v_0.Op != OpConst32 {
 17032  				continue
 17033  			}
 17034  			t := v_0.Type
 17035  			c := auxIntToInt32(v_0.AuxInt)
 17036  			if v_1.Op != OpAdd32 {
 17037  				continue
 17038  			}
 17039  			_ = v_1.Args[1]
 17040  			v_1_0 := v_1.Args[0]
 17041  			v_1_1 := v_1.Args[1]
 17042  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 17043  				if v_1_0.Op != OpConst32 || v_1_0.Type != t {
 17044  					continue
 17045  				}
 17046  				d := auxIntToInt32(v_1_0.AuxInt)
 17047  				x := v_1_1
 17048  				v.reset(OpNeq32)
 17049  				v0 := b.NewValue0(v.Pos, OpConst32, t)
 17050  				v0.AuxInt = int32ToAuxInt(c - d)
 17051  				v.AddArg2(v0, x)
 17052  				return true
 17053  			}
 17054  		}
 17055  		break
 17056  	}
 17057  	// match: (Neq32 (Const32 [c]) (Const32 [d]))
 17058  	// result: (ConstBool [c != d])
 17059  	for {
 17060  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17061  			if v_0.Op != OpConst32 {
 17062  				continue
 17063  			}
 17064  			c := auxIntToInt32(v_0.AuxInt)
 17065  			if v_1.Op != OpConst32 {
 17066  				continue
 17067  			}
 17068  			d := auxIntToInt32(v_1.AuxInt)
 17069  			v.reset(OpConstBool)
 17070  			v.AuxInt = boolToAuxInt(c != d)
 17071  			return true
 17072  		}
 17073  		break
 17074  	}
 17075  	// match: (Neq32 n (Lsh32x64 (Rsh32x64 (Add32 <t> n (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) )
 17076  	// cond: k > 0 && k < 31 && kbar == 32 - k
 17077  	// result: (Neq32 (And32 <t> n (Const32 <t> [1<<uint(k)-1])) (Const32 <t> [0]))
 17078  	for {
 17079  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17080  			n := v_0
 17081  			if v_1.Op != OpLsh32x64 {
 17082  				continue
 17083  			}
 17084  			_ = v_1.Args[1]
 17085  			v_1_0 := v_1.Args[0]
 17086  			if v_1_0.Op != OpRsh32x64 {
 17087  				continue
 17088  			}
 17089  			_ = v_1_0.Args[1]
 17090  			v_1_0_0 := v_1_0.Args[0]
 17091  			if v_1_0_0.Op != OpAdd32 {
 17092  				continue
 17093  			}
 17094  			t := v_1_0_0.Type
 17095  			_ = v_1_0_0.Args[1]
 17096  			v_1_0_0_0 := v_1_0_0.Args[0]
 17097  			v_1_0_0_1 := v_1_0_0.Args[1]
 17098  			for _i1 := 0; _i1 <= 1; _i1, v_1_0_0_0, v_1_0_0_1 = _i1+1, v_1_0_0_1, v_1_0_0_0 {
 17099  				if n != v_1_0_0_0 || v_1_0_0_1.Op != OpRsh32Ux64 || v_1_0_0_1.Type != t {
 17100  					continue
 17101  				}
 17102  				_ = v_1_0_0_1.Args[1]
 17103  				v_1_0_0_1_0 := v_1_0_0_1.Args[0]
 17104  				if v_1_0_0_1_0.Op != OpRsh32x64 || v_1_0_0_1_0.Type != t {
 17105  					continue
 17106  				}
 17107  				_ = v_1_0_0_1_0.Args[1]
 17108  				if n != v_1_0_0_1_0.Args[0] {
 17109  					continue
 17110  				}
 17111  				v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
 17112  				if v_1_0_0_1_0_1.Op != OpConst64 || v_1_0_0_1_0_1.Type != typ.UInt64 || auxIntToInt64(v_1_0_0_1_0_1.AuxInt) != 31 {
 17113  					continue
 17114  				}
 17115  				v_1_0_0_1_1 := v_1_0_0_1.Args[1]
 17116  				if v_1_0_0_1_1.Op != OpConst64 || v_1_0_0_1_1.Type != typ.UInt64 {
 17117  					continue
 17118  				}
 17119  				kbar := auxIntToInt64(v_1_0_0_1_1.AuxInt)
 17120  				v_1_0_1 := v_1_0.Args[1]
 17121  				if v_1_0_1.Op != OpConst64 || v_1_0_1.Type != typ.UInt64 {
 17122  					continue
 17123  				}
 17124  				k := auxIntToInt64(v_1_0_1.AuxInt)
 17125  				v_1_1 := v_1.Args[1]
 17126  				if v_1_1.Op != OpConst64 || v_1_1.Type != typ.UInt64 || auxIntToInt64(v_1_1.AuxInt) != k || !(k > 0 && k < 31 && kbar == 32-k) {
 17127  					continue
 17128  				}
 17129  				v.reset(OpNeq32)
 17130  				v0 := b.NewValue0(v.Pos, OpAnd32, t)
 17131  				v1 := b.NewValue0(v.Pos, OpConst32, t)
 17132  				v1.AuxInt = int32ToAuxInt(1<<uint(k) - 1)
 17133  				v0.AddArg2(n, v1)
 17134  				v2 := b.NewValue0(v.Pos, OpConst32, t)
 17135  				v2.AuxInt = int32ToAuxInt(0)
 17136  				v.AddArg2(v0, v2)
 17137  				return true
 17138  			}
 17139  		}
 17140  		break
 17141  	}
 17142  	// match: (Neq32 s:(Sub32 x y) (Const32 [0]))
 17143  	// cond: s.Uses == 1
 17144  	// result: (Neq32 x y)
 17145  	for {
 17146  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17147  			s := v_0
 17148  			if s.Op != OpSub32 {
 17149  				continue
 17150  			}
 17151  			y := s.Args[1]
 17152  			x := s.Args[0]
 17153  			if v_1.Op != OpConst32 || auxIntToInt32(v_1.AuxInt) != 0 || !(s.Uses == 1) {
 17154  				continue
 17155  			}
 17156  			v.reset(OpNeq32)
 17157  			v.AddArg2(x, y)
 17158  			return true
 17159  		}
 17160  		break
 17161  	}
 17162  	// match: (Neq32 (And32 <t> x (Const32 <t> [y])) (Const32 <t> [y]))
 17163  	// cond: oneBit32(y)
 17164  	// result: (Eq32 (And32 <t> x (Const32 <t> [y])) (Const32 <t> [0]))
 17165  	for {
 17166  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17167  			if v_0.Op != OpAnd32 {
 17168  				continue
 17169  			}
 17170  			t := v_0.Type
 17171  			_ = v_0.Args[1]
 17172  			v_0_0 := v_0.Args[0]
 17173  			v_0_1 := v_0.Args[1]
 17174  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
 17175  				x := v_0_0
 17176  				if v_0_1.Op != OpConst32 || v_0_1.Type != t {
 17177  					continue
 17178  				}
 17179  				y := auxIntToInt32(v_0_1.AuxInt)
 17180  				if v_1.Op != OpConst32 || v_1.Type != t || auxIntToInt32(v_1.AuxInt) != y || !(oneBit32(y)) {
 17181  					continue
 17182  				}
 17183  				v.reset(OpEq32)
 17184  				v0 := b.NewValue0(v.Pos, OpAnd32, t)
 17185  				v1 := b.NewValue0(v.Pos, OpConst32, t)
 17186  				v1.AuxInt = int32ToAuxInt(y)
 17187  				v0.AddArg2(x, v1)
 17188  				v2 := b.NewValue0(v.Pos, OpConst32, t)
 17189  				v2.AuxInt = int32ToAuxInt(0)
 17190  				v.AddArg2(v0, v2)
 17191  				return true
 17192  			}
 17193  		}
 17194  		break
 17195  	}
 17196  	return false
 17197  }
 17198  func rewriteValuegeneric_OpNeq32F(v *Value) bool {
 17199  	v_1 := v.Args[1]
 17200  	v_0 := v.Args[0]
 17201  	// match: (Neq32F (Const32F [c]) (Const32F [d]))
 17202  	// result: (ConstBool [c != d])
 17203  	for {
 17204  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17205  			if v_0.Op != OpConst32F {
 17206  				continue
 17207  			}
 17208  			c := auxIntToFloat32(v_0.AuxInt)
 17209  			if v_1.Op != OpConst32F {
 17210  				continue
 17211  			}
 17212  			d := auxIntToFloat32(v_1.AuxInt)
 17213  			v.reset(OpConstBool)
 17214  			v.AuxInt = boolToAuxInt(c != d)
 17215  			return true
 17216  		}
 17217  		break
 17218  	}
 17219  	return false
 17220  }
 17221  func rewriteValuegeneric_OpNeq64(v *Value) bool {
 17222  	v_1 := v.Args[1]
 17223  	v_0 := v.Args[0]
 17224  	b := v.Block
 17225  	typ := &b.Func.Config.Types
 17226  	// match: (Neq64 x x)
 17227  	// result: (ConstBool [false])
 17228  	for {
 17229  		x := v_0
 17230  		if x != v_1 {
 17231  			break
 17232  		}
 17233  		v.reset(OpConstBool)
 17234  		v.AuxInt = boolToAuxInt(false)
 17235  		return true
 17236  	}
 17237  	// match: (Neq64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x))
 17238  	// result: (Neq64 (Const64 <t> [c-d]) x)
 17239  	for {
 17240  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17241  			if v_0.Op != OpConst64 {
 17242  				continue
 17243  			}
 17244  			t := v_0.Type
 17245  			c := auxIntToInt64(v_0.AuxInt)
 17246  			if v_1.Op != OpAdd64 {
 17247  				continue
 17248  			}
 17249  			_ = v_1.Args[1]
 17250  			v_1_0 := v_1.Args[0]
 17251  			v_1_1 := v_1.Args[1]
 17252  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 17253  				if v_1_0.Op != OpConst64 || v_1_0.Type != t {
 17254  					continue
 17255  				}
 17256  				d := auxIntToInt64(v_1_0.AuxInt)
 17257  				x := v_1_1
 17258  				v.reset(OpNeq64)
 17259  				v0 := b.NewValue0(v.Pos, OpConst64, t)
 17260  				v0.AuxInt = int64ToAuxInt(c - d)
 17261  				v.AddArg2(v0, x)
 17262  				return true
 17263  			}
 17264  		}
 17265  		break
 17266  	}
 17267  	// match: (Neq64 (Const64 [c]) (Const64 [d]))
 17268  	// result: (ConstBool [c != d])
 17269  	for {
 17270  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17271  			if v_0.Op != OpConst64 {
 17272  				continue
 17273  			}
 17274  			c := auxIntToInt64(v_0.AuxInt)
 17275  			if v_1.Op != OpConst64 {
 17276  				continue
 17277  			}
 17278  			d := auxIntToInt64(v_1.AuxInt)
 17279  			v.reset(OpConstBool)
 17280  			v.AuxInt = boolToAuxInt(c != d)
 17281  			return true
 17282  		}
 17283  		break
 17284  	}
 17285  	// match: (Neq64 n (Lsh64x64 (Rsh64x64 (Add64 <t> n (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) )
 17286  	// cond: k > 0 && k < 63 && kbar == 64 - k
 17287  	// result: (Neq64 (And64 <t> n (Const64 <t> [1<<uint(k)-1])) (Const64 <t> [0]))
 17288  	for {
 17289  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17290  			n := v_0
 17291  			if v_1.Op != OpLsh64x64 {
 17292  				continue
 17293  			}
 17294  			_ = v_1.Args[1]
 17295  			v_1_0 := v_1.Args[0]
 17296  			if v_1_0.Op != OpRsh64x64 {
 17297  				continue
 17298  			}
 17299  			_ = v_1_0.Args[1]
 17300  			v_1_0_0 := v_1_0.Args[0]
 17301  			if v_1_0_0.Op != OpAdd64 {
 17302  				continue
 17303  			}
 17304  			t := v_1_0_0.Type
 17305  			_ = v_1_0_0.Args[1]
 17306  			v_1_0_0_0 := v_1_0_0.Args[0]
 17307  			v_1_0_0_1 := v_1_0_0.Args[1]
 17308  			for _i1 := 0; _i1 <= 1; _i1, v_1_0_0_0, v_1_0_0_1 = _i1+1, v_1_0_0_1, v_1_0_0_0 {
 17309  				if n != v_1_0_0_0 || v_1_0_0_1.Op != OpRsh64Ux64 || v_1_0_0_1.Type != t {
 17310  					continue
 17311  				}
 17312  				_ = v_1_0_0_1.Args[1]
 17313  				v_1_0_0_1_0 := v_1_0_0_1.Args[0]
 17314  				if v_1_0_0_1_0.Op != OpRsh64x64 || v_1_0_0_1_0.Type != t {
 17315  					continue
 17316  				}
 17317  				_ = v_1_0_0_1_0.Args[1]
 17318  				if n != v_1_0_0_1_0.Args[0] {
 17319  					continue
 17320  				}
 17321  				v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
 17322  				if v_1_0_0_1_0_1.Op != OpConst64 || v_1_0_0_1_0_1.Type != typ.UInt64 || auxIntToInt64(v_1_0_0_1_0_1.AuxInt) != 63 {
 17323  					continue
 17324  				}
 17325  				v_1_0_0_1_1 := v_1_0_0_1.Args[1]
 17326  				if v_1_0_0_1_1.Op != OpConst64 || v_1_0_0_1_1.Type != typ.UInt64 {
 17327  					continue
 17328  				}
 17329  				kbar := auxIntToInt64(v_1_0_0_1_1.AuxInt)
 17330  				v_1_0_1 := v_1_0.Args[1]
 17331  				if v_1_0_1.Op != OpConst64 || v_1_0_1.Type != typ.UInt64 {
 17332  					continue
 17333  				}
 17334  				k := auxIntToInt64(v_1_0_1.AuxInt)
 17335  				v_1_1 := v_1.Args[1]
 17336  				if v_1_1.Op != OpConst64 || v_1_1.Type != typ.UInt64 || auxIntToInt64(v_1_1.AuxInt) != k || !(k > 0 && k < 63 && kbar == 64-k) {
 17337  					continue
 17338  				}
 17339  				v.reset(OpNeq64)
 17340  				v0 := b.NewValue0(v.Pos, OpAnd64, t)
 17341  				v1 := b.NewValue0(v.Pos, OpConst64, t)
 17342  				v1.AuxInt = int64ToAuxInt(1<<uint(k) - 1)
 17343  				v0.AddArg2(n, v1)
 17344  				v2 := b.NewValue0(v.Pos, OpConst64, t)
 17345  				v2.AuxInt = int64ToAuxInt(0)
 17346  				v.AddArg2(v0, v2)
 17347  				return true
 17348  			}
 17349  		}
 17350  		break
 17351  	}
 17352  	// match: (Neq64 s:(Sub64 x y) (Const64 [0]))
 17353  	// cond: s.Uses == 1
 17354  	// result: (Neq64 x y)
 17355  	for {
 17356  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17357  			s := v_0
 17358  			if s.Op != OpSub64 {
 17359  				continue
 17360  			}
 17361  			y := s.Args[1]
 17362  			x := s.Args[0]
 17363  			if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 || !(s.Uses == 1) {
 17364  				continue
 17365  			}
 17366  			v.reset(OpNeq64)
 17367  			v.AddArg2(x, y)
 17368  			return true
 17369  		}
 17370  		break
 17371  	}
 17372  	// match: (Neq64 (And64 <t> x (Const64 <t> [y])) (Const64 <t> [y]))
 17373  	// cond: oneBit64(y)
 17374  	// result: (Eq64 (And64 <t> x (Const64 <t> [y])) (Const64 <t> [0]))
 17375  	for {
 17376  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17377  			if v_0.Op != OpAnd64 {
 17378  				continue
 17379  			}
 17380  			t := v_0.Type
 17381  			_ = v_0.Args[1]
 17382  			v_0_0 := v_0.Args[0]
 17383  			v_0_1 := v_0.Args[1]
 17384  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
 17385  				x := v_0_0
 17386  				if v_0_1.Op != OpConst64 || v_0_1.Type != t {
 17387  					continue
 17388  				}
 17389  				y := auxIntToInt64(v_0_1.AuxInt)
 17390  				if v_1.Op != OpConst64 || v_1.Type != t || auxIntToInt64(v_1.AuxInt) != y || !(oneBit64(y)) {
 17391  					continue
 17392  				}
 17393  				v.reset(OpEq64)
 17394  				v0 := b.NewValue0(v.Pos, OpAnd64, t)
 17395  				v1 := b.NewValue0(v.Pos, OpConst64, t)
 17396  				v1.AuxInt = int64ToAuxInt(y)
 17397  				v0.AddArg2(x, v1)
 17398  				v2 := b.NewValue0(v.Pos, OpConst64, t)
 17399  				v2.AuxInt = int64ToAuxInt(0)
 17400  				v.AddArg2(v0, v2)
 17401  				return true
 17402  			}
 17403  		}
 17404  		break
 17405  	}
 17406  	return false
 17407  }
 17408  func rewriteValuegeneric_OpNeq64F(v *Value) bool {
 17409  	v_1 := v.Args[1]
 17410  	v_0 := v.Args[0]
 17411  	// match: (Neq64F (Const64F [c]) (Const64F [d]))
 17412  	// result: (ConstBool [c != d])
 17413  	for {
 17414  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17415  			if v_0.Op != OpConst64F {
 17416  				continue
 17417  			}
 17418  			c := auxIntToFloat64(v_0.AuxInt)
 17419  			if v_1.Op != OpConst64F {
 17420  				continue
 17421  			}
 17422  			d := auxIntToFloat64(v_1.AuxInt)
 17423  			v.reset(OpConstBool)
 17424  			v.AuxInt = boolToAuxInt(c != d)
 17425  			return true
 17426  		}
 17427  		break
 17428  	}
 17429  	return false
 17430  }
 17431  func rewriteValuegeneric_OpNeq8(v *Value) bool {
 17432  	v_1 := v.Args[1]
 17433  	v_0 := v.Args[0]
 17434  	b := v.Block
 17435  	typ := &b.Func.Config.Types
 17436  	// match: (Neq8 x x)
 17437  	// result: (ConstBool [false])
 17438  	for {
 17439  		x := v_0
 17440  		if x != v_1 {
 17441  			break
 17442  		}
 17443  		v.reset(OpConstBool)
 17444  		v.AuxInt = boolToAuxInt(false)
 17445  		return true
 17446  	}
 17447  	// match: (Neq8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x))
 17448  	// result: (Neq8 (Const8 <t> [c-d]) x)
 17449  	for {
 17450  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17451  			if v_0.Op != OpConst8 {
 17452  				continue
 17453  			}
 17454  			t := v_0.Type
 17455  			c := auxIntToInt8(v_0.AuxInt)
 17456  			if v_1.Op != OpAdd8 {
 17457  				continue
 17458  			}
 17459  			_ = v_1.Args[1]
 17460  			v_1_0 := v_1.Args[0]
 17461  			v_1_1 := v_1.Args[1]
 17462  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 17463  				if v_1_0.Op != OpConst8 || v_1_0.Type != t {
 17464  					continue
 17465  				}
 17466  				d := auxIntToInt8(v_1_0.AuxInt)
 17467  				x := v_1_1
 17468  				v.reset(OpNeq8)
 17469  				v0 := b.NewValue0(v.Pos, OpConst8, t)
 17470  				v0.AuxInt = int8ToAuxInt(c - d)
 17471  				v.AddArg2(v0, x)
 17472  				return true
 17473  			}
 17474  		}
 17475  		break
 17476  	}
 17477  	// match: (Neq8 (Const8 [c]) (Const8 [d]))
 17478  	// result: (ConstBool [c != d])
 17479  	for {
 17480  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17481  			if v_0.Op != OpConst8 {
 17482  				continue
 17483  			}
 17484  			c := auxIntToInt8(v_0.AuxInt)
 17485  			if v_1.Op != OpConst8 {
 17486  				continue
 17487  			}
 17488  			d := auxIntToInt8(v_1.AuxInt)
 17489  			v.reset(OpConstBool)
 17490  			v.AuxInt = boolToAuxInt(c != d)
 17491  			return true
 17492  		}
 17493  		break
 17494  	}
 17495  	// match: (Neq8 n (Lsh8x64 (Rsh8x64 (Add8 <t> n (Rsh8Ux64 <t> (Rsh8x64 <t> n (Const64 <typ.UInt64> [ 7])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) )
 17496  	// cond: k > 0 && k < 7 && kbar == 8 - k
 17497  	// result: (Neq8 (And8 <t> n (Const8 <t> [1<<uint(k)-1])) (Const8 <t> [0]))
 17498  	for {
 17499  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17500  			n := v_0
 17501  			if v_1.Op != OpLsh8x64 {
 17502  				continue
 17503  			}
 17504  			_ = v_1.Args[1]
 17505  			v_1_0 := v_1.Args[0]
 17506  			if v_1_0.Op != OpRsh8x64 {
 17507  				continue
 17508  			}
 17509  			_ = v_1_0.Args[1]
 17510  			v_1_0_0 := v_1_0.Args[0]
 17511  			if v_1_0_0.Op != OpAdd8 {
 17512  				continue
 17513  			}
 17514  			t := v_1_0_0.Type
 17515  			_ = v_1_0_0.Args[1]
 17516  			v_1_0_0_0 := v_1_0_0.Args[0]
 17517  			v_1_0_0_1 := v_1_0_0.Args[1]
 17518  			for _i1 := 0; _i1 <= 1; _i1, v_1_0_0_0, v_1_0_0_1 = _i1+1, v_1_0_0_1, v_1_0_0_0 {
 17519  				if n != v_1_0_0_0 || v_1_0_0_1.Op != OpRsh8Ux64 || v_1_0_0_1.Type != t {
 17520  					continue
 17521  				}
 17522  				_ = v_1_0_0_1.Args[1]
 17523  				v_1_0_0_1_0 := v_1_0_0_1.Args[0]
 17524  				if v_1_0_0_1_0.Op != OpRsh8x64 || v_1_0_0_1_0.Type != t {
 17525  					continue
 17526  				}
 17527  				_ = v_1_0_0_1_0.Args[1]
 17528  				if n != v_1_0_0_1_0.Args[0] {
 17529  					continue
 17530  				}
 17531  				v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
 17532  				if v_1_0_0_1_0_1.Op != OpConst64 || v_1_0_0_1_0_1.Type != typ.UInt64 || auxIntToInt64(v_1_0_0_1_0_1.AuxInt) != 7 {
 17533  					continue
 17534  				}
 17535  				v_1_0_0_1_1 := v_1_0_0_1.Args[1]
 17536  				if v_1_0_0_1_1.Op != OpConst64 || v_1_0_0_1_1.Type != typ.UInt64 {
 17537  					continue
 17538  				}
 17539  				kbar := auxIntToInt64(v_1_0_0_1_1.AuxInt)
 17540  				v_1_0_1 := v_1_0.Args[1]
 17541  				if v_1_0_1.Op != OpConst64 || v_1_0_1.Type != typ.UInt64 {
 17542  					continue
 17543  				}
 17544  				k := auxIntToInt64(v_1_0_1.AuxInt)
 17545  				v_1_1 := v_1.Args[1]
 17546  				if v_1_1.Op != OpConst64 || v_1_1.Type != typ.UInt64 || auxIntToInt64(v_1_1.AuxInt) != k || !(k > 0 && k < 7 && kbar == 8-k) {
 17547  					continue
 17548  				}
 17549  				v.reset(OpNeq8)
 17550  				v0 := b.NewValue0(v.Pos, OpAnd8, t)
 17551  				v1 := b.NewValue0(v.Pos, OpConst8, t)
 17552  				v1.AuxInt = int8ToAuxInt(1<<uint(k) - 1)
 17553  				v0.AddArg2(n, v1)
 17554  				v2 := b.NewValue0(v.Pos, OpConst8, t)
 17555  				v2.AuxInt = int8ToAuxInt(0)
 17556  				v.AddArg2(v0, v2)
 17557  				return true
 17558  			}
 17559  		}
 17560  		break
 17561  	}
 17562  	// match: (Neq8 s:(Sub8 x y) (Const8 [0]))
 17563  	// cond: s.Uses == 1
 17564  	// result: (Neq8 x y)
 17565  	for {
 17566  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17567  			s := v_0
 17568  			if s.Op != OpSub8 {
 17569  				continue
 17570  			}
 17571  			y := s.Args[1]
 17572  			x := s.Args[0]
 17573  			if v_1.Op != OpConst8 || auxIntToInt8(v_1.AuxInt) != 0 || !(s.Uses == 1) {
 17574  				continue
 17575  			}
 17576  			v.reset(OpNeq8)
 17577  			v.AddArg2(x, y)
 17578  			return true
 17579  		}
 17580  		break
 17581  	}
 17582  	// match: (Neq8 (And8 <t> x (Const8 <t> [y])) (Const8 <t> [y]))
 17583  	// cond: oneBit8(y)
 17584  	// result: (Eq8 (And8 <t> x (Const8 <t> [y])) (Const8 <t> [0]))
 17585  	for {
 17586  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17587  			if v_0.Op != OpAnd8 {
 17588  				continue
 17589  			}
 17590  			t := v_0.Type
 17591  			_ = v_0.Args[1]
 17592  			v_0_0 := v_0.Args[0]
 17593  			v_0_1 := v_0.Args[1]
 17594  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
 17595  				x := v_0_0
 17596  				if v_0_1.Op != OpConst8 || v_0_1.Type != t {
 17597  					continue
 17598  				}
 17599  				y := auxIntToInt8(v_0_1.AuxInt)
 17600  				if v_1.Op != OpConst8 || v_1.Type != t || auxIntToInt8(v_1.AuxInt) != y || !(oneBit8(y)) {
 17601  					continue
 17602  				}
 17603  				v.reset(OpEq8)
 17604  				v0 := b.NewValue0(v.Pos, OpAnd8, t)
 17605  				v1 := b.NewValue0(v.Pos, OpConst8, t)
 17606  				v1.AuxInt = int8ToAuxInt(y)
 17607  				v0.AddArg2(x, v1)
 17608  				v2 := b.NewValue0(v.Pos, OpConst8, t)
 17609  				v2.AuxInt = int8ToAuxInt(0)
 17610  				v.AddArg2(v0, v2)
 17611  				return true
 17612  			}
 17613  		}
 17614  		break
 17615  	}
 17616  	return false
 17617  }
 17618  func rewriteValuegeneric_OpNeqB(v *Value) bool {
 17619  	v_1 := v.Args[1]
 17620  	v_0 := v.Args[0]
 17621  	// match: (NeqB (ConstBool [c]) (ConstBool [d]))
 17622  	// result: (ConstBool [c != d])
 17623  	for {
 17624  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17625  			if v_0.Op != OpConstBool {
 17626  				continue
 17627  			}
 17628  			c := auxIntToBool(v_0.AuxInt)
 17629  			if v_1.Op != OpConstBool {
 17630  				continue
 17631  			}
 17632  			d := auxIntToBool(v_1.AuxInt)
 17633  			v.reset(OpConstBool)
 17634  			v.AuxInt = boolToAuxInt(c != d)
 17635  			return true
 17636  		}
 17637  		break
 17638  	}
 17639  	// match: (NeqB (ConstBool [false]) x)
 17640  	// result: x
 17641  	for {
 17642  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17643  			if v_0.Op != OpConstBool || auxIntToBool(v_0.AuxInt) != false {
 17644  				continue
 17645  			}
 17646  			x := v_1
 17647  			v.copyOf(x)
 17648  			return true
 17649  		}
 17650  		break
 17651  	}
 17652  	// match: (NeqB (ConstBool [true]) x)
 17653  	// result: (Not x)
 17654  	for {
 17655  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17656  			if v_0.Op != OpConstBool || auxIntToBool(v_0.AuxInt) != true {
 17657  				continue
 17658  			}
 17659  			x := v_1
 17660  			v.reset(OpNot)
 17661  			v.AddArg(x)
 17662  			return true
 17663  		}
 17664  		break
 17665  	}
 17666  	// match: (NeqB (Not x) (Not y))
 17667  	// result: (NeqB x y)
 17668  	for {
 17669  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17670  			if v_0.Op != OpNot {
 17671  				continue
 17672  			}
 17673  			x := v_0.Args[0]
 17674  			if v_1.Op != OpNot {
 17675  				continue
 17676  			}
 17677  			y := v_1.Args[0]
 17678  			v.reset(OpNeqB)
 17679  			v.AddArg2(x, y)
 17680  			return true
 17681  		}
 17682  		break
 17683  	}
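        	// The NeqB rules above are the usual boolean identities: constants fold,
        	// b != false is b, b != true is !b, and !x != !y is x != y.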
 17684  	return false
 17685  }
 17686  func rewriteValuegeneric_OpNeqInter(v *Value) bool {
 17687  	v_1 := v.Args[1]
 17688  	v_0 := v.Args[0]
 17689  	b := v.Block
 17690  	typ := &b.Func.Config.Types
 17691  	// match: (NeqInter x y)
 17692  	// result: (NeqPtr (ITab x) (ITab y))
 17693  	for {
 17694  		x := v_0
 17695  		y := v_1
 17696  		v.reset(OpNeqPtr)
 17697  		v0 := b.NewValue0(v.Pos, OpITab, typ.Uintptr)
 17698  		v0.AddArg(x)
 17699  		v1 := b.NewValue0(v.Pos, OpITab, typ.Uintptr)
 17700  		v1.AddArg(y)
 17701  		v.AddArg2(v0, v1)
 17702  		return true
 17703  	}
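        	// NeqInter (and, analogously, NeqSlice further below) is unconditionally
        	// lowered to a pointer inequality: the interfaces' itab words are extracted
        	// with ITab and the comparison is left to the NeqPtr rules. Presumably the
        	// front end only emits this op where comparing those words is sufficient.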
 17704  }
 17705  func rewriteValuegeneric_OpNeqPtr(v *Value) bool {
 17706  	v_1 := v.Args[1]
 17707  	v_0 := v.Args[0]
 17708  	// match: (NeqPtr x x)
 17709  	// result: (ConstBool [false])
 17710  	for {
 17711  		x := v_0
 17712  		if x != v_1 {
 17713  			break
 17714  		}
 17715  		v.reset(OpConstBool)
 17716  		v.AuxInt = boolToAuxInt(false)
 17717  		return true
 17718  	}
 17719  	// match: (NeqPtr (Addr {x} _) (Addr {y} _))
 17720  	// result: (ConstBool [x != y])
 17721  	for {
 17722  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17723  			if v_0.Op != OpAddr {
 17724  				continue
 17725  			}
 17726  			x := auxToSym(v_0.Aux)
 17727  			if v_1.Op != OpAddr {
 17728  				continue
 17729  			}
 17730  			y := auxToSym(v_1.Aux)
 17731  			v.reset(OpConstBool)
 17732  			v.AuxInt = boolToAuxInt(x != y)
 17733  			return true
 17734  		}
 17735  		break
 17736  	}
 17737  	// match: (NeqPtr (Addr {x} _) (OffPtr [o] (Addr {y} _)))
 17738  	// result: (ConstBool [x != y || o != 0])
 17739  	for {
 17740  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17741  			if v_0.Op != OpAddr {
 17742  				continue
 17743  			}
 17744  			x := auxToSym(v_0.Aux)
 17745  			if v_1.Op != OpOffPtr {
 17746  				continue
 17747  			}
 17748  			o := auxIntToInt64(v_1.AuxInt)
 17749  			v_1_0 := v_1.Args[0]
 17750  			if v_1_0.Op != OpAddr {
 17751  				continue
 17752  			}
 17753  			y := auxToSym(v_1_0.Aux)
 17754  			v.reset(OpConstBool)
 17755  			v.AuxInt = boolToAuxInt(x != y || o != 0)
 17756  			return true
 17757  		}
 17758  		break
 17759  	}
 17760  	// match: (NeqPtr (OffPtr [o1] (Addr {x} _)) (OffPtr [o2] (Addr {y} _)))
 17761  	// result: (ConstBool [x != y || o1 != o2])
 17762  	for {
 17763  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17764  			if v_0.Op != OpOffPtr {
 17765  				continue
 17766  			}
 17767  			o1 := auxIntToInt64(v_0.AuxInt)
 17768  			v_0_0 := v_0.Args[0]
 17769  			if v_0_0.Op != OpAddr {
 17770  				continue
 17771  			}
 17772  			x := auxToSym(v_0_0.Aux)
 17773  			if v_1.Op != OpOffPtr {
 17774  				continue
 17775  			}
 17776  			o2 := auxIntToInt64(v_1.AuxInt)
 17777  			v_1_0 := v_1.Args[0]
 17778  			if v_1_0.Op != OpAddr {
 17779  				continue
 17780  			}
 17781  			y := auxToSym(v_1_0.Aux)
 17782  			v.reset(OpConstBool)
 17783  			v.AuxInt = boolToAuxInt(x != y || o1 != o2)
 17784  			return true
 17785  		}
 17786  		break
 17787  	}
 17788  	// match: (NeqPtr (LocalAddr {x} _ _) (LocalAddr {y} _ _))
 17789  	// result: (ConstBool [x != y])
 17790  	for {
 17791  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17792  			if v_0.Op != OpLocalAddr {
 17793  				continue
 17794  			}
 17795  			x := auxToSym(v_0.Aux)
 17796  			if v_1.Op != OpLocalAddr {
 17797  				continue
 17798  			}
 17799  			y := auxToSym(v_1.Aux)
 17800  			v.reset(OpConstBool)
 17801  			v.AuxInt = boolToAuxInt(x != y)
 17802  			return true
 17803  		}
 17804  		break
 17805  	}
 17806  	// match: (NeqPtr (LocalAddr {x} _ _) (OffPtr [o] (LocalAddr {y} _ _)))
 17807  	// result: (ConstBool [x != y || o != 0])
 17808  	for {
 17809  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17810  			if v_0.Op != OpLocalAddr {
 17811  				continue
 17812  			}
 17813  			x := auxToSym(v_0.Aux)
 17814  			if v_1.Op != OpOffPtr {
 17815  				continue
 17816  			}
 17817  			o := auxIntToInt64(v_1.AuxInt)
 17818  			v_1_0 := v_1.Args[0]
 17819  			if v_1_0.Op != OpLocalAddr {
 17820  				continue
 17821  			}
 17822  			y := auxToSym(v_1_0.Aux)
 17823  			v.reset(OpConstBool)
 17824  			v.AuxInt = boolToAuxInt(x != y || o != 0)
 17825  			return true
 17826  		}
 17827  		break
 17828  	}
 17829  	// match: (NeqPtr (OffPtr [o1] (LocalAddr {x} _ _)) (OffPtr [o2] (LocalAddr {y} _ _)))
 17830  	// result: (ConstBool [x != y || o1 != o2])
 17831  	for {
 17832  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17833  			if v_0.Op != OpOffPtr {
 17834  				continue
 17835  			}
 17836  			o1 := auxIntToInt64(v_0.AuxInt)
 17837  			v_0_0 := v_0.Args[0]
 17838  			if v_0_0.Op != OpLocalAddr {
 17839  				continue
 17840  			}
 17841  			x := auxToSym(v_0_0.Aux)
 17842  			if v_1.Op != OpOffPtr {
 17843  				continue
 17844  			}
 17845  			o2 := auxIntToInt64(v_1.AuxInt)
 17846  			v_1_0 := v_1.Args[0]
 17847  			if v_1_0.Op != OpLocalAddr {
 17848  				continue
 17849  			}
 17850  			y := auxToSym(v_1_0.Aux)
 17851  			v.reset(OpConstBool)
 17852  			v.AuxInt = boolToAuxInt(x != y || o1 != o2)
 17853  			return true
 17854  		}
 17855  		break
 17856  	}
 17857  	// match: (NeqPtr (OffPtr [o1] p1) p2)
 17858  	// cond: isSamePtr(p1, p2)
 17859  	// result: (ConstBool [o1 != 0])
 17860  	for {
 17861  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17862  			if v_0.Op != OpOffPtr {
 17863  				continue
 17864  			}
 17865  			o1 := auxIntToInt64(v_0.AuxInt)
 17866  			p1 := v_0.Args[0]
 17867  			p2 := v_1
 17868  			if !(isSamePtr(p1, p2)) {
 17869  				continue
 17870  			}
 17871  			v.reset(OpConstBool)
 17872  			v.AuxInt = boolToAuxInt(o1 != 0)
 17873  			return true
 17874  		}
 17875  		break
 17876  	}
 17877  	// match: (NeqPtr (OffPtr [o1] p1) (OffPtr [o2] p2))
 17878  	// cond: isSamePtr(p1, p2)
 17879  	// result: (ConstBool [o1 != o2])
 17880  	for {
 17881  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17882  			if v_0.Op != OpOffPtr {
 17883  				continue
 17884  			}
 17885  			o1 := auxIntToInt64(v_0.AuxInt)
 17886  			p1 := v_0.Args[0]
 17887  			if v_1.Op != OpOffPtr {
 17888  				continue
 17889  			}
 17890  			o2 := auxIntToInt64(v_1.AuxInt)
 17891  			p2 := v_1.Args[0]
 17892  			if !(isSamePtr(p1, p2)) {
 17893  				continue
 17894  			}
 17895  			v.reset(OpConstBool)
 17896  			v.AuxInt = boolToAuxInt(o1 != o2)
 17897  			return true
 17898  		}
 17899  		break
 17900  	}
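        	// The blocks above fold address comparisons that are decidable at compile
        	// time: addresses of distinct symbols (Addr) or distinct stack slots
        	// (LocalAddr) are taken to be unequal, and two offsets into the same base
        	// pointer differ exactly when the offsets differ.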
 17901  	// match: (NeqPtr (Const32 [c]) (Const32 [d]))
 17902  	// result: (ConstBool [c != d])
 17903  	for {
 17904  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17905  			if v_0.Op != OpConst32 {
 17906  				continue
 17907  			}
 17908  			c := auxIntToInt32(v_0.AuxInt)
 17909  			if v_1.Op != OpConst32 {
 17910  				continue
 17911  			}
 17912  			d := auxIntToInt32(v_1.AuxInt)
 17913  			v.reset(OpConstBool)
 17914  			v.AuxInt = boolToAuxInt(c != d)
 17915  			return true
 17916  		}
 17917  		break
 17918  	}
 17919  	// match: (NeqPtr (Const64 [c]) (Const64 [d]))
 17920  	// result: (ConstBool [c != d])
 17921  	for {
 17922  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17923  			if v_0.Op != OpConst64 {
 17924  				continue
 17925  			}
 17926  			c := auxIntToInt64(v_0.AuxInt)
 17927  			if v_1.Op != OpConst64 {
 17928  				continue
 17929  			}
 17930  			d := auxIntToInt64(v_1.AuxInt)
 17931  			v.reset(OpConstBool)
 17932  			v.AuxInt = boolToAuxInt(c != d)
 17933  			return true
 17934  		}
 17935  		break
 17936  	}
 17937  	// match: (NeqPtr (LocalAddr _ _) (Addr _))
 17938  	// result: (ConstBool [true])
 17939  	for {
 17940  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17941  			if v_0.Op != OpLocalAddr || v_1.Op != OpAddr {
 17942  				continue
 17943  			}
 17944  			v.reset(OpConstBool)
 17945  			v.AuxInt = boolToAuxInt(true)
 17946  			return true
 17947  		}
 17948  		break
 17949  	}
 17950  	// match: (NeqPtr (OffPtr (LocalAddr _ _)) (Addr _))
 17951  	// result: (ConstBool [true])
 17952  	for {
 17953  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17954  			if v_0.Op != OpOffPtr {
 17955  				continue
 17956  			}
 17957  			v_0_0 := v_0.Args[0]
 17958  			if v_0_0.Op != OpLocalAddr || v_1.Op != OpAddr {
 17959  				continue
 17960  			}
 17961  			v.reset(OpConstBool)
 17962  			v.AuxInt = boolToAuxInt(true)
 17963  			return true
 17964  		}
 17965  		break
 17966  	}
 17967  	// match: (NeqPtr (LocalAddr _ _) (OffPtr (Addr _)))
 17968  	// result: (ConstBool [true])
 17969  	for {
 17970  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17971  			if v_0.Op != OpLocalAddr || v_1.Op != OpOffPtr {
 17972  				continue
 17973  			}
 17974  			v_1_0 := v_1.Args[0]
 17975  			if v_1_0.Op != OpAddr {
 17976  				continue
 17977  			}
 17978  			v.reset(OpConstBool)
 17979  			v.AuxInt = boolToAuxInt(true)
 17980  			return true
 17981  		}
 17982  		break
 17983  	}
 17984  	// match: (NeqPtr (OffPtr (LocalAddr _ _)) (OffPtr (Addr _)))
 17985  	// result: (ConstBool [true])
 17986  	for {
 17987  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 17988  			if v_0.Op != OpOffPtr {
 17989  				continue
 17990  			}
 17991  			v_0_0 := v_0.Args[0]
 17992  			if v_0_0.Op != OpLocalAddr || v_1.Op != OpOffPtr {
 17993  				continue
 17994  			}
 17995  			v_1_0 := v_1.Args[0]
 17996  			if v_1_0.Op != OpAddr {
 17997  				continue
 17998  			}
 17999  			v.reset(OpConstBool)
 18000  			v.AuxInt = boolToAuxInt(true)
 18001  			return true
 18002  		}
 18003  		break
 18004  	}
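        	// A stack-slot address (LocalAddr) is assumed never to coincide with a
        	// global symbol's address (Addr), with or without OffPtr wrappers, so the
        	// four shapes above fold straight to true.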
 18005  	// match: (NeqPtr (AddPtr p1 o1) p2)
 18006  	// cond: isSamePtr(p1, p2)
 18007  	// result: (IsNonNil o1)
 18008  	for {
 18009  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18010  			if v_0.Op != OpAddPtr {
 18011  				continue
 18012  			}
 18013  			o1 := v_0.Args[1]
 18014  			p1 := v_0.Args[0]
 18015  			p2 := v_1
 18016  			if !(isSamePtr(p1, p2)) {
 18017  				continue
 18018  			}
 18019  			v.reset(OpIsNonNil)
 18020  			v.AddArg(o1)
 18021  			return true
 18022  		}
 18023  		break
 18024  	}
 18025  	// match: (NeqPtr (Const32 [0]) p)
 18026  	// result: (IsNonNil p)
 18027  	for {
 18028  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18029  			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 18030  				continue
 18031  			}
 18032  			p := v_1
 18033  			v.reset(OpIsNonNil)
 18034  			v.AddArg(p)
 18035  			return true
 18036  		}
 18037  		break
 18038  	}
 18039  	// match: (NeqPtr (Const64 [0]) p)
 18040  	// result: (IsNonNil p)
 18041  	for {
 18042  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18043  			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 18044  				continue
 18045  			}
 18046  			p := v_1
 18047  			v.reset(OpIsNonNil)
 18048  			v.AddArg(p)
 18049  			return true
 18050  		}
 18051  		break
 18052  	}
 18053  	// match: (NeqPtr (ConstNil) p)
 18054  	// result: (IsNonNil p)
 18055  	for {
 18056  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18057  			if v_0.Op != OpConstNil {
 18058  				continue
 18059  			}
 18060  			p := v_1
 18061  			v.reset(OpIsNonNil)
 18062  			v.AddArg(p)
 18063  			return true
 18064  		}
 18065  		break
 18066  	}
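        	// Comparing a pointer against a zero constant or ConstNil reduces to an
        	// IsNonNil test; the Const32/Const64 forms cover 32- and 64-bit pointer
        	// targets. Similarly, p+o != p above reduces to a non-nil test on the
        	// offset o.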
 18067  	return false
 18068  }
 18069  func rewriteValuegeneric_OpNeqSlice(v *Value) bool {
 18070  	v_1 := v.Args[1]
 18071  	v_0 := v.Args[0]
 18072  	b := v.Block
 18073  	typ := &b.Func.Config.Types
 18074  	// match: (NeqSlice x y)
 18075  	// result: (NeqPtr (SlicePtr x) (SlicePtr y))
 18076  	for {
 18077  		x := v_0
 18078  		y := v_1
 18079  		v.reset(OpNeqPtr)
 18080  		v0 := b.NewValue0(v.Pos, OpSlicePtr, typ.BytePtr)
 18081  		v0.AddArg(x)
 18082  		v1 := b.NewValue0(v.Pos, OpSlicePtr, typ.BytePtr)
 18083  		v1.AddArg(y)
 18084  		v.AddArg2(v0, v1)
 18085  		return true
 18086  	}
 18087  }
 18088  func rewriteValuegeneric_OpNilCheck(v *Value) bool {
 18089  	v_1 := v.Args[1]
 18090  	v_0 := v.Args[0]
 18091  	b := v.Block
 18092  	fe := b.Func.fe
 18093  	// match: (NilCheck (GetG mem) mem)
 18094  	// result: mem
 18095  	for {
 18096  		if v_0.Op != OpGetG {
 18097  			break
 18098  		}
 18099  		mem := v_0.Args[0]
 18100  		if mem != v_1 {
 18101  			break
 18102  		}
 18103  		v.copyOf(mem)
 18104  		return true
 18105  	}
 18106  	// match: (NilCheck (SelectN [0] call:(StaticLECall _ _)) _)
 18107  	// cond: isSameCall(call.Aux, "runtime.newobject") && warnRule(fe.Debug_checknil(), v, "removed nil check")
 18108  	// result: (Invalid)
 18109  	for {
 18110  		if v_0.Op != OpSelectN || auxIntToInt64(v_0.AuxInt) != 0 {
 18111  			break
 18112  		}
 18113  		call := v_0.Args[0]
 18114  		if call.Op != OpStaticLECall || len(call.Args) != 2 || !(isSameCall(call.Aux, "runtime.newobject") && warnRule(fe.Debug_checknil(), v, "removed nil check")) {
 18115  			break
 18116  		}
 18117  		v.reset(OpInvalid)
 18118  		return true
 18119  	}
 18120  	// match: (NilCheck (OffPtr (SelectN [0] call:(StaticLECall _ _))) _)
 18121  	// cond: isSameCall(call.Aux, "runtime.newobject") && warnRule(fe.Debug_checknil(), v, "removed nil check")
 18122  	// result: (Invalid)
 18123  	for {
 18124  		if v_0.Op != OpOffPtr {
 18125  			break
 18126  		}
 18127  		v_0_0 := v_0.Args[0]
 18128  		if v_0_0.Op != OpSelectN || auxIntToInt64(v_0_0.AuxInt) != 0 {
 18129  			break
 18130  		}
 18131  		call := v_0_0.Args[0]
 18132  		if call.Op != OpStaticLECall || len(call.Args) != 2 || !(isSameCall(call.Aux, "runtime.newobject") && warnRule(fe.Debug_checknil(), v, "removed nil check")) {
 18133  			break
 18134  		}
 18135  		v.reset(OpInvalid)
 18136  		return true
 18137  	}
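        	// The pointer returned by runtime.newobject is never nil, so a NilCheck of
        	// that result (directly or through an OffPtr) is provably redundant and is
        	// replaced by Invalid. warnRule always returns true here; it only prints
        	// the "removed nil check" diagnostic under the checknil debug flag.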
 18138  	return false
 18139  }
 18140  func rewriteValuegeneric_OpNot(v *Value) bool {
 18141  	v_0 := v.Args[0]
 18142  	// match: (Not (ConstBool [c]))
 18143  	// result: (ConstBool [!c])
 18144  	for {
 18145  		if v_0.Op != OpConstBool {
 18146  			break
 18147  		}
 18148  		c := auxIntToBool(v_0.AuxInt)
 18149  		v.reset(OpConstBool)
 18150  		v.AuxInt = boolToAuxInt(!c)
 18151  		return true
 18152  	}
 18153  	// match: (Not (Eq64 x y))
 18154  	// result: (Neq64 x y)
 18155  	for {
 18156  		if v_0.Op != OpEq64 {
 18157  			break
 18158  		}
 18159  		y := v_0.Args[1]
 18160  		x := v_0.Args[0]
 18161  		v.reset(OpNeq64)
 18162  		v.AddArg2(x, y)
 18163  		return true
 18164  	}
 18165  	// match: (Not (Eq32 x y))
 18166  	// result: (Neq32 x y)
 18167  	for {
 18168  		if v_0.Op != OpEq32 {
 18169  			break
 18170  		}
 18171  		y := v_0.Args[1]
 18172  		x := v_0.Args[0]
 18173  		v.reset(OpNeq32)
 18174  		v.AddArg2(x, y)
 18175  		return true
 18176  	}
 18177  	// match: (Not (Eq16 x y))
 18178  	// result: (Neq16 x y)
 18179  	for {
 18180  		if v_0.Op != OpEq16 {
 18181  			break
 18182  		}
 18183  		y := v_0.Args[1]
 18184  		x := v_0.Args[0]
 18185  		v.reset(OpNeq16)
 18186  		v.AddArg2(x, y)
 18187  		return true
 18188  	}
 18189  	// match: (Not (Eq8 x y))
 18190  	// result: (Neq8 x y)
 18191  	for {
 18192  		if v_0.Op != OpEq8 {
 18193  			break
 18194  		}
 18195  		y := v_0.Args[1]
 18196  		x := v_0.Args[0]
 18197  		v.reset(OpNeq8)
 18198  		v.AddArg2(x, y)
 18199  		return true
 18200  	}
 18201  	// match: (Not (EqB x y))
 18202  	// result: (NeqB x y)
 18203  	for {
 18204  		if v_0.Op != OpEqB {
 18205  			break
 18206  		}
 18207  		y := v_0.Args[1]
 18208  		x := v_0.Args[0]
 18209  		v.reset(OpNeqB)
 18210  		v.AddArg2(x, y)
 18211  		return true
 18212  	}
 18213  	// match: (Not (EqPtr x y))
 18214  	// result: (NeqPtr x y)
 18215  	for {
 18216  		if v_0.Op != OpEqPtr {
 18217  			break
 18218  		}
 18219  		y := v_0.Args[1]
 18220  		x := v_0.Args[0]
 18221  		v.reset(OpNeqPtr)
 18222  		v.AddArg2(x, y)
 18223  		return true
 18224  	}
 18225  	// match: (Not (Eq64F x y))
 18226  	// result: (Neq64F x y)
 18227  	for {
 18228  		if v_0.Op != OpEq64F {
 18229  			break
 18230  		}
 18231  		y := v_0.Args[1]
 18232  		x := v_0.Args[0]
 18233  		v.reset(OpNeq64F)
 18234  		v.AddArg2(x, y)
 18235  		return true
 18236  	}
 18237  	// match: (Not (Eq32F x y))
 18238  	// result: (Neq32F x y)
 18239  	for {
 18240  		if v_0.Op != OpEq32F {
 18241  			break
 18242  		}
 18243  		y := v_0.Args[1]
 18244  		x := v_0.Args[0]
 18245  		v.reset(OpNeq32F)
 18246  		v.AddArg2(x, y)
 18247  		return true
 18248  	}
 18249  	// match: (Not (Neq64 x y))
 18250  	// result: (Eq64 x y)
 18251  	for {
 18252  		if v_0.Op != OpNeq64 {
 18253  			break
 18254  		}
 18255  		y := v_0.Args[1]
 18256  		x := v_0.Args[0]
 18257  		v.reset(OpEq64)
 18258  		v.AddArg2(x, y)
 18259  		return true
 18260  	}
 18261  	// match: (Not (Neq32 x y))
 18262  	// result: (Eq32 x y)
 18263  	for {
 18264  		if v_0.Op != OpNeq32 {
 18265  			break
 18266  		}
 18267  		y := v_0.Args[1]
 18268  		x := v_0.Args[0]
 18269  		v.reset(OpEq32)
 18270  		v.AddArg2(x, y)
 18271  		return true
 18272  	}
 18273  	// match: (Not (Neq16 x y))
 18274  	// result: (Eq16 x y)
 18275  	for {
 18276  		if v_0.Op != OpNeq16 {
 18277  			break
 18278  		}
 18279  		y := v_0.Args[1]
 18280  		x := v_0.Args[0]
 18281  		v.reset(OpEq16)
 18282  		v.AddArg2(x, y)
 18283  		return true
 18284  	}
 18285  	// match: (Not (Neq8 x y))
 18286  	// result: (Eq8 x y)
 18287  	for {
 18288  		if v_0.Op != OpNeq8 {
 18289  			break
 18290  		}
 18291  		y := v_0.Args[1]
 18292  		x := v_0.Args[0]
 18293  		v.reset(OpEq8)
 18294  		v.AddArg2(x, y)
 18295  		return true
 18296  	}
 18297  	// match: (Not (NeqB x y))
 18298  	// result: (EqB x y)
 18299  	for {
 18300  		if v_0.Op != OpNeqB {
 18301  			break
 18302  		}
 18303  		y := v_0.Args[1]
 18304  		x := v_0.Args[0]
 18305  		v.reset(OpEqB)
 18306  		v.AddArg2(x, y)
 18307  		return true
 18308  	}
 18309  	// match: (Not (NeqPtr x y))
 18310  	// result: (EqPtr x y)
 18311  	for {
 18312  		if v_0.Op != OpNeqPtr {
 18313  			break
 18314  		}
 18315  		y := v_0.Args[1]
 18316  		x := v_0.Args[0]
 18317  		v.reset(OpEqPtr)
 18318  		v.AddArg2(x, y)
 18319  		return true
 18320  	}
 18321  	// match: (Not (Neq64F x y))
 18322  	// result: (Eq64F x y)
 18323  	for {
 18324  		if v_0.Op != OpNeq64F {
 18325  			break
 18326  		}
 18327  		y := v_0.Args[1]
 18328  		x := v_0.Args[0]
 18329  		v.reset(OpEq64F)
 18330  		v.AddArg2(x, y)
 18331  		return true
 18332  	}
 18333  	// match: (Not (Neq32F x y))
 18334  	// result: (Eq32F x y)
 18335  	for {
 18336  		if v_0.Op != OpNeq32F {
 18337  			break
 18338  		}
 18339  		y := v_0.Args[1]
 18340  		x := v_0.Args[0]
 18341  		v.reset(OpEq32F)
 18342  		v.AddArg2(x, y)
 18343  		return true
 18344  	}
 18345  	// match: (Not (Less64 x y))
 18346  	// result: (Leq64 y x)
 18347  	for {
 18348  		if v_0.Op != OpLess64 {
 18349  			break
 18350  		}
 18351  		y := v_0.Args[1]
 18352  		x := v_0.Args[0]
 18353  		v.reset(OpLeq64)
 18354  		v.AddArg2(y, x)
 18355  		return true
 18356  	}
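        	// From here down, Not is pushed into comparisons: !(x < y) becomes y <= x
        	// and !(x <= y) becomes y < x, repeated for every width and for the
        	// unsigned variants, so the explicit negation disappears entirely.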
 18357  	// match: (Not (Less32 x y))
 18358  	// result: (Leq32 y x)
 18359  	for {
 18360  		if v_0.Op != OpLess32 {
 18361  			break
 18362  		}
 18363  		y := v_0.Args[1]
 18364  		x := v_0.Args[0]
 18365  		v.reset(OpLeq32)
 18366  		v.AddArg2(y, x)
 18367  		return true
 18368  	}
 18369  	// match: (Not (Less16 x y))
 18370  	// result: (Leq16 y x)
 18371  	for {
 18372  		if v_0.Op != OpLess16 {
 18373  			break
 18374  		}
 18375  		y := v_0.Args[1]
 18376  		x := v_0.Args[0]
 18377  		v.reset(OpLeq16)
 18378  		v.AddArg2(y, x)
 18379  		return true
 18380  	}
 18381  	// match: (Not (Less8 x y))
 18382  	// result: (Leq8 y x)
 18383  	for {
 18384  		if v_0.Op != OpLess8 {
 18385  			break
 18386  		}
 18387  		y := v_0.Args[1]
 18388  		x := v_0.Args[0]
 18389  		v.reset(OpLeq8)
 18390  		v.AddArg2(y, x)
 18391  		return true
 18392  	}
 18393  	// match: (Not (Less64U x y))
 18394  	// result: (Leq64U y x)
 18395  	for {
 18396  		if v_0.Op != OpLess64U {
 18397  			break
 18398  		}
 18399  		y := v_0.Args[1]
 18400  		x := v_0.Args[0]
 18401  		v.reset(OpLeq64U)
 18402  		v.AddArg2(y, x)
 18403  		return true
 18404  	}
 18405  	// match: (Not (Less32U x y))
 18406  	// result: (Leq32U y x)
 18407  	for {
 18408  		if v_0.Op != OpLess32U {
 18409  			break
 18410  		}
 18411  		y := v_0.Args[1]
 18412  		x := v_0.Args[0]
 18413  		v.reset(OpLeq32U)
 18414  		v.AddArg2(y, x)
 18415  		return true
 18416  	}
 18417  	// match: (Not (Less16U x y))
 18418  	// result: (Leq16U y x)
 18419  	for {
 18420  		if v_0.Op != OpLess16U {
 18421  			break
 18422  		}
 18423  		y := v_0.Args[1]
 18424  		x := v_0.Args[0]
 18425  		v.reset(OpLeq16U)
 18426  		v.AddArg2(y, x)
 18427  		return true
 18428  	}
 18429  	// match: (Not (Less8U x y))
 18430  	// result: (Leq8U y x)
 18431  	for {
 18432  		if v_0.Op != OpLess8U {
 18433  			break
 18434  		}
 18435  		y := v_0.Args[1]
 18436  		x := v_0.Args[0]
 18437  		v.reset(OpLeq8U)
 18438  		v.AddArg2(y, x)
 18439  		return true
 18440  	}
 18441  	// match: (Not (Leq64 x y))
 18442  	// result: (Less64 y x)
 18443  	for {
 18444  		if v_0.Op != OpLeq64 {
 18445  			break
 18446  		}
 18447  		y := v_0.Args[1]
 18448  		x := v_0.Args[0]
 18449  		v.reset(OpLess64)
 18450  		v.AddArg2(y, x)
 18451  		return true
 18452  	}
 18453  	// match: (Not (Leq32 x y))
 18454  	// result: (Less32 y x)
 18455  	for {
 18456  		if v_0.Op != OpLeq32 {
 18457  			break
 18458  		}
 18459  		y := v_0.Args[1]
 18460  		x := v_0.Args[0]
 18461  		v.reset(OpLess32)
 18462  		v.AddArg2(y, x)
 18463  		return true
 18464  	}
 18465  	// match: (Not (Leq16 x y))
 18466  	// result: (Less16 y x)
 18467  	for {
 18468  		if v_0.Op != OpLeq16 {
 18469  			break
 18470  		}
 18471  		y := v_0.Args[1]
 18472  		x := v_0.Args[0]
 18473  		v.reset(OpLess16)
 18474  		v.AddArg2(y, x)
 18475  		return true
 18476  	}
 18477  	// match: (Not (Leq8 x y))
 18478  	// result: (Less8 y x)
 18479  	for {
 18480  		if v_0.Op != OpLeq8 {
 18481  			break
 18482  		}
 18483  		y := v_0.Args[1]
 18484  		x := v_0.Args[0]
 18485  		v.reset(OpLess8)
 18486  		v.AddArg2(y, x)
 18487  		return true
 18488  	}
 18489  	// match: (Not (Leq64U x y))
 18490  	// result: (Less64U y x)
 18491  	for {
 18492  		if v_0.Op != OpLeq64U {
 18493  			break
 18494  		}
 18495  		y := v_0.Args[1]
 18496  		x := v_0.Args[0]
 18497  		v.reset(OpLess64U)
 18498  		v.AddArg2(y, x)
 18499  		return true
 18500  	}
 18501  	// match: (Not (Leq32U x y))
 18502  	// result: (Less32U y x)
 18503  	for {
 18504  		if v_0.Op != OpLeq32U {
 18505  			break
 18506  		}
 18507  		y := v_0.Args[1]
 18508  		x := v_0.Args[0]
 18509  		v.reset(OpLess32U)
 18510  		v.AddArg2(y, x)
 18511  		return true
 18512  	}
 18513  	// match: (Not (Leq16U x y))
 18514  	// result: (Less16U y x)
 18515  	for {
 18516  		if v_0.Op != OpLeq16U {
 18517  			break
 18518  		}
 18519  		y := v_0.Args[1]
 18520  		x := v_0.Args[0]
 18521  		v.reset(OpLess16U)
 18522  		v.AddArg2(y, x)
 18523  		return true
 18524  	}
 18525  	// match: (Not (Leq8U x y))
 18526  	// result: (Less8U y x)
 18527  	for {
 18528  		if v_0.Op != OpLeq8U {
 18529  			break
 18530  		}
 18531  		y := v_0.Args[1]
 18532  		x := v_0.Args[0]
 18533  		v.reset(OpLess8U)
 18534  		v.AddArg2(y, x)
 18535  		return true
 18536  	}
 18537  	return false
 18538  }
 18539  func rewriteValuegeneric_OpOffPtr(v *Value) bool {
 18540  	v_0 := v.Args[0]
 18541  	// match: (OffPtr (OffPtr p [y]) [x])
 18542  	// result: (OffPtr p [x+y])
 18543  	for {
 18544  		x := auxIntToInt64(v.AuxInt)
 18545  		if v_0.Op != OpOffPtr {
 18546  			break
 18547  		}
 18548  		y := auxIntToInt64(v_0.AuxInt)
 18549  		p := v_0.Args[0]
 18550  		v.reset(OpOffPtr)
 18551  		v.AuxInt = int64ToAuxInt(x + y)
 18552  		v.AddArg(p)
 18553  		return true
 18554  	}
 18555  	// match: (OffPtr p [0])
 18556  	// cond: v.Type.Compare(p.Type) == types.CMPeq
 18557  	// result: p
 18558  	for {
 18559  		if auxIntToInt64(v.AuxInt) != 0 {
 18560  			break
 18561  		}
 18562  		p := v_0
 18563  		if !(v.Type.Compare(p.Type) == types.CMPeq) {
 18564  			break
 18565  		}
 18566  		v.copyOf(p)
 18567  		return true
 18568  	}
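        	// Nested OffPtrs compose by adding their offsets, and an OffPtr with offset
        	// zero is the identity provided it leaves the value's type unchanged; the
        	// Compare check presumably guards the case where OffPtr [0] is used purely
        	// as a pointer-type conversion.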
 18569  	return false
 18570  }
 18571  func rewriteValuegeneric_OpOr16(v *Value) bool {
 18572  	v_1 := v.Args[1]
 18573  	v_0 := v.Args[0]
 18574  	b := v.Block
 18575  	config := b.Func.Config
 18576  	// match: (Or16 (Const16 [c]) (Const16 [d]))
 18577  	// result: (Const16 [c|d])
 18578  	for {
 18579  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18580  			if v_0.Op != OpConst16 {
 18581  				continue
 18582  			}
 18583  			c := auxIntToInt16(v_0.AuxInt)
 18584  			if v_1.Op != OpConst16 {
 18585  				continue
 18586  			}
 18587  			d := auxIntToInt16(v_1.AuxInt)
 18588  			v.reset(OpConst16)
 18589  			v.AuxInt = int16ToAuxInt(c | d)
 18590  			return true
 18591  		}
 18592  		break
 18593  	}
 18594  	// match: (Or16 x x)
 18595  	// result: x
 18596  	for {
 18597  		x := v_0
 18598  		if x != v_1 {
 18599  			break
 18600  		}
 18601  		v.copyOf(x)
 18602  		return true
 18603  	}
 18604  	// match: (Or16 (Const16 [0]) x)
 18605  	// result: x
 18606  	for {
 18607  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18608  			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
 18609  				continue
 18610  			}
 18611  			x := v_1
 18612  			v.copyOf(x)
 18613  			return true
 18614  		}
 18615  		break
 18616  	}
 18617  	// match: (Or16 (Const16 [-1]) _)
 18618  	// result: (Const16 [-1])
 18619  	for {
 18620  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18621  			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != -1 {
 18622  				continue
 18623  			}
 18624  			v.reset(OpConst16)
 18625  			v.AuxInt = int16ToAuxInt(-1)
 18626  			return true
 18627  		}
 18628  		break
 18629  	}
 18630  	// match: (Or16 (Com16 x) x)
 18631  	// result: (Const16 [-1])
 18632  	for {
 18633  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18634  			if v_0.Op != OpCom16 {
 18635  				continue
 18636  			}
 18637  			x := v_0.Args[0]
 18638  			if x != v_1 {
 18639  				continue
 18640  			}
 18641  			v.reset(OpConst16)
 18642  			v.AuxInt = int16ToAuxInt(-1)
 18643  			return true
 18644  		}
 18645  		break
 18646  	}
 18647  	// match: (Or16 x (Or16 x y))
 18648  	// result: (Or16 x y)
 18649  	for {
 18650  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18651  			x := v_0
 18652  			if v_1.Op != OpOr16 {
 18653  				continue
 18654  			}
 18655  			_ = v_1.Args[1]
 18656  			v_1_0 := v_1.Args[0]
 18657  			v_1_1 := v_1.Args[1]
 18658  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 18659  				if x != v_1_0 {
 18660  					continue
 18661  				}
 18662  				y := v_1_1
 18663  				v.reset(OpOr16)
 18664  				v.AddArg2(x, y)
 18665  				return true
 18666  			}
 18667  		}
 18668  		break
 18669  	}
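        	// Algebraic identities for Or16: constants fold, x|x = x, 0|x = x,
        	// -1|_ = -1, ^x|x = -1, and x|(x|y) collapses to x|y.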
 18670  	// match: (Or16 (And16 x (Const16 [c2])) (Const16 <t> [c1]))
 18671  	// cond: ^(c1 | c2) == 0
 18672  	// result: (Or16 (Const16 <t> [c1]) x)
 18673  	for {
 18674  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18675  			if v_0.Op != OpAnd16 {
 18676  				continue
 18677  			}
 18678  			_ = v_0.Args[1]
 18679  			v_0_0 := v_0.Args[0]
 18680  			v_0_1 := v_0.Args[1]
 18681  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
 18682  				x := v_0_0
 18683  				if v_0_1.Op != OpConst16 {
 18684  					continue
 18685  				}
 18686  				c2 := auxIntToInt16(v_0_1.AuxInt)
 18687  				if v_1.Op != OpConst16 {
 18688  					continue
 18689  				}
 18690  				t := v_1.Type
 18691  				c1 := auxIntToInt16(v_1.AuxInt)
 18692  				if !(^(c1 | c2) == 0) {
 18693  					continue
 18694  				}
 18695  				v.reset(OpOr16)
 18696  				v0 := b.NewValue0(v.Pos, OpConst16, t)
 18697  				v0.AuxInt = int16ToAuxInt(c1)
 18698  				v.AddArg2(v0, x)
 18699  				return true
 18700  			}
 18701  		}
 18702  		break
 18703  	}
 18704  	// match: (Or16 (Or16 i:(Const16 <t>) z) x)
 18705  	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
 18706  	// result: (Or16 i (Or16 <t> z x))
 18707  	for {
 18708  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18709  			if v_0.Op != OpOr16 {
 18710  				continue
 18711  			}
 18712  			_ = v_0.Args[1]
 18713  			v_0_0 := v_0.Args[0]
 18714  			v_0_1 := v_0.Args[1]
 18715  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
 18716  				i := v_0_0
 18717  				if i.Op != OpConst16 {
 18718  					continue
 18719  				}
 18720  				t := i.Type
 18721  				z := v_0_1
 18722  				x := v_1
 18723  				if !(z.Op != OpConst16 && x.Op != OpConst16) {
 18724  					continue
 18725  				}
 18726  				v.reset(OpOr16)
 18727  				v0 := b.NewValue0(v.Pos, OpOr16, t)
 18728  				v0.AddArg2(z, x)
 18729  				v.AddArg2(i, v0)
 18730  				return true
 18731  			}
 18732  		}
 18733  		break
 18734  	}
 18735  	// match: (Or16 (Const16 <t> [c]) (Or16 (Const16 <t> [d]) x))
 18736  	// result: (Or16 (Const16 <t> [c|d]) x)
 18737  	for {
 18738  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18739  			if v_0.Op != OpConst16 {
 18740  				continue
 18741  			}
 18742  			t := v_0.Type
 18743  			c := auxIntToInt16(v_0.AuxInt)
 18744  			if v_1.Op != OpOr16 {
 18745  				continue
 18746  			}
 18747  			_ = v_1.Args[1]
 18748  			v_1_0 := v_1.Args[0]
 18749  			v_1_1 := v_1.Args[1]
 18750  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 18751  				if v_1_0.Op != OpConst16 || v_1_0.Type != t {
 18752  					continue
 18753  				}
 18754  				d := auxIntToInt16(v_1_0.AuxInt)
 18755  				x := v_1_1
 18756  				v.reset(OpOr16)
 18757  				v0 := b.NewValue0(v.Pos, OpConst16, t)
 18758  				v0.AuxInt = int16ToAuxInt(c | d)
 18759  				v.AddArg2(v0, x)
 18760  				return true
 18761  			}
 18762  		}
 18763  		break
 18764  	}
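        	// Constant operands are canonicalized: an And whose mask together with the
        	// Or constant covers every bit (^(c1|c2) == 0) can be dropped, lone
        	// constants are floated out of nested Ors, and two constants feeding the
        	// same Or merge into c|d, so that constants stay outermost and can combine.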
 18765  	// match: (Or16 (Lsh16x64 x z:(Const64 <t> [c])) (Rsh16Ux64 x (Const64 [d])))
 18766  	// cond: c < 16 && d == 16-c && canRotate(config, 16)
 18767  	// result: (RotateLeft16 x z)
 18768  	for {
 18769  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18770  			if v_0.Op != OpLsh16x64 {
 18771  				continue
 18772  			}
 18773  			_ = v_0.Args[1]
 18774  			x := v_0.Args[0]
 18775  			z := v_0.Args[1]
 18776  			if z.Op != OpConst64 {
 18777  				continue
 18778  			}
 18779  			c := auxIntToInt64(z.AuxInt)
 18780  			if v_1.Op != OpRsh16Ux64 {
 18781  				continue
 18782  			}
 18783  			_ = v_1.Args[1]
 18784  			if x != v_1.Args[0] {
 18785  				continue
 18786  			}
 18787  			v_1_1 := v_1.Args[1]
 18788  			if v_1_1.Op != OpConst64 {
 18789  				continue
 18790  			}
 18791  			d := auxIntToInt64(v_1_1.AuxInt)
 18792  			if !(c < 16 && d == 16-c && canRotate(config, 16)) {
 18793  				continue
 18794  			}
 18795  			v.reset(OpRotateLeft16)
 18796  			v.AddArg2(x, z)
 18797  			return true
 18798  		}
 18799  		break
 18800  	}
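        	// Illustrative reading: x<<c | x>>(16-c) with a constant, in-range shift is
        	// a 16-bit rotate by c, emitted as RotateLeft16 when the target can rotate
        	// (canRotate). The rules that follow accept a variable shift amount y in the
        	// same shape, but only when at least one of the shifts is known to be
        	// bounded (shiftIsBounded), so the rotate count is well defined.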
 18801  	// match: (Or16 left:(Lsh16x64 x y) right:(Rsh16Ux64 x (Sub64 (Const64 [16]) y)))
 18802  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
 18803  	// result: (RotateLeft16 x y)
 18804  	for {
 18805  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18806  			left := v_0
 18807  			if left.Op != OpLsh16x64 {
 18808  				continue
 18809  			}
 18810  			y := left.Args[1]
 18811  			x := left.Args[0]
 18812  			right := v_1
 18813  			if right.Op != OpRsh16Ux64 {
 18814  				continue
 18815  			}
 18816  			_ = right.Args[1]
 18817  			if x != right.Args[0] {
 18818  				continue
 18819  			}
 18820  			right_1 := right.Args[1]
 18821  			if right_1.Op != OpSub64 {
 18822  				continue
 18823  			}
 18824  			_ = right_1.Args[1]
 18825  			right_1_0 := right_1.Args[0]
 18826  			if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
 18827  				continue
 18828  			}
 18829  			v.reset(OpRotateLeft16)
 18830  			v.AddArg2(x, y)
 18831  			return true
 18832  		}
 18833  		break
 18834  	}
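	// The rule above (and the x32/x16/x8 variants that follow) handles a variable
	// rotate amount of the form x<<y | x>>(16-y). It only fires when at least one
	// of the two shifts is already known to be in bounds (shiftIsBounded), since
	// for out-of-range y the shift pair and a true rotate can disagree.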
 18835  	// match: (Or16 left:(Lsh16x32 x y) right:(Rsh16Ux32 x (Sub32 (Const32 [16]) y)))
 18836  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
 18837  	// result: (RotateLeft16 x y)
 18838  	for {
 18839  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18840  			left := v_0
 18841  			if left.Op != OpLsh16x32 {
 18842  				continue
 18843  			}
 18844  			y := left.Args[1]
 18845  			x := left.Args[0]
 18846  			right := v_1
 18847  			if right.Op != OpRsh16Ux32 {
 18848  				continue
 18849  			}
 18850  			_ = right.Args[1]
 18851  			if x != right.Args[0] {
 18852  				continue
 18853  			}
 18854  			right_1 := right.Args[1]
 18855  			if right_1.Op != OpSub32 {
 18856  				continue
 18857  			}
 18858  			_ = right_1.Args[1]
 18859  			right_1_0 := right_1.Args[0]
 18860  			if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
 18861  				continue
 18862  			}
 18863  			v.reset(OpRotateLeft16)
 18864  			v.AddArg2(x, y)
 18865  			return true
 18866  		}
 18867  		break
 18868  	}
 18869  	// match: (Or16 left:(Lsh16x16 x y) right:(Rsh16Ux16 x (Sub16 (Const16 [16]) y)))
 18870  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
 18871  	// result: (RotateLeft16 x y)
 18872  	for {
 18873  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18874  			left := v_0
 18875  			if left.Op != OpLsh16x16 {
 18876  				continue
 18877  			}
 18878  			y := left.Args[1]
 18879  			x := left.Args[0]
 18880  			right := v_1
 18881  			if right.Op != OpRsh16Ux16 {
 18882  				continue
 18883  			}
 18884  			_ = right.Args[1]
 18885  			if x != right.Args[0] {
 18886  				continue
 18887  			}
 18888  			right_1 := right.Args[1]
 18889  			if right_1.Op != OpSub16 {
 18890  				continue
 18891  			}
 18892  			_ = right_1.Args[1]
 18893  			right_1_0 := right_1.Args[0]
 18894  			if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
 18895  				continue
 18896  			}
 18897  			v.reset(OpRotateLeft16)
 18898  			v.AddArg2(x, y)
 18899  			return true
 18900  		}
 18901  		break
 18902  	}
 18903  	// match: (Or16 left:(Lsh16x8 x y) right:(Rsh16Ux8 x (Sub8 (Const8 [16]) y)))
 18904  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
 18905  	// result: (RotateLeft16 x y)
 18906  	for {
 18907  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18908  			left := v_0
 18909  			if left.Op != OpLsh16x8 {
 18910  				continue
 18911  			}
 18912  			y := left.Args[1]
 18913  			x := left.Args[0]
 18914  			right := v_1
 18915  			if right.Op != OpRsh16Ux8 {
 18916  				continue
 18917  			}
 18918  			_ = right.Args[1]
 18919  			if x != right.Args[0] {
 18920  				continue
 18921  			}
 18922  			right_1 := right.Args[1]
 18923  			if right_1.Op != OpSub8 {
 18924  				continue
 18925  			}
 18926  			_ = right_1.Args[1]
 18927  			right_1_0 := right_1.Args[0]
 18928  			if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
 18929  				continue
 18930  			}
 18931  			v.reset(OpRotateLeft16)
 18932  			v.AddArg2(x, y)
 18933  			return true
 18934  		}
 18935  		break
 18936  	}
 18937  	// match: (Or16 right:(Rsh16Ux64 x y) left:(Lsh16x64 x z:(Sub64 (Const64 [16]) y)))
 18938  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
 18939  	// result: (RotateLeft16 x z)
 18940  	for {
 18941  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18942  			right := v_0
 18943  			if right.Op != OpRsh16Ux64 {
 18944  				continue
 18945  			}
 18946  			y := right.Args[1]
 18947  			x := right.Args[0]
 18948  			left := v_1
 18949  			if left.Op != OpLsh16x64 {
 18950  				continue
 18951  			}
 18952  			_ = left.Args[1]
 18953  			if x != left.Args[0] {
 18954  				continue
 18955  			}
 18956  			z := left.Args[1]
 18957  			if z.Op != OpSub64 {
 18958  				continue
 18959  			}
 18960  			_ = z.Args[1]
 18961  			z_0 := z.Args[0]
 18962  			if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
 18963  				continue
 18964  			}
 18965  			v.reset(OpRotateLeft16)
 18966  			v.AddArg2(x, z)
 18967  			return true
 18968  		}
 18969  		break
 18970  	}
 18971  	// match: (Or16 right:(Rsh16Ux32 x y) left:(Lsh16x32 x z:(Sub32 (Const32 [16]) y)))
 18972  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
 18973  	// result: (RotateLeft16 x z)
 18974  	for {
 18975  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18976  			right := v_0
 18977  			if right.Op != OpRsh16Ux32 {
 18978  				continue
 18979  			}
 18980  			y := right.Args[1]
 18981  			x := right.Args[0]
 18982  			left := v_1
 18983  			if left.Op != OpLsh16x32 {
 18984  				continue
 18985  			}
 18986  			_ = left.Args[1]
 18987  			if x != left.Args[0] {
 18988  				continue
 18989  			}
 18990  			z := left.Args[1]
 18991  			if z.Op != OpSub32 {
 18992  				continue
 18993  			}
 18994  			_ = z.Args[1]
 18995  			z_0 := z.Args[0]
 18996  			if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
 18997  				continue
 18998  			}
 18999  			v.reset(OpRotateLeft16)
 19000  			v.AddArg2(x, z)
 19001  			return true
 19002  		}
 19003  		break
 19004  	}
 19005  	// match: (Or16 right:(Rsh16Ux16 x y) left:(Lsh16x16 x z:(Sub16 (Const16 [16]) y)))
 19006  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
 19007  	// result: (RotateLeft16 x z)
 19008  	for {
 19009  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19010  			right := v_0
 19011  			if right.Op != OpRsh16Ux16 {
 19012  				continue
 19013  			}
 19014  			y := right.Args[1]
 19015  			x := right.Args[0]
 19016  			left := v_1
 19017  			if left.Op != OpLsh16x16 {
 19018  				continue
 19019  			}
 19020  			_ = left.Args[1]
 19021  			if x != left.Args[0] {
 19022  				continue
 19023  			}
 19024  			z := left.Args[1]
 19025  			if z.Op != OpSub16 {
 19026  				continue
 19027  			}
 19028  			_ = z.Args[1]
 19029  			z_0 := z.Args[0]
 19030  			if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
 19031  				continue
 19032  			}
 19033  			v.reset(OpRotateLeft16)
 19034  			v.AddArg2(x, z)
 19035  			return true
 19036  		}
 19037  		break
 19038  	}
 19039  	// match: (Or16 right:(Rsh16Ux8 x y) left:(Lsh16x8 x z:(Sub8 (Const8 [16]) y)))
 19040  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
 19041  	// result: (RotateLeft16 x z)
 19042  	for {
 19043  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19044  			right := v_0
 19045  			if right.Op != OpRsh16Ux8 {
 19046  				continue
 19047  			}
 19048  			y := right.Args[1]
 19049  			x := right.Args[0]
 19050  			left := v_1
 19051  			if left.Op != OpLsh16x8 {
 19052  				continue
 19053  			}
 19054  			_ = left.Args[1]
 19055  			if x != left.Args[0] {
 19056  				continue
 19057  			}
 19058  			z := left.Args[1]
 19059  			if z.Op != OpSub8 {
 19060  				continue
 19061  			}
 19062  			_ = z.Args[1]
 19063  			z_0 := z.Args[0]
 19064  			if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
 19065  				continue
 19066  			}
 19067  			v.reset(OpRotateLeft16)
 19068  			v.AddArg2(x, z)
 19069  			return true
 19070  		}
 19071  		break
 19072  	}
 19073  	return false
 19074  }
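// rewriteValuegeneric_OpOr32 applies the same Or rules as the 16-bit version
// above, instantiated for 32-bit values. Throughout these matchers the
// _i0/_i1 loops try both argument orders of a commutative op, swapping v_0 and
// v_1 (or the nested arguments) on the second pass, so each rule only has to
// be written once per operand shape.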
 19075  func rewriteValuegeneric_OpOr32(v *Value) bool {
 19076  	v_1 := v.Args[1]
 19077  	v_0 := v.Args[0]
 19078  	b := v.Block
 19079  	config := b.Func.Config
 19080  	// match: (Or32 (Const32 [c]) (Const32 [d]))
 19081  	// result: (Const32 [c|d])
 19082  	for {
 19083  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19084  			if v_0.Op != OpConst32 {
 19085  				continue
 19086  			}
 19087  			c := auxIntToInt32(v_0.AuxInt)
 19088  			if v_1.Op != OpConst32 {
 19089  				continue
 19090  			}
 19091  			d := auxIntToInt32(v_1.AuxInt)
 19092  			v.reset(OpConst32)
 19093  			v.AuxInt = int32ToAuxInt(c | d)
 19094  			return true
 19095  		}
 19096  		break
 19097  	}
 19098  	// match: (Or32 x x)
 19099  	// result: x
 19100  	for {
 19101  		x := v_0
 19102  		if x != v_1 {
 19103  			break
 19104  		}
 19105  		v.copyOf(x)
 19106  		return true
 19107  	}
 19108  	// match: (Or32 (Const32 [0]) x)
 19109  	// result: x
 19110  	for {
 19111  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19112  			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 19113  				continue
 19114  			}
 19115  			x := v_1
 19116  			v.copyOf(x)
 19117  			return true
 19118  		}
 19119  		break
 19120  	}
 19121  	// match: (Or32 (Const32 [-1]) _)
 19122  	// result: (Const32 [-1])
 19123  	for {
 19124  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19125  			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != -1 {
 19126  				continue
 19127  			}
 19128  			v.reset(OpConst32)
 19129  			v.AuxInt = int32ToAuxInt(-1)
 19130  			return true
 19131  		}
 19132  		break
 19133  	}
 19134  	// match: (Or32 (Com32 x) x)
 19135  	// result: (Const32 [-1])
 19136  	for {
 19137  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19138  			if v_0.Op != OpCom32 {
 19139  				continue
 19140  			}
 19141  			x := v_0.Args[0]
 19142  			if x != v_1 {
 19143  				continue
 19144  			}
 19145  			v.reset(OpConst32)
 19146  			v.AuxInt = int32ToAuxInt(-1)
 19147  			return true
 19148  		}
 19149  		break
 19150  	}
 19151  	// match: (Or32 x (Or32 x y))
 19152  	// result: (Or32 x y)
 19153  	for {
 19154  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19155  			x := v_0
 19156  			if v_1.Op != OpOr32 {
 19157  				continue
 19158  			}
 19159  			_ = v_1.Args[1]
 19160  			v_1_0 := v_1.Args[0]
 19161  			v_1_1 := v_1.Args[1]
 19162  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 19163  				if x != v_1_0 {
 19164  					continue
 19165  				}
 19166  				y := v_1_1
 19167  				v.reset(OpOr32)
 19168  				v.AddArg2(x, y)
 19169  				return true
 19170  			}
 19171  		}
 19172  		break
 19173  	}
 19174  	// match: (Or32 (And32 x (Const32 [c2])) (Const32 <t> [c1]))
 19175  	// cond: ^(c1 | c2) == 0
 19176  	// result: (Or32 (Const32 <t> [c1]) x)
 19177  	for {
 19178  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19179  			if v_0.Op != OpAnd32 {
 19180  				continue
 19181  			}
 19182  			_ = v_0.Args[1]
 19183  			v_0_0 := v_0.Args[0]
 19184  			v_0_1 := v_0.Args[1]
 19185  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
 19186  				x := v_0_0
 19187  				if v_0_1.Op != OpConst32 {
 19188  					continue
 19189  				}
 19190  				c2 := auxIntToInt32(v_0_1.AuxInt)
 19191  				if v_1.Op != OpConst32 {
 19192  					continue
 19193  				}
 19194  				t := v_1.Type
 19195  				c1 := auxIntToInt32(v_1.AuxInt)
 19196  				if !(^(c1 | c2) == 0) {
 19197  					continue
 19198  				}
 19199  				v.reset(OpOr32)
 19200  				v0 := b.NewValue0(v.Pos, OpConst32, t)
 19201  				v0.AuxInt = int32ToAuxInt(c1)
 19202  				v.AddArg2(v0, x)
 19203  				return true
 19204  			}
 19205  		}
 19206  		break
 19207  	}
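	// The rule above relies on the identity (x & c2) | c1 == (x | c1) & (c1 | c2):
	// when c1|c2 has every bit set, the mask is a no-op and the And can be dropped,
	// leaving x | c1. For instance, with c2 = ^0xff and c1 = 0x1ff, c1|c2 is all
	// ones, so (x & c2) | c1 folds to x | c1.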
 19208  	// match: (Or32 (Or32 i:(Const32 <t>) z) x)
 19209  	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
 19210  	// result: (Or32 i (Or32 <t> z x))
 19211  	for {
 19212  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19213  			if v_0.Op != OpOr32 {
 19214  				continue
 19215  			}
 19216  			_ = v_0.Args[1]
 19217  			v_0_0 := v_0.Args[0]
 19218  			v_0_1 := v_0.Args[1]
 19219  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
 19220  				i := v_0_0
 19221  				if i.Op != OpConst32 {
 19222  					continue
 19223  				}
 19224  				t := i.Type
 19225  				z := v_0_1
 19226  				x := v_1
 19227  				if !(z.Op != OpConst32 && x.Op != OpConst32) {
 19228  					continue
 19229  				}
 19230  				v.reset(OpOr32)
 19231  				v0 := b.NewValue0(v.Pos, OpOr32, t)
 19232  				v0.AddArg2(z, x)
 19233  				v.AddArg2(i, v0)
 19234  				return true
 19235  			}
 19236  		}
 19237  		break
 19238  	}
 19239  	// match: (Or32 (Const32 <t> [c]) (Or32 (Const32 <t> [d]) x))
 19240  	// result: (Or32 (Const32 <t> [c|d]) x)
 19241  	for {
 19242  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19243  			if v_0.Op != OpConst32 {
 19244  				continue
 19245  			}
 19246  			t := v_0.Type
 19247  			c := auxIntToInt32(v_0.AuxInt)
 19248  			if v_1.Op != OpOr32 {
 19249  				continue
 19250  			}
 19251  			_ = v_1.Args[1]
 19252  			v_1_0 := v_1.Args[0]
 19253  			v_1_1 := v_1.Args[1]
 19254  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 19255  				if v_1_0.Op != OpConst32 || v_1_0.Type != t {
 19256  					continue
 19257  				}
 19258  				d := auxIntToInt32(v_1_0.AuxInt)
 19259  				x := v_1_1
 19260  				v.reset(OpOr32)
 19261  				v0 := b.NewValue0(v.Pos, OpConst32, t)
 19262  				v0.AuxInt = int32ToAuxInt(c | d)
 19263  				v.AddArg2(v0, x)
 19264  				return true
 19265  			}
 19266  		}
 19267  		break
 19268  	}
 19269  	// match: (Or32 (Lsh32x64 x z:(Const64 <t> [c])) (Rsh32Ux64 x (Const64 [d])))
 19270  	// cond: c < 32 && d == 32-c && canRotate(config, 32)
 19271  	// result: (RotateLeft32 x z)
 19272  	for {
 19273  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19274  			if v_0.Op != OpLsh32x64 {
 19275  				continue
 19276  			}
 19277  			_ = v_0.Args[1]
 19278  			x := v_0.Args[0]
 19279  			z := v_0.Args[1]
 19280  			if z.Op != OpConst64 {
 19281  				continue
 19282  			}
 19283  			c := auxIntToInt64(z.AuxInt)
 19284  			if v_1.Op != OpRsh32Ux64 {
 19285  				continue
 19286  			}
 19287  			_ = v_1.Args[1]
 19288  			if x != v_1.Args[0] {
 19289  				continue
 19290  			}
 19291  			v_1_1 := v_1.Args[1]
 19292  			if v_1_1.Op != OpConst64 {
 19293  				continue
 19294  			}
 19295  			d := auxIntToInt64(v_1_1.AuxInt)
 19296  			if !(c < 32 && d == 32-c && canRotate(config, 32)) {
 19297  				continue
 19298  			}
 19299  			v.reset(OpRotateLeft32)
 19300  			v.AddArg2(x, z)
 19301  			return true
 19302  		}
 19303  		break
 19304  	}
 19305  	// match: (Or32 left:(Lsh32x64 x y) right:(Rsh32Ux64 x (Sub64 (Const64 [32]) y)))
 19306  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
 19307  	// result: (RotateLeft32 x y)
 19308  	for {
 19309  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19310  			left := v_0
 19311  			if left.Op != OpLsh32x64 {
 19312  				continue
 19313  			}
 19314  			y := left.Args[1]
 19315  			x := left.Args[0]
 19316  			right := v_1
 19317  			if right.Op != OpRsh32Ux64 {
 19318  				continue
 19319  			}
 19320  			_ = right.Args[1]
 19321  			if x != right.Args[0] {
 19322  				continue
 19323  			}
 19324  			right_1 := right.Args[1]
 19325  			if right_1.Op != OpSub64 {
 19326  				continue
 19327  			}
 19328  			_ = right_1.Args[1]
 19329  			right_1_0 := right_1.Args[0]
 19330  			if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
 19331  				continue
 19332  			}
 19333  			v.reset(OpRotateLeft32)
 19334  			v.AddArg2(x, y)
 19335  			return true
 19336  		}
 19337  		break
 19338  	}
 19339  	// match: (Or32 left:(Lsh32x32 x y) right:(Rsh32Ux32 x (Sub32 (Const32 [32]) y)))
 19340  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
 19341  	// result: (RotateLeft32 x y)
 19342  	for {
 19343  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19344  			left := v_0
 19345  			if left.Op != OpLsh32x32 {
 19346  				continue
 19347  			}
 19348  			y := left.Args[1]
 19349  			x := left.Args[0]
 19350  			right := v_1
 19351  			if right.Op != OpRsh32Ux32 {
 19352  				continue
 19353  			}
 19354  			_ = right.Args[1]
 19355  			if x != right.Args[0] {
 19356  				continue
 19357  			}
 19358  			right_1 := right.Args[1]
 19359  			if right_1.Op != OpSub32 {
 19360  				continue
 19361  			}
 19362  			_ = right_1.Args[1]
 19363  			right_1_0 := right_1.Args[0]
 19364  			if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
 19365  				continue
 19366  			}
 19367  			v.reset(OpRotateLeft32)
 19368  			v.AddArg2(x, y)
 19369  			return true
 19370  		}
 19371  		break
 19372  	}
 19373  	// match: (Or32 left:(Lsh32x16 x y) right:(Rsh32Ux16 x (Sub16 (Const16 [32]) y)))
 19374  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
 19375  	// result: (RotateLeft32 x y)
 19376  	for {
 19377  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19378  			left := v_0
 19379  			if left.Op != OpLsh32x16 {
 19380  				continue
 19381  			}
 19382  			y := left.Args[1]
 19383  			x := left.Args[0]
 19384  			right := v_1
 19385  			if right.Op != OpRsh32Ux16 {
 19386  				continue
 19387  			}
 19388  			_ = right.Args[1]
 19389  			if x != right.Args[0] {
 19390  				continue
 19391  			}
 19392  			right_1 := right.Args[1]
 19393  			if right_1.Op != OpSub16 {
 19394  				continue
 19395  			}
 19396  			_ = right_1.Args[1]
 19397  			right_1_0 := right_1.Args[0]
 19398  			if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
 19399  				continue
 19400  			}
 19401  			v.reset(OpRotateLeft32)
 19402  			v.AddArg2(x, y)
 19403  			return true
 19404  		}
 19405  		break
 19406  	}
 19407  	// match: (Or32 left:(Lsh32x8 x y) right:(Rsh32Ux8 x (Sub8 (Const8 [32]) y)))
 19408  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
 19409  	// result: (RotateLeft32 x y)
 19410  	for {
 19411  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19412  			left := v_0
 19413  			if left.Op != OpLsh32x8 {
 19414  				continue
 19415  			}
 19416  			y := left.Args[1]
 19417  			x := left.Args[0]
 19418  			right := v_1
 19419  			if right.Op != OpRsh32Ux8 {
 19420  				continue
 19421  			}
 19422  			_ = right.Args[1]
 19423  			if x != right.Args[0] {
 19424  				continue
 19425  			}
 19426  			right_1 := right.Args[1]
 19427  			if right_1.Op != OpSub8 {
 19428  				continue
 19429  			}
 19430  			_ = right_1.Args[1]
 19431  			right_1_0 := right_1.Args[0]
 19432  			if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
 19433  				continue
 19434  			}
 19435  			v.reset(OpRotateLeft32)
 19436  			v.AddArg2(x, y)
 19437  			return true
 19438  		}
 19439  		break
 19440  	}
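	// The next group mirrors the four rules above with the roles of the shifts
	// swapped: x>>y | x<<(32-y) is a rotate right by y, expressed here as a
	// RotateLeft32 by the (32 - y) amount already present as the left shift's count.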
 19441  	// match: (Or32 right:(Rsh32Ux64 x y) left:(Lsh32x64 x z:(Sub64 (Const64 [32]) y)))
 19442  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
 19443  	// result: (RotateLeft32 x z)
 19444  	for {
 19445  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19446  			right := v_0
 19447  			if right.Op != OpRsh32Ux64 {
 19448  				continue
 19449  			}
 19450  			y := right.Args[1]
 19451  			x := right.Args[0]
 19452  			left := v_1
 19453  			if left.Op != OpLsh32x64 {
 19454  				continue
 19455  			}
 19456  			_ = left.Args[1]
 19457  			if x != left.Args[0] {
 19458  				continue
 19459  			}
 19460  			z := left.Args[1]
 19461  			if z.Op != OpSub64 {
 19462  				continue
 19463  			}
 19464  			_ = z.Args[1]
 19465  			z_0 := z.Args[0]
 19466  			if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
 19467  				continue
 19468  			}
 19469  			v.reset(OpRotateLeft32)
 19470  			v.AddArg2(x, z)
 19471  			return true
 19472  		}
 19473  		break
 19474  	}
 19475  	// match: (Or32 right:(Rsh32Ux32 x y) left:(Lsh32x32 x z:(Sub32 (Const32 [32]) y)))
 19476  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
 19477  	// result: (RotateLeft32 x z)
 19478  	for {
 19479  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19480  			right := v_0
 19481  			if right.Op != OpRsh32Ux32 {
 19482  				continue
 19483  			}
 19484  			y := right.Args[1]
 19485  			x := right.Args[0]
 19486  			left := v_1
 19487  			if left.Op != OpLsh32x32 {
 19488  				continue
 19489  			}
 19490  			_ = left.Args[1]
 19491  			if x != left.Args[0] {
 19492  				continue
 19493  			}
 19494  			z := left.Args[1]
 19495  			if z.Op != OpSub32 {
 19496  				continue
 19497  			}
 19498  			_ = z.Args[1]
 19499  			z_0 := z.Args[0]
 19500  			if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
 19501  				continue
 19502  			}
 19503  			v.reset(OpRotateLeft32)
 19504  			v.AddArg2(x, z)
 19505  			return true
 19506  		}
 19507  		break
 19508  	}
 19509  	// match: (Or32 right:(Rsh32Ux16 x y) left:(Lsh32x16 x z:(Sub16 (Const16 [32]) y)))
 19510  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
 19511  	// result: (RotateLeft32 x z)
 19512  	for {
 19513  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19514  			right := v_0
 19515  			if right.Op != OpRsh32Ux16 {
 19516  				continue
 19517  			}
 19518  			y := right.Args[1]
 19519  			x := right.Args[0]
 19520  			left := v_1
 19521  			if left.Op != OpLsh32x16 {
 19522  				continue
 19523  			}
 19524  			_ = left.Args[1]
 19525  			if x != left.Args[0] {
 19526  				continue
 19527  			}
 19528  			z := left.Args[1]
 19529  			if z.Op != OpSub16 {
 19530  				continue
 19531  			}
 19532  			_ = z.Args[1]
 19533  			z_0 := z.Args[0]
 19534  			if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
 19535  				continue
 19536  			}
 19537  			v.reset(OpRotateLeft32)
 19538  			v.AddArg2(x, z)
 19539  			return true
 19540  		}
 19541  		break
 19542  	}
 19543  	// match: (Or32 right:(Rsh32Ux8 x y) left:(Lsh32x8 x z:(Sub8 (Const8 [32]) y)))
 19544  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
 19545  	// result: (RotateLeft32 x z)
 19546  	for {
 19547  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19548  			right := v_0
 19549  			if right.Op != OpRsh32Ux8 {
 19550  				continue
 19551  			}
 19552  			y := right.Args[1]
 19553  			x := right.Args[0]
 19554  			left := v_1
 19555  			if left.Op != OpLsh32x8 {
 19556  				continue
 19557  			}
 19558  			_ = left.Args[1]
 19559  			if x != left.Args[0] {
 19560  				continue
 19561  			}
 19562  			z := left.Args[1]
 19563  			if z.Op != OpSub8 {
 19564  				continue
 19565  			}
 19566  			_ = z.Args[1]
 19567  			z_0 := z.Args[0]
 19568  			if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
 19569  				continue
 19570  			}
 19571  			v.reset(OpRotateLeft32)
 19572  			v.AddArg2(x, z)
 19573  			return true
 19574  		}
 19575  		break
 19576  	}
 19577  	return false
 19578  }
 19579  func rewriteValuegeneric_OpOr64(v *Value) bool {
 19580  	v_1 := v.Args[1]
 19581  	v_0 := v.Args[0]
 19582  	b := v.Block
 19583  	config := b.Func.Config
 19584  	// match: (Or64 (Const64 [c]) (Const64 [d]))
 19585  	// result: (Const64 [c|d])
 19586  	for {
 19587  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19588  			if v_0.Op != OpConst64 {
 19589  				continue
 19590  			}
 19591  			c := auxIntToInt64(v_0.AuxInt)
 19592  			if v_1.Op != OpConst64 {
 19593  				continue
 19594  			}
 19595  			d := auxIntToInt64(v_1.AuxInt)
 19596  			v.reset(OpConst64)
 19597  			v.AuxInt = int64ToAuxInt(c | d)
 19598  			return true
 19599  		}
 19600  		break
 19601  	}
 19602  	// match: (Or64 x x)
 19603  	// result: x
 19604  	for {
 19605  		x := v_0
 19606  		if x != v_1 {
 19607  			break
 19608  		}
 19609  		v.copyOf(x)
 19610  		return true
 19611  	}
 19612  	// match: (Or64 (Const64 [0]) x)
 19613  	// result: x
 19614  	for {
 19615  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19616  			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 19617  				continue
 19618  			}
 19619  			x := v_1
 19620  			v.copyOf(x)
 19621  			return true
 19622  		}
 19623  		break
 19624  	}
 19625  	// match: (Or64 (Const64 [-1]) _)
 19626  	// result: (Const64 [-1])
 19627  	for {
 19628  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19629  			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != -1 {
 19630  				continue
 19631  			}
 19632  			v.reset(OpConst64)
 19633  			v.AuxInt = int64ToAuxInt(-1)
 19634  			return true
 19635  		}
 19636  		break
 19637  	}
 19638  	// match: (Or64 (Com64 x) x)
 19639  	// result: (Const64 [-1])
 19640  	for {
 19641  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19642  			if v_0.Op != OpCom64 {
 19643  				continue
 19644  			}
 19645  			x := v_0.Args[0]
 19646  			if x != v_1 {
 19647  				continue
 19648  			}
 19649  			v.reset(OpConst64)
 19650  			v.AuxInt = int64ToAuxInt(-1)
 19651  			return true
 19652  		}
 19653  		break
 19654  	}
 19655  	// match: (Or64 x (Or64 x y))
 19656  	// result: (Or64 x y)
 19657  	for {
 19658  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19659  			x := v_0
 19660  			if v_1.Op != OpOr64 {
 19661  				continue
 19662  			}
 19663  			_ = v_1.Args[1]
 19664  			v_1_0 := v_1.Args[0]
 19665  			v_1_1 := v_1.Args[1]
 19666  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 19667  				if x != v_1_0 {
 19668  					continue
 19669  				}
 19670  				y := v_1_1
 19671  				v.reset(OpOr64)
 19672  				v.AddArg2(x, y)
 19673  				return true
 19674  			}
 19675  		}
 19676  		break
 19677  	}
 19678  	// match: (Or64 (And64 x (Const64 [c2])) (Const64 <t> [c1]))
 19679  	// cond: ^(c1 | c2) == 0
 19680  	// result: (Or64 (Const64 <t> [c1]) x)
 19681  	for {
 19682  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19683  			if v_0.Op != OpAnd64 {
 19684  				continue
 19685  			}
 19686  			_ = v_0.Args[1]
 19687  			v_0_0 := v_0.Args[0]
 19688  			v_0_1 := v_0.Args[1]
 19689  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
 19690  				x := v_0_0
 19691  				if v_0_1.Op != OpConst64 {
 19692  					continue
 19693  				}
 19694  				c2 := auxIntToInt64(v_0_1.AuxInt)
 19695  				if v_1.Op != OpConst64 {
 19696  					continue
 19697  				}
 19698  				t := v_1.Type
 19699  				c1 := auxIntToInt64(v_1.AuxInt)
 19700  				if !(^(c1 | c2) == 0) {
 19701  					continue
 19702  				}
 19703  				v.reset(OpOr64)
 19704  				v0 := b.NewValue0(v.Pos, OpConst64, t)
 19705  				v0.AuxInt = int64ToAuxInt(c1)
 19706  				v.AddArg2(v0, x)
 19707  				return true
 19708  			}
 19709  		}
 19710  		break
 19711  	}
 19712  	// match: (Or64 (Or64 i:(Const64 <t>) z) x)
 19713  	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
 19714  	// result: (Or64 i (Or64 <t> z x))
 19715  	for {
 19716  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19717  			if v_0.Op != OpOr64 {
 19718  				continue
 19719  			}
 19720  			_ = v_0.Args[1]
 19721  			v_0_0 := v_0.Args[0]
 19722  			v_0_1 := v_0.Args[1]
 19723  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
 19724  				i := v_0_0
 19725  				if i.Op != OpConst64 {
 19726  					continue
 19727  				}
 19728  				t := i.Type
 19729  				z := v_0_1
 19730  				x := v_1
 19731  				if !(z.Op != OpConst64 && x.Op != OpConst64) {
 19732  					continue
 19733  				}
 19734  				v.reset(OpOr64)
 19735  				v0 := b.NewValue0(v.Pos, OpOr64, t)
 19736  				v0.AddArg2(z, x)
 19737  				v.AddArg2(i, v0)
 19738  				return true
 19739  			}
 19740  		}
 19741  		break
 19742  	}
 19743  	// match: (Or64 (Const64 <t> [c]) (Or64 (Const64 <t> [d]) x))
 19744  	// result: (Or64 (Const64 <t> [c|d]) x)
 19745  	for {
 19746  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19747  			if v_0.Op != OpConst64 {
 19748  				continue
 19749  			}
 19750  			t := v_0.Type
 19751  			c := auxIntToInt64(v_0.AuxInt)
 19752  			if v_1.Op != OpOr64 {
 19753  				continue
 19754  			}
 19755  			_ = v_1.Args[1]
 19756  			v_1_0 := v_1.Args[0]
 19757  			v_1_1 := v_1.Args[1]
 19758  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 19759  				if v_1_0.Op != OpConst64 || v_1_0.Type != t {
 19760  					continue
 19761  				}
 19762  				d := auxIntToInt64(v_1_0.AuxInt)
 19763  				x := v_1_1
 19764  				v.reset(OpOr64)
 19765  				v0 := b.NewValue0(v.Pos, OpConst64, t)
 19766  				v0.AuxInt = int64ToAuxInt(c | d)
 19767  				v.AddArg2(v0, x)
 19768  				return true
 19769  			}
 19770  		}
 19771  		break
 19772  	}
 19773  	// match: (Or64 (Lsh64x64 x z:(Const64 <t> [c])) (Rsh64Ux64 x (Const64 [d])))
 19774  	// cond: c < 64 && d == 64-c && canRotate(config, 64)
 19775  	// result: (RotateLeft64 x z)
 19776  	for {
 19777  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19778  			if v_0.Op != OpLsh64x64 {
 19779  				continue
 19780  			}
 19781  			_ = v_0.Args[1]
 19782  			x := v_0.Args[0]
 19783  			z := v_0.Args[1]
 19784  			if z.Op != OpConst64 {
 19785  				continue
 19786  			}
 19787  			c := auxIntToInt64(z.AuxInt)
 19788  			if v_1.Op != OpRsh64Ux64 {
 19789  				continue
 19790  			}
 19791  			_ = v_1.Args[1]
 19792  			if x != v_1.Args[0] {
 19793  				continue
 19794  			}
 19795  			v_1_1 := v_1.Args[1]
 19796  			if v_1_1.Op != OpConst64 {
 19797  				continue
 19798  			}
 19799  			d := auxIntToInt64(v_1_1.AuxInt)
 19800  			if !(c < 64 && d == 64-c && canRotate(config, 64)) {
 19801  				continue
 19802  			}
 19803  			v.reset(OpRotateLeft64)
 19804  			v.AddArg2(x, z)
 19805  			return true
 19806  		}
 19807  		break
 19808  	}
 19809  	// match: (Or64 left:(Lsh64x64 x y) right:(Rsh64Ux64 x (Sub64 (Const64 [64]) y)))
 19810  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
 19811  	// result: (RotateLeft64 x y)
 19812  	for {
 19813  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19814  			left := v_0
 19815  			if left.Op != OpLsh64x64 {
 19816  				continue
 19817  			}
 19818  			y := left.Args[1]
 19819  			x := left.Args[0]
 19820  			right := v_1
 19821  			if right.Op != OpRsh64Ux64 {
 19822  				continue
 19823  			}
 19824  			_ = right.Args[1]
 19825  			if x != right.Args[0] {
 19826  				continue
 19827  			}
 19828  			right_1 := right.Args[1]
 19829  			if right_1.Op != OpSub64 {
 19830  				continue
 19831  			}
 19832  			_ = right_1.Args[1]
 19833  			right_1_0 := right_1.Args[0]
 19834  			if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
 19835  				continue
 19836  			}
 19837  			v.reset(OpRotateLeft64)
 19838  			v.AddArg2(x, y)
 19839  			return true
 19840  		}
 19841  		break
 19842  	}
 19843  	// match: (Or64 left:(Lsh64x32 x y) right:(Rsh64Ux32 x (Sub32 (Const32 [64]) y)))
 19844  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
 19845  	// result: (RotateLeft64 x y)
 19846  	for {
 19847  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19848  			left := v_0
 19849  			if left.Op != OpLsh64x32 {
 19850  				continue
 19851  			}
 19852  			y := left.Args[1]
 19853  			x := left.Args[0]
 19854  			right := v_1
 19855  			if right.Op != OpRsh64Ux32 {
 19856  				continue
 19857  			}
 19858  			_ = right.Args[1]
 19859  			if x != right.Args[0] {
 19860  				continue
 19861  			}
 19862  			right_1 := right.Args[1]
 19863  			if right_1.Op != OpSub32 {
 19864  				continue
 19865  			}
 19866  			_ = right_1.Args[1]
 19867  			right_1_0 := right_1.Args[0]
 19868  			if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
 19869  				continue
 19870  			}
 19871  			v.reset(OpRotateLeft64)
 19872  			v.AddArg2(x, y)
 19873  			return true
 19874  		}
 19875  		break
 19876  	}
 19877  	// match: (Or64 left:(Lsh64x16 x y) right:(Rsh64Ux16 x (Sub16 (Const16 [64]) y)))
 19878  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
 19879  	// result: (RotateLeft64 x y)
 19880  	for {
 19881  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19882  			left := v_0
 19883  			if left.Op != OpLsh64x16 {
 19884  				continue
 19885  			}
 19886  			y := left.Args[1]
 19887  			x := left.Args[0]
 19888  			right := v_1
 19889  			if right.Op != OpRsh64Ux16 {
 19890  				continue
 19891  			}
 19892  			_ = right.Args[1]
 19893  			if x != right.Args[0] {
 19894  				continue
 19895  			}
 19896  			right_1 := right.Args[1]
 19897  			if right_1.Op != OpSub16 {
 19898  				continue
 19899  			}
 19900  			_ = right_1.Args[1]
 19901  			right_1_0 := right_1.Args[0]
 19902  			if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
 19903  				continue
 19904  			}
 19905  			v.reset(OpRotateLeft64)
 19906  			v.AddArg2(x, y)
 19907  			return true
 19908  		}
 19909  		break
 19910  	}
 19911  	// match: (Or64 left:(Lsh64x8 x y) right:(Rsh64Ux8 x (Sub8 (Const8 [64]) y)))
 19912  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
 19913  	// result: (RotateLeft64 x y)
 19914  	for {
 19915  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19916  			left := v_0
 19917  			if left.Op != OpLsh64x8 {
 19918  				continue
 19919  			}
 19920  			y := left.Args[1]
 19921  			x := left.Args[0]
 19922  			right := v_1
 19923  			if right.Op != OpRsh64Ux8 {
 19924  				continue
 19925  			}
 19926  			_ = right.Args[1]
 19927  			if x != right.Args[0] {
 19928  				continue
 19929  			}
 19930  			right_1 := right.Args[1]
 19931  			if right_1.Op != OpSub8 {
 19932  				continue
 19933  			}
 19934  			_ = right_1.Args[1]
 19935  			right_1_0 := right_1.Args[0]
 19936  			if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
 19937  				continue
 19938  			}
 19939  			v.reset(OpRotateLeft64)
 19940  			v.AddArg2(x, y)
 19941  			return true
 19942  		}
 19943  		break
 19944  	}
 19945  	// match: (Or64 right:(Rsh64Ux64 x y) left:(Lsh64x64 x z:(Sub64 (Const64 [64]) y)))
 19946  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
 19947  	// result: (RotateLeft64 x z)
 19948  	for {
 19949  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19950  			right := v_0
 19951  			if right.Op != OpRsh64Ux64 {
 19952  				continue
 19953  			}
 19954  			y := right.Args[1]
 19955  			x := right.Args[0]
 19956  			left := v_1
 19957  			if left.Op != OpLsh64x64 {
 19958  				continue
 19959  			}
 19960  			_ = left.Args[1]
 19961  			if x != left.Args[0] {
 19962  				continue
 19963  			}
 19964  			z := left.Args[1]
 19965  			if z.Op != OpSub64 {
 19966  				continue
 19967  			}
 19968  			_ = z.Args[1]
 19969  			z_0 := z.Args[0]
 19970  			if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
 19971  				continue
 19972  			}
 19973  			v.reset(OpRotateLeft64)
 19974  			v.AddArg2(x, z)
 19975  			return true
 19976  		}
 19977  		break
 19978  	}
 19979  	// match: (Or64 right:(Rsh64Ux32 x y) left:(Lsh64x32 x z:(Sub32 (Const32 [64]) y)))
 19980  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
 19981  	// result: (RotateLeft64 x z)
 19982  	for {
 19983  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19984  			right := v_0
 19985  			if right.Op != OpRsh64Ux32 {
 19986  				continue
 19987  			}
 19988  			y := right.Args[1]
 19989  			x := right.Args[0]
 19990  			left := v_1
 19991  			if left.Op != OpLsh64x32 {
 19992  				continue
 19993  			}
 19994  			_ = left.Args[1]
 19995  			if x != left.Args[0] {
 19996  				continue
 19997  			}
 19998  			z := left.Args[1]
 19999  			if z.Op != OpSub32 {
 20000  				continue
 20001  			}
 20002  			_ = z.Args[1]
 20003  			z_0 := z.Args[0]
 20004  			if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
 20005  				continue
 20006  			}
 20007  			v.reset(OpRotateLeft64)
 20008  			v.AddArg2(x, z)
 20009  			return true
 20010  		}
 20011  		break
 20012  	}
 20013  	// match: (Or64 right:(Rsh64Ux16 x y) left:(Lsh64x16 x z:(Sub16 (Const16 [64]) y)))
 20014  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
 20015  	// result: (RotateLeft64 x z)
 20016  	for {
 20017  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20018  			right := v_0
 20019  			if right.Op != OpRsh64Ux16 {
 20020  				continue
 20021  			}
 20022  			y := right.Args[1]
 20023  			x := right.Args[0]
 20024  			left := v_1
 20025  			if left.Op != OpLsh64x16 {
 20026  				continue
 20027  			}
 20028  			_ = left.Args[1]
 20029  			if x != left.Args[0] {
 20030  				continue
 20031  			}
 20032  			z := left.Args[1]
 20033  			if z.Op != OpSub16 {
 20034  				continue
 20035  			}
 20036  			_ = z.Args[1]
 20037  			z_0 := z.Args[0]
 20038  			if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
 20039  				continue
 20040  			}
 20041  			v.reset(OpRotateLeft64)
 20042  			v.AddArg2(x, z)
 20043  			return true
 20044  		}
 20045  		break
 20046  	}
 20047  	// match: (Or64 right:(Rsh64Ux8 x y) left:(Lsh64x8 x z:(Sub8 (Const8 [64]) y)))
 20048  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
 20049  	// result: (RotateLeft64 x z)
 20050  	for {
 20051  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20052  			right := v_0
 20053  			if right.Op != OpRsh64Ux8 {
 20054  				continue
 20055  			}
 20056  			y := right.Args[1]
 20057  			x := right.Args[0]
 20058  			left := v_1
 20059  			if left.Op != OpLsh64x8 {
 20060  				continue
 20061  			}
 20062  			_ = left.Args[1]
 20063  			if x != left.Args[0] {
 20064  				continue
 20065  			}
 20066  			z := left.Args[1]
 20067  			if z.Op != OpSub8 {
 20068  				continue
 20069  			}
 20070  			_ = z.Args[1]
 20071  			z_0 := z.Args[0]
 20072  			if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
 20073  				continue
 20074  			}
 20075  			v.reset(OpRotateLeft64)
 20076  			v.AddArg2(x, z)
 20077  			return true
 20078  		}
 20079  		break
 20080  	}
 20081  	return false
 20082  }
 20083  func rewriteValuegeneric_OpOr8(v *Value) bool {
 20084  	v_1 := v.Args[1]
 20085  	v_0 := v.Args[0]
 20086  	b := v.Block
 20087  	config := b.Func.Config
 20088  	// match: (Or8 (Const8 [c]) (Const8 [d]))
 20089  	// result: (Const8 [c|d])
 20090  	for {
 20091  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20092  			if v_0.Op != OpConst8 {
 20093  				continue
 20094  			}
 20095  			c := auxIntToInt8(v_0.AuxInt)
 20096  			if v_1.Op != OpConst8 {
 20097  				continue
 20098  			}
 20099  			d := auxIntToInt8(v_1.AuxInt)
 20100  			v.reset(OpConst8)
 20101  			v.AuxInt = int8ToAuxInt(c | d)
 20102  			return true
 20103  		}
 20104  		break
 20105  	}
 20106  	// match: (Or8 x x)
 20107  	// result: x
 20108  	for {
 20109  		x := v_0
 20110  		if x != v_1 {
 20111  			break
 20112  		}
 20113  		v.copyOf(x)
 20114  		return true
 20115  	}
 20116  	// match: (Or8 (Const8 [0]) x)
 20117  	// result: x
 20118  	for {
 20119  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20120  			if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
 20121  				continue
 20122  			}
 20123  			x := v_1
 20124  			v.copyOf(x)
 20125  			return true
 20126  		}
 20127  		break
 20128  	}
 20129  	// match: (Or8 (Const8 [-1]) _)
 20130  	// result: (Const8 [-1])
 20131  	for {
 20132  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20133  			if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != -1 {
 20134  				continue
 20135  			}
 20136  			v.reset(OpConst8)
 20137  			v.AuxInt = int8ToAuxInt(-1)
 20138  			return true
 20139  		}
 20140  		break
 20141  	}
 20142  	// match: (Or8 (Com8 x) x)
 20143  	// result: (Const8 [-1])
 20144  	for {
 20145  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20146  			if v_0.Op != OpCom8 {
 20147  				continue
 20148  			}
 20149  			x := v_0.Args[0]
 20150  			if x != v_1 {
 20151  				continue
 20152  			}
 20153  			v.reset(OpConst8)
 20154  			v.AuxInt = int8ToAuxInt(-1)
 20155  			return true
 20156  		}
 20157  		break
 20158  	}
 20159  	// match: (Or8 x (Or8 x y))
 20160  	// result: (Or8 x y)
 20161  	for {
 20162  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20163  			x := v_0
 20164  			if v_1.Op != OpOr8 {
 20165  				continue
 20166  			}
 20167  			_ = v_1.Args[1]
 20168  			v_1_0 := v_1.Args[0]
 20169  			v_1_1 := v_1.Args[1]
 20170  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 20171  				if x != v_1_0 {
 20172  					continue
 20173  				}
 20174  				y := v_1_1
 20175  				v.reset(OpOr8)
 20176  				v.AddArg2(x, y)
 20177  				return true
 20178  			}
 20179  		}
 20180  		break
 20181  	}
 20182  	// match: (Or8 (And8 x (Const8 [c2])) (Const8 <t> [c1]))
 20183  	// cond: ^(c1 | c2) == 0
 20184  	// result: (Or8 (Const8 <t> [c1]) x)
 20185  	for {
 20186  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20187  			if v_0.Op != OpAnd8 {
 20188  				continue
 20189  			}
 20190  			_ = v_0.Args[1]
 20191  			v_0_0 := v_0.Args[0]
 20192  			v_0_1 := v_0.Args[1]
 20193  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
 20194  				x := v_0_0
 20195  				if v_0_1.Op != OpConst8 {
 20196  					continue
 20197  				}
 20198  				c2 := auxIntToInt8(v_0_1.AuxInt)
 20199  				if v_1.Op != OpConst8 {
 20200  					continue
 20201  				}
 20202  				t := v_1.Type
 20203  				c1 := auxIntToInt8(v_1.AuxInt)
 20204  				if !(^(c1 | c2) == 0) {
 20205  					continue
 20206  				}
 20207  				v.reset(OpOr8)
 20208  				v0 := b.NewValue0(v.Pos, OpConst8, t)
 20209  				v0.AuxInt = int8ToAuxInt(c1)
 20210  				v.AddArg2(v0, x)
 20211  				return true
 20212  			}
 20213  		}
 20214  		break
 20215  	}
 20216  	// match: (Or8 (Or8 i:(Const8 <t>) z) x)
 20217  	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
 20218  	// result: (Or8 i (Or8 <t> z x))
 20219  	for {
 20220  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20221  			if v_0.Op != OpOr8 {
 20222  				continue
 20223  			}
 20224  			_ = v_0.Args[1]
 20225  			v_0_0 := v_0.Args[0]
 20226  			v_0_1 := v_0.Args[1]
 20227  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
 20228  				i := v_0_0
 20229  				if i.Op != OpConst8 {
 20230  					continue
 20231  				}
 20232  				t := i.Type
 20233  				z := v_0_1
 20234  				x := v_1
 20235  				if !(z.Op != OpConst8 && x.Op != OpConst8) {
 20236  					continue
 20237  				}
 20238  				v.reset(OpOr8)
 20239  				v0 := b.NewValue0(v.Pos, OpOr8, t)
 20240  				v0.AddArg2(z, x)
 20241  				v.AddArg2(i, v0)
 20242  				return true
 20243  			}
 20244  		}
 20245  		break
 20246  	}
 20247  	// match: (Or8 (Const8 <t> [c]) (Or8 (Const8 <t> [d]) x))
 20248  	// result: (Or8 (Const8 <t> [c|d]) x)
 20249  	for {
 20250  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20251  			if v_0.Op != OpConst8 {
 20252  				continue
 20253  			}
 20254  			t := v_0.Type
 20255  			c := auxIntToInt8(v_0.AuxInt)
 20256  			if v_1.Op != OpOr8 {
 20257  				continue
 20258  			}
 20259  			_ = v_1.Args[1]
 20260  			v_1_0 := v_1.Args[0]
 20261  			v_1_1 := v_1.Args[1]
 20262  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 20263  				if v_1_0.Op != OpConst8 || v_1_0.Type != t {
 20264  					continue
 20265  				}
 20266  				d := auxIntToInt8(v_1_0.AuxInt)
 20267  				x := v_1_1
 20268  				v.reset(OpOr8)
 20269  				v0 := b.NewValue0(v.Pos, OpConst8, t)
 20270  				v0.AuxInt = int8ToAuxInt(c | d)
 20271  				v.AddArg2(v0, x)
 20272  				return true
 20273  			}
 20274  		}
 20275  		break
 20276  	}
 20277  	// match: (Or8 (Lsh8x64 x z:(Const64 <t> [c])) (Rsh8Ux64 x (Const64 [d])))
 20278  	// cond: c < 8 && d == 8-c && canRotate(config, 8)
 20279  	// result: (RotateLeft8 x z)
 20280  	for {
 20281  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20282  			if v_0.Op != OpLsh8x64 {
 20283  				continue
 20284  			}
 20285  			_ = v_0.Args[1]
 20286  			x := v_0.Args[0]
 20287  			z := v_0.Args[1]
 20288  			if z.Op != OpConst64 {
 20289  				continue
 20290  			}
 20291  			c := auxIntToInt64(z.AuxInt)
 20292  			if v_1.Op != OpRsh8Ux64 {
 20293  				continue
 20294  			}
 20295  			_ = v_1.Args[1]
 20296  			if x != v_1.Args[0] {
 20297  				continue
 20298  			}
 20299  			v_1_1 := v_1.Args[1]
 20300  			if v_1_1.Op != OpConst64 {
 20301  				continue
 20302  			}
 20303  			d := auxIntToInt64(v_1_1.AuxInt)
 20304  			if !(c < 8 && d == 8-c && canRotate(config, 8)) {
 20305  				continue
 20306  			}
 20307  			v.reset(OpRotateLeft8)
 20308  			v.AddArg2(x, z)
 20309  			return true
 20310  		}
 20311  		break
 20312  	}
 20313  	// match: (Or8 left:(Lsh8x64 x y) right:(Rsh8Ux64 x (Sub64 (Const64 [8]) y)))
 20314  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
 20315  	// result: (RotateLeft8 x y)
 20316  	for {
 20317  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20318  			left := v_0
 20319  			if left.Op != OpLsh8x64 {
 20320  				continue
 20321  			}
 20322  			y := left.Args[1]
 20323  			x := left.Args[0]
 20324  			right := v_1
 20325  			if right.Op != OpRsh8Ux64 {
 20326  				continue
 20327  			}
 20328  			_ = right.Args[1]
 20329  			if x != right.Args[0] {
 20330  				continue
 20331  			}
 20332  			right_1 := right.Args[1]
 20333  			if right_1.Op != OpSub64 {
 20334  				continue
 20335  			}
 20336  			_ = right_1.Args[1]
 20337  			right_1_0 := right_1.Args[0]
 20338  			if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
 20339  				continue
 20340  			}
 20341  			v.reset(OpRotateLeft8)
 20342  			v.AddArg2(x, y)
 20343  			return true
 20344  		}
 20345  		break
 20346  	}
 20347  	// match: (Or8 left:(Lsh8x32 x y) right:(Rsh8Ux32 x (Sub32 (Const32 [8]) y)))
 20348  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
 20349  	// result: (RotateLeft8 x y)
 20350  	for {
 20351  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20352  			left := v_0
 20353  			if left.Op != OpLsh8x32 {
 20354  				continue
 20355  			}
 20356  			y := left.Args[1]
 20357  			x := left.Args[0]
 20358  			right := v_1
 20359  			if right.Op != OpRsh8Ux32 {
 20360  				continue
 20361  			}
 20362  			_ = right.Args[1]
 20363  			if x != right.Args[0] {
 20364  				continue
 20365  			}
 20366  			right_1 := right.Args[1]
 20367  			if right_1.Op != OpSub32 {
 20368  				continue
 20369  			}
 20370  			_ = right_1.Args[1]
 20371  			right_1_0 := right_1.Args[0]
 20372  			if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
 20373  				continue
 20374  			}
 20375  			v.reset(OpRotateLeft8)
 20376  			v.AddArg2(x, y)
 20377  			return true
 20378  		}
 20379  		break
 20380  	}
 20381  	// match: (Or8 left:(Lsh8x16 x y) right:(Rsh8Ux16 x (Sub16 (Const16 [8]) y)))
 20382  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
 20383  	// result: (RotateLeft8 x y)
 20384  	for {
 20385  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20386  			left := v_0
 20387  			if left.Op != OpLsh8x16 {
 20388  				continue
 20389  			}
 20390  			y := left.Args[1]
 20391  			x := left.Args[0]
 20392  			right := v_1
 20393  			if right.Op != OpRsh8Ux16 {
 20394  				continue
 20395  			}
 20396  			_ = right.Args[1]
 20397  			if x != right.Args[0] {
 20398  				continue
 20399  			}
 20400  			right_1 := right.Args[1]
 20401  			if right_1.Op != OpSub16 {
 20402  				continue
 20403  			}
 20404  			_ = right_1.Args[1]
 20405  			right_1_0 := right_1.Args[0]
 20406  			if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
 20407  				continue
 20408  			}
 20409  			v.reset(OpRotateLeft8)
 20410  			v.AddArg2(x, y)
 20411  			return true
 20412  		}
 20413  		break
 20414  	}
 20415  	// match: (Or8 left:(Lsh8x8 x y) right:(Rsh8Ux8 x (Sub8 (Const8 [8]) y)))
 20416  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
 20417  	// result: (RotateLeft8 x y)
 20418  	for {
 20419  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20420  			left := v_0
 20421  			if left.Op != OpLsh8x8 {
 20422  				continue
 20423  			}
 20424  			y := left.Args[1]
 20425  			x := left.Args[0]
 20426  			right := v_1
 20427  			if right.Op != OpRsh8Ux8 {
 20428  				continue
 20429  			}
 20430  			_ = right.Args[1]
 20431  			if x != right.Args[0] {
 20432  				continue
 20433  			}
 20434  			right_1 := right.Args[1]
 20435  			if right_1.Op != OpSub8 {
 20436  				continue
 20437  			}
 20438  			_ = right_1.Args[1]
 20439  			right_1_0 := right_1.Args[0]
 20440  			if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
 20441  				continue
 20442  			}
 20443  			v.reset(OpRotateLeft8)
 20444  			v.AddArg2(x, y)
 20445  			return true
 20446  		}
 20447  		break
 20448  	}
 20449  	// match: (Or8 right:(Rsh8Ux64 x y) left:(Lsh8x64 x z:(Sub64 (Const64 [8]) y)))
 20450  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
 20451  	// result: (RotateLeft8 x z)
 20452  	for {
 20453  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20454  			right := v_0
 20455  			if right.Op != OpRsh8Ux64 {
 20456  				continue
 20457  			}
 20458  			y := right.Args[1]
 20459  			x := right.Args[0]
 20460  			left := v_1
 20461  			if left.Op != OpLsh8x64 {
 20462  				continue
 20463  			}
 20464  			_ = left.Args[1]
 20465  			if x != left.Args[0] {
 20466  				continue
 20467  			}
 20468  			z := left.Args[1]
 20469  			if z.Op != OpSub64 {
 20470  				continue
 20471  			}
 20472  			_ = z.Args[1]
 20473  			z_0 := z.Args[0]
 20474  			if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
 20475  				continue
 20476  			}
 20477  			v.reset(OpRotateLeft8)
 20478  			v.AddArg2(x, z)
 20479  			return true
 20480  		}
 20481  		break
 20482  	}
 20483  	// match: (Or8 right:(Rsh8Ux32 x y) left:(Lsh8x32 x z:(Sub32 (Const32 [8]) y)))
 20484  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
 20485  	// result: (RotateLeft8 x z)
 20486  	for {
 20487  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20488  			right := v_0
 20489  			if right.Op != OpRsh8Ux32 {
 20490  				continue
 20491  			}
 20492  			y := right.Args[1]
 20493  			x := right.Args[0]
 20494  			left := v_1
 20495  			if left.Op != OpLsh8x32 {
 20496  				continue
 20497  			}
 20498  			_ = left.Args[1]
 20499  			if x != left.Args[0] {
 20500  				continue
 20501  			}
 20502  			z := left.Args[1]
 20503  			if z.Op != OpSub32 {
 20504  				continue
 20505  			}
 20506  			_ = z.Args[1]
 20507  			z_0 := z.Args[0]
 20508  			if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
 20509  				continue
 20510  			}
 20511  			v.reset(OpRotateLeft8)
 20512  			v.AddArg2(x, z)
 20513  			return true
 20514  		}
 20515  		break
 20516  	}
 20517  	// match: (Or8 right:(Rsh8Ux16 x y) left:(Lsh8x16 x z:(Sub16 (Const16 [8]) y)))
 20518  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
 20519  	// result: (RotateLeft8 x z)
 20520  	for {
 20521  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20522  			right := v_0
 20523  			if right.Op != OpRsh8Ux16 {
 20524  				continue
 20525  			}
 20526  			y := right.Args[1]
 20527  			x := right.Args[0]
 20528  			left := v_1
 20529  			if left.Op != OpLsh8x16 {
 20530  				continue
 20531  			}
 20532  			_ = left.Args[1]
 20533  			if x != left.Args[0] {
 20534  				continue
 20535  			}
 20536  			z := left.Args[1]
 20537  			if z.Op != OpSub16 {
 20538  				continue
 20539  			}
 20540  			_ = z.Args[1]
 20541  			z_0 := z.Args[0]
 20542  			if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
 20543  				continue
 20544  			}
 20545  			v.reset(OpRotateLeft8)
 20546  			v.AddArg2(x, z)
 20547  			return true
 20548  		}
 20549  		break
 20550  	}
 20551  	// match: (Or8 right:(Rsh8Ux8 x y) left:(Lsh8x8 x z:(Sub8 (Const8 [8]) y)))
 20552  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
 20553  	// result: (RotateLeft8 x z)
 20554  	for {
 20555  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20556  			right := v_0
 20557  			if right.Op != OpRsh8Ux8 {
 20558  				continue
 20559  			}
 20560  			y := right.Args[1]
 20561  			x := right.Args[0]
 20562  			left := v_1
 20563  			if left.Op != OpLsh8x8 {
 20564  				continue
 20565  			}
 20566  			_ = left.Args[1]
 20567  			if x != left.Args[0] {
 20568  				continue
 20569  			}
 20570  			z := left.Args[1]
 20571  			if z.Op != OpSub8 {
 20572  				continue
 20573  			}
 20574  			_ = z.Args[1]
 20575  			z_0 := z.Args[0]
 20576  			if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
 20577  				continue
 20578  			}
 20579  			v.reset(OpRotateLeft8)
 20580  			v.AddArg2(x, z)
 20581  			return true
 20582  		}
 20583  		break
 20584  	}
 20585  	return false
 20586  }
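// rewriteValuegeneric_OpOrB folds boolean ORs of comparisons. The first group
// of rules merges a pair of signed range checks into one unsigned comparison:
// with c >= d, the test (c < x) || (x < d) holds exactly when x lies outside
// the interval [d, c], which the rewrite checks as c-d <u x-d.
// Roughly, source like x < 10 || x > 100 compiles down to a single unsigned
// comparison of x-10 against 90.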
 20587  func rewriteValuegeneric_OpOrB(v *Value) bool {
 20588  	v_1 := v.Args[1]
 20589  	v_0 := v.Args[0]
 20590  	b := v.Block
 20591  	// match: (OrB (Less64 (Const64 [c]) x) (Less64 x (Const64 [d])))
 20592  	// cond: c >= d
 20593  	// result: (Less64U (Const64 <x.Type> [c-d]) (Sub64 <x.Type> x (Const64 <x.Type> [d])))
 20594  	for {
 20595  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20596  			if v_0.Op != OpLess64 {
 20597  				continue
 20598  			}
 20599  			x := v_0.Args[1]
 20600  			v_0_0 := v_0.Args[0]
 20601  			if v_0_0.Op != OpConst64 {
 20602  				continue
 20603  			}
 20604  			c := auxIntToInt64(v_0_0.AuxInt)
 20605  			if v_1.Op != OpLess64 {
 20606  				continue
 20607  			}
 20608  			_ = v_1.Args[1]
 20609  			if x != v_1.Args[0] {
 20610  				continue
 20611  			}
 20612  			v_1_1 := v_1.Args[1]
 20613  			if v_1_1.Op != OpConst64 {
 20614  				continue
 20615  			}
 20616  			d := auxIntToInt64(v_1_1.AuxInt)
 20617  			if !(c >= d) {
 20618  				continue
 20619  			}
 20620  			v.reset(OpLess64U)
 20621  			v0 := b.NewValue0(v.Pos, OpConst64, x.Type)
 20622  			v0.AuxInt = int64ToAuxInt(c - d)
 20623  			v1 := b.NewValue0(v.Pos, OpSub64, x.Type)
 20624  			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
 20625  			v2.AuxInt = int64ToAuxInt(d)
 20626  			v1.AddArg2(x, v2)
 20627  			v.AddArg2(v0, v1)
 20628  			return true
 20629  		}
 20630  		break
 20631  	}
 20632  	// match: (OrB (Leq64 (Const64 [c]) x) (Less64 x (Const64 [d])))
 20633  	// cond: c >= d
 20634  	// result: (Leq64U (Const64 <x.Type> [c-d]) (Sub64 <x.Type> x (Const64 <x.Type> [d])))
 20635  	for {
 20636  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20637  			if v_0.Op != OpLeq64 {
 20638  				continue
 20639  			}
 20640  			x := v_0.Args[1]
 20641  			v_0_0 := v_0.Args[0]
 20642  			if v_0_0.Op != OpConst64 {
 20643  				continue
 20644  			}
 20645  			c := auxIntToInt64(v_0_0.AuxInt)
 20646  			if v_1.Op != OpLess64 {
 20647  				continue
 20648  			}
 20649  			_ = v_1.Args[1]
 20650  			if x != v_1.Args[0] {
 20651  				continue
 20652  			}
 20653  			v_1_1 := v_1.Args[1]
 20654  			if v_1_1.Op != OpConst64 {
 20655  				continue
 20656  			}
 20657  			d := auxIntToInt64(v_1_1.AuxInt)
 20658  			if !(c >= d) {
 20659  				continue
 20660  			}
 20661  			v.reset(OpLeq64U)
 20662  			v0 := b.NewValue0(v.Pos, OpConst64, x.Type)
 20663  			v0.AuxInt = int64ToAuxInt(c - d)
 20664  			v1 := b.NewValue0(v.Pos, OpSub64, x.Type)
 20665  			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
 20666  			v2.AuxInt = int64ToAuxInt(d)
 20667  			v1.AddArg2(x, v2)
 20668  			v.AddArg2(v0, v1)
 20669  			return true
 20670  		}
 20671  		break
 20672  	}
 20673  	// match: (OrB (Less32 (Const32 [c]) x) (Less32 x (Const32 [d])))
 20674  	// cond: c >= d
 20675  	// result: (Less32U (Const32 <x.Type> [c-d]) (Sub32 <x.Type> x (Const32 <x.Type> [d])))
 20676  	for {
 20677  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20678  			if v_0.Op != OpLess32 {
 20679  				continue
 20680  			}
 20681  			x := v_0.Args[1]
 20682  			v_0_0 := v_0.Args[0]
 20683  			if v_0_0.Op != OpConst32 {
 20684  				continue
 20685  			}
 20686  			c := auxIntToInt32(v_0_0.AuxInt)
 20687  			if v_1.Op != OpLess32 {
 20688  				continue
 20689  			}
 20690  			_ = v_1.Args[1]
 20691  			if x != v_1.Args[0] {
 20692  				continue
 20693  			}
 20694  			v_1_1 := v_1.Args[1]
 20695  			if v_1_1.Op != OpConst32 {
 20696  				continue
 20697  			}
 20698  			d := auxIntToInt32(v_1_1.AuxInt)
 20699  			if !(c >= d) {
 20700  				continue
 20701  			}
 20702  			v.reset(OpLess32U)
 20703  			v0 := b.NewValue0(v.Pos, OpConst32, x.Type)
 20704  			v0.AuxInt = int32ToAuxInt(c - d)
 20705  			v1 := b.NewValue0(v.Pos, OpSub32, x.Type)
 20706  			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
 20707  			v2.AuxInt = int32ToAuxInt(d)
 20708  			v1.AddArg2(x, v2)
 20709  			v.AddArg2(v0, v1)
 20710  			return true
 20711  		}
 20712  		break
 20713  	}
 20714  	// match: (OrB (Leq32 (Const32 [c]) x) (Less32 x (Const32 [d])))
 20715  	// cond: c >= d
 20716  	// result: (Leq32U (Const32 <x.Type> [c-d]) (Sub32 <x.Type> x (Const32 <x.Type> [d])))
 20717  	for {
 20718  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20719  			if v_0.Op != OpLeq32 {
 20720  				continue
 20721  			}
 20722  			x := v_0.Args[1]
 20723  			v_0_0 := v_0.Args[0]
 20724  			if v_0_0.Op != OpConst32 {
 20725  				continue
 20726  			}
 20727  			c := auxIntToInt32(v_0_0.AuxInt)
 20728  			if v_1.Op != OpLess32 {
 20729  				continue
 20730  			}
 20731  			_ = v_1.Args[1]
 20732  			if x != v_1.Args[0] {
 20733  				continue
 20734  			}
 20735  			v_1_1 := v_1.Args[1]
 20736  			if v_1_1.Op != OpConst32 {
 20737  				continue
 20738  			}
 20739  			d := auxIntToInt32(v_1_1.AuxInt)
 20740  			if !(c >= d) {
 20741  				continue
 20742  			}
 20743  			v.reset(OpLeq32U)
 20744  			v0 := b.NewValue0(v.Pos, OpConst32, x.Type)
 20745  			v0.AuxInt = int32ToAuxInt(c - d)
 20746  			v1 := b.NewValue0(v.Pos, OpSub32, x.Type)
 20747  			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
 20748  			v2.AuxInt = int32ToAuxInt(d)
 20749  			v1.AddArg2(x, v2)
 20750  			v.AddArg2(v0, v1)
 20751  			return true
 20752  		}
 20753  		break
 20754  	}
 20755  	// match: (OrB (Less16 (Const16 [c]) x) (Less16 x (Const16 [d])))
 20756  	// cond: c >= d
 20757  	// result: (Less16U (Const16 <x.Type> [c-d]) (Sub16 <x.Type> x (Const16 <x.Type> [d])))
 20758  	for {
 20759  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20760  			if v_0.Op != OpLess16 {
 20761  				continue
 20762  			}
 20763  			x := v_0.Args[1]
 20764  			v_0_0 := v_0.Args[0]
 20765  			if v_0_0.Op != OpConst16 {
 20766  				continue
 20767  			}
 20768  			c := auxIntToInt16(v_0_0.AuxInt)
 20769  			if v_1.Op != OpLess16 {
 20770  				continue
 20771  			}
 20772  			_ = v_1.Args[1]
 20773  			if x != v_1.Args[0] {
 20774  				continue
 20775  			}
 20776  			v_1_1 := v_1.Args[1]
 20777  			if v_1_1.Op != OpConst16 {
 20778  				continue
 20779  			}
 20780  			d := auxIntToInt16(v_1_1.AuxInt)
 20781  			if !(c >= d) {
 20782  				continue
 20783  			}
 20784  			v.reset(OpLess16U)
 20785  			v0 := b.NewValue0(v.Pos, OpConst16, x.Type)
 20786  			v0.AuxInt = int16ToAuxInt(c - d)
 20787  			v1 := b.NewValue0(v.Pos, OpSub16, x.Type)
 20788  			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
 20789  			v2.AuxInt = int16ToAuxInt(d)
 20790  			v1.AddArg2(x, v2)
 20791  			v.AddArg2(v0, v1)
 20792  			return true
 20793  		}
 20794  		break
 20795  	}
 20796  	// match: (OrB (Leq16 (Const16 [c]) x) (Less16 x (Const16 [d])))
 20797  	// cond: c >= d
 20798  	// result: (Leq16U (Const16 <x.Type> [c-d]) (Sub16 <x.Type> x (Const16 <x.Type> [d])))
 20799  	for {
 20800  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20801  			if v_0.Op != OpLeq16 {
 20802  				continue
 20803  			}
 20804  			x := v_0.Args[1]
 20805  			v_0_0 := v_0.Args[0]
 20806  			if v_0_0.Op != OpConst16 {
 20807  				continue
 20808  			}
 20809  			c := auxIntToInt16(v_0_0.AuxInt)
 20810  			if v_1.Op != OpLess16 {
 20811  				continue
 20812  			}
 20813  			_ = v_1.Args[1]
 20814  			if x != v_1.Args[0] {
 20815  				continue
 20816  			}
 20817  			v_1_1 := v_1.Args[1]
 20818  			if v_1_1.Op != OpConst16 {
 20819  				continue
 20820  			}
 20821  			d := auxIntToInt16(v_1_1.AuxInt)
 20822  			if !(c >= d) {
 20823  				continue
 20824  			}
 20825  			v.reset(OpLeq16U)
 20826  			v0 := b.NewValue0(v.Pos, OpConst16, x.Type)
 20827  			v0.AuxInt = int16ToAuxInt(c - d)
 20828  			v1 := b.NewValue0(v.Pos, OpSub16, x.Type)
 20829  			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
 20830  			v2.AuxInt = int16ToAuxInt(d)
 20831  			v1.AddArg2(x, v2)
 20832  			v.AddArg2(v0, v1)
 20833  			return true
 20834  		}
 20835  		break
 20836  	}
 20837  	// match: (OrB (Less8 (Const8 [c]) x) (Less8 x (Const8 [d])))
 20838  	// cond: c >= d
 20839  	// result: (Less8U (Const8 <x.Type> [c-d]) (Sub8 <x.Type> x (Const8 <x.Type> [d])))
 20840  	for {
 20841  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20842  			if v_0.Op != OpLess8 {
 20843  				continue
 20844  			}
 20845  			x := v_0.Args[1]
 20846  			v_0_0 := v_0.Args[0]
 20847  			if v_0_0.Op != OpConst8 {
 20848  				continue
 20849  			}
 20850  			c := auxIntToInt8(v_0_0.AuxInt)
 20851  			if v_1.Op != OpLess8 {
 20852  				continue
 20853  			}
 20854  			_ = v_1.Args[1]
 20855  			if x != v_1.Args[0] {
 20856  				continue
 20857  			}
 20858  			v_1_1 := v_1.Args[1]
 20859  			if v_1_1.Op != OpConst8 {
 20860  				continue
 20861  			}
 20862  			d := auxIntToInt8(v_1_1.AuxInt)
 20863  			if !(c >= d) {
 20864  				continue
 20865  			}
 20866  			v.reset(OpLess8U)
 20867  			v0 := b.NewValue0(v.Pos, OpConst8, x.Type)
 20868  			v0.AuxInt = int8ToAuxInt(c - d)
 20869  			v1 := b.NewValue0(v.Pos, OpSub8, x.Type)
 20870  			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
 20871  			v2.AuxInt = int8ToAuxInt(d)
 20872  			v1.AddArg2(x, v2)
 20873  			v.AddArg2(v0, v1)
 20874  			return true
 20875  		}
 20876  		break
 20877  	}
 20878  	// match: (OrB (Leq8 (Const8 [c]) x) (Less8 x (Const8 [d])))
 20879  	// cond: c >= d
 20880  	// result: (Leq8U (Const8 <x.Type> [c-d]) (Sub8 <x.Type> x (Const8 <x.Type> [d])))
 20881  	for {
 20882  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20883  			if v_0.Op != OpLeq8 {
 20884  				continue
 20885  			}
 20886  			x := v_0.Args[1]
 20887  			v_0_0 := v_0.Args[0]
 20888  			if v_0_0.Op != OpConst8 {
 20889  				continue
 20890  			}
 20891  			c := auxIntToInt8(v_0_0.AuxInt)
 20892  			if v_1.Op != OpLess8 {
 20893  				continue
 20894  			}
 20895  			_ = v_1.Args[1]
 20896  			if x != v_1.Args[0] {
 20897  				continue
 20898  			}
 20899  			v_1_1 := v_1.Args[1]
 20900  			if v_1_1.Op != OpConst8 {
 20901  				continue
 20902  			}
 20903  			d := auxIntToInt8(v_1_1.AuxInt)
 20904  			if !(c >= d) {
 20905  				continue
 20906  			}
 20907  			v.reset(OpLeq8U)
 20908  			v0 := b.NewValue0(v.Pos, OpConst8, x.Type)
 20909  			v0.AuxInt = int8ToAuxInt(c - d)
 20910  			v1 := b.NewValue0(v.Pos, OpSub8, x.Type)
 20911  			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
 20912  			v2.AuxInt = int8ToAuxInt(d)
 20913  			v1.AddArg2(x, v2)
 20914  			v.AddArg2(v0, v1)
 20915  			return true
 20916  		}
 20917  		break
 20918  	}
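	// The next group handles a Leq on the right-hand side: x <= d is treated
	// as x < d+1, so the same subtraction trick applies with d+1 in place of d.
	// The extra condition d+1 > d rejects the case where d is the maximum value
	// of the type and d+1 would overflow.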
 20919  	// match: (OrB (Less64 (Const64 [c]) x) (Leq64 x (Const64 [d])))
 20920  	// cond: c >= d+1 && d+1 > d
 20921  	// result: (Less64U (Const64 <x.Type> [c-d-1]) (Sub64 <x.Type> x (Const64 <x.Type> [d+1])))
 20922  	for {
 20923  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20924  			if v_0.Op != OpLess64 {
 20925  				continue
 20926  			}
 20927  			x := v_0.Args[1]
 20928  			v_0_0 := v_0.Args[0]
 20929  			if v_0_0.Op != OpConst64 {
 20930  				continue
 20931  			}
 20932  			c := auxIntToInt64(v_0_0.AuxInt)
 20933  			if v_1.Op != OpLeq64 {
 20934  				continue
 20935  			}
 20936  			_ = v_1.Args[1]
 20937  			if x != v_1.Args[0] {
 20938  				continue
 20939  			}
 20940  			v_1_1 := v_1.Args[1]
 20941  			if v_1_1.Op != OpConst64 {
 20942  				continue
 20943  			}
 20944  			d := auxIntToInt64(v_1_1.AuxInt)
 20945  			if !(c >= d+1 && d+1 > d) {
 20946  				continue
 20947  			}
 20948  			v.reset(OpLess64U)
 20949  			v0 := b.NewValue0(v.Pos, OpConst64, x.Type)
 20950  			v0.AuxInt = int64ToAuxInt(c - d - 1)
 20951  			v1 := b.NewValue0(v.Pos, OpSub64, x.Type)
 20952  			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
 20953  			v2.AuxInt = int64ToAuxInt(d + 1)
 20954  			v1.AddArg2(x, v2)
 20955  			v.AddArg2(v0, v1)
 20956  			return true
 20957  		}
 20958  		break
 20959  	}
 20960  	// match: (OrB (Leq64 (Const64 [c]) x) (Leq64 x (Const64 [d])))
 20961  	// cond: c >= d+1 && d+1 > d
 20962  	// result: (Leq64U (Const64 <x.Type> [c-d-1]) (Sub64 <x.Type> x (Const64 <x.Type> [d+1])))
 20963  	for {
 20964  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20965  			if v_0.Op != OpLeq64 {
 20966  				continue
 20967  			}
 20968  			x := v_0.Args[1]
 20969  			v_0_0 := v_0.Args[0]
 20970  			if v_0_0.Op != OpConst64 {
 20971  				continue
 20972  			}
 20973  			c := auxIntToInt64(v_0_0.AuxInt)
 20974  			if v_1.Op != OpLeq64 {
 20975  				continue
 20976  			}
 20977  			_ = v_1.Args[1]
 20978  			if x != v_1.Args[0] {
 20979  				continue
 20980  			}
 20981  			v_1_1 := v_1.Args[1]
 20982  			if v_1_1.Op != OpConst64 {
 20983  				continue
 20984  			}
 20985  			d := auxIntToInt64(v_1_1.AuxInt)
 20986  			if !(c >= d+1 && d+1 > d) {
 20987  				continue
 20988  			}
 20989  			v.reset(OpLeq64U)
 20990  			v0 := b.NewValue0(v.Pos, OpConst64, x.Type)
 20991  			v0.AuxInt = int64ToAuxInt(c - d - 1)
 20992  			v1 := b.NewValue0(v.Pos, OpSub64, x.Type)
 20993  			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
 20994  			v2.AuxInt = int64ToAuxInt(d + 1)
 20995  			v1.AddArg2(x, v2)
 20996  			v.AddArg2(v0, v1)
 20997  			return true
 20998  		}
 20999  		break
 21000  	}
 21001  	// match: (OrB (Less32 (Const32 [c]) x) (Leq32 x (Const32 [d])))
 21002  	// cond: c >= d+1 && d+1 > d
 21003  	// result: (Less32U (Const32 <x.Type> [c-d-1]) (Sub32 <x.Type> x (Const32 <x.Type> [d+1])))
 21004  	for {
 21005  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21006  			if v_0.Op != OpLess32 {
 21007  				continue
 21008  			}
 21009  			x := v_0.Args[1]
 21010  			v_0_0 := v_0.Args[0]
 21011  			if v_0_0.Op != OpConst32 {
 21012  				continue
 21013  			}
 21014  			c := auxIntToInt32(v_0_0.AuxInt)
 21015  			if v_1.Op != OpLeq32 {
 21016  				continue
 21017  			}
 21018  			_ = v_1.Args[1]
 21019  			if x != v_1.Args[0] {
 21020  				continue
 21021  			}
 21022  			v_1_1 := v_1.Args[1]
 21023  			if v_1_1.Op != OpConst32 {
 21024  				continue
 21025  			}
 21026  			d := auxIntToInt32(v_1_1.AuxInt)
 21027  			if !(c >= d+1 && d+1 > d) {
 21028  				continue
 21029  			}
 21030  			v.reset(OpLess32U)
 21031  			v0 := b.NewValue0(v.Pos, OpConst32, x.Type)
 21032  			v0.AuxInt = int32ToAuxInt(c - d - 1)
 21033  			v1 := b.NewValue0(v.Pos, OpSub32, x.Type)
 21034  			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
 21035  			v2.AuxInt = int32ToAuxInt(d + 1)
 21036  			v1.AddArg2(x, v2)
 21037  			v.AddArg2(v0, v1)
 21038  			return true
 21039  		}
 21040  		break
 21041  	}
 21042  	// match: (OrB (Leq32 (Const32 [c]) x) (Leq32 x (Const32 [d])))
 21043  	// cond: c >= d+1 && d+1 > d
 21044  	// result: (Leq32U (Const32 <x.Type> [c-d-1]) (Sub32 <x.Type> x (Const32 <x.Type> [d+1])))
 21045  	for {
 21046  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21047  			if v_0.Op != OpLeq32 {
 21048  				continue
 21049  			}
 21050  			x := v_0.Args[1]
 21051  			v_0_0 := v_0.Args[0]
 21052  			if v_0_0.Op != OpConst32 {
 21053  				continue
 21054  			}
 21055  			c := auxIntToInt32(v_0_0.AuxInt)
 21056  			if v_1.Op != OpLeq32 {
 21057  				continue
 21058  			}
 21059  			_ = v_1.Args[1]
 21060  			if x != v_1.Args[0] {
 21061  				continue
 21062  			}
 21063  			v_1_1 := v_1.Args[1]
 21064  			if v_1_1.Op != OpConst32 {
 21065  				continue
 21066  			}
 21067  			d := auxIntToInt32(v_1_1.AuxInt)
 21068  			if !(c >= d+1 && d+1 > d) {
 21069  				continue
 21070  			}
 21071  			v.reset(OpLeq32U)
 21072  			v0 := b.NewValue0(v.Pos, OpConst32, x.Type)
 21073  			v0.AuxInt = int32ToAuxInt(c - d - 1)
 21074  			v1 := b.NewValue0(v.Pos, OpSub32, x.Type)
 21075  			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
 21076  			v2.AuxInt = int32ToAuxInt(d + 1)
 21077  			v1.AddArg2(x, v2)
 21078  			v.AddArg2(v0, v1)
 21079  			return true
 21080  		}
 21081  		break
 21082  	}
 21083  	// match: (OrB (Less16 (Const16 [c]) x) (Leq16 x (Const16 [d])))
 21084  	// cond: c >= d+1 && d+1 > d
 21085  	// result: (Less16U (Const16 <x.Type> [c-d-1]) (Sub16 <x.Type> x (Const16 <x.Type> [d+1])))
 21086  	for {
 21087  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21088  			if v_0.Op != OpLess16 {
 21089  				continue
 21090  			}
 21091  			x := v_0.Args[1]
 21092  			v_0_0 := v_0.Args[0]
 21093  			if v_0_0.Op != OpConst16 {
 21094  				continue
 21095  			}
 21096  			c := auxIntToInt16(v_0_0.AuxInt)
 21097  			if v_1.Op != OpLeq16 {
 21098  				continue
 21099  			}
 21100  			_ = v_1.Args[1]
 21101  			if x != v_1.Args[0] {
 21102  				continue
 21103  			}
 21104  			v_1_1 := v_1.Args[1]
 21105  			if v_1_1.Op != OpConst16 {
 21106  				continue
 21107  			}
 21108  			d := auxIntToInt16(v_1_1.AuxInt)
 21109  			if !(c >= d+1 && d+1 > d) {
 21110  				continue
 21111  			}
 21112  			v.reset(OpLess16U)
 21113  			v0 := b.NewValue0(v.Pos, OpConst16, x.Type)
 21114  			v0.AuxInt = int16ToAuxInt(c - d - 1)
 21115  			v1 := b.NewValue0(v.Pos, OpSub16, x.Type)
 21116  			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
 21117  			v2.AuxInt = int16ToAuxInt(d + 1)
 21118  			v1.AddArg2(x, v2)
 21119  			v.AddArg2(v0, v1)
 21120  			return true
 21121  		}
 21122  		break
 21123  	}
 21124  	// match: (OrB (Leq16 (Const16 [c]) x) (Leq16 x (Const16 [d])))
 21125  	// cond: c >= d+1 && d+1 > d
 21126  	// result: (Leq16U (Const16 <x.Type> [c-d-1]) (Sub16 <x.Type> x (Const16 <x.Type> [d+1])))
 21127  	for {
 21128  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21129  			if v_0.Op != OpLeq16 {
 21130  				continue
 21131  			}
 21132  			x := v_0.Args[1]
 21133  			v_0_0 := v_0.Args[0]
 21134  			if v_0_0.Op != OpConst16 {
 21135  				continue
 21136  			}
 21137  			c := auxIntToInt16(v_0_0.AuxInt)
 21138  			if v_1.Op != OpLeq16 {
 21139  				continue
 21140  			}
 21141  			_ = v_1.Args[1]
 21142  			if x != v_1.Args[0] {
 21143  				continue
 21144  			}
 21145  			v_1_1 := v_1.Args[1]
 21146  			if v_1_1.Op != OpConst16 {
 21147  				continue
 21148  			}
 21149  			d := auxIntToInt16(v_1_1.AuxInt)
 21150  			if !(c >= d+1 && d+1 > d) {
 21151  				continue
 21152  			}
 21153  			v.reset(OpLeq16U)
 21154  			v0 := b.NewValue0(v.Pos, OpConst16, x.Type)
 21155  			v0.AuxInt = int16ToAuxInt(c - d - 1)
 21156  			v1 := b.NewValue0(v.Pos, OpSub16, x.Type)
 21157  			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
 21158  			v2.AuxInt = int16ToAuxInt(d + 1)
 21159  			v1.AddArg2(x, v2)
 21160  			v.AddArg2(v0, v1)
 21161  			return true
 21162  		}
 21163  		break
 21164  	}
 21165  	// match: (OrB (Less8 (Const8 [c]) x) (Leq8 x (Const8 [d])))
 21166  	// cond: c >= d+1 && d+1 > d
 21167  	// result: (Less8U (Const8 <x.Type> [c-d-1]) (Sub8 <x.Type> x (Const8 <x.Type> [d+1])))
 21168  	for {
 21169  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21170  			if v_0.Op != OpLess8 {
 21171  				continue
 21172  			}
 21173  			x := v_0.Args[1]
 21174  			v_0_0 := v_0.Args[0]
 21175  			if v_0_0.Op != OpConst8 {
 21176  				continue
 21177  			}
 21178  			c := auxIntToInt8(v_0_0.AuxInt)
 21179  			if v_1.Op != OpLeq8 {
 21180  				continue
 21181  			}
 21182  			_ = v_1.Args[1]
 21183  			if x != v_1.Args[0] {
 21184  				continue
 21185  			}
 21186  			v_1_1 := v_1.Args[1]
 21187  			if v_1_1.Op != OpConst8 {
 21188  				continue
 21189  			}
 21190  			d := auxIntToInt8(v_1_1.AuxInt)
 21191  			if !(c >= d+1 && d+1 > d) {
 21192  				continue
 21193  			}
 21194  			v.reset(OpLess8U)
 21195  			v0 := b.NewValue0(v.Pos, OpConst8, x.Type)
 21196  			v0.AuxInt = int8ToAuxInt(c - d - 1)
 21197  			v1 := b.NewValue0(v.Pos, OpSub8, x.Type)
 21198  			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
 21199  			v2.AuxInt = int8ToAuxInt(d + 1)
 21200  			v1.AddArg2(x, v2)
 21201  			v.AddArg2(v0, v1)
 21202  			return true
 21203  		}
 21204  		break
 21205  	}
 21206  	// match: (OrB (Leq8 (Const8 [c]) x) (Leq8 x (Const8 [d])))
 21207  	// cond: c >= d+1 && d+1 > d
 21208  	// result: (Leq8U (Const8 <x.Type> [c-d-1]) (Sub8 <x.Type> x (Const8 <x.Type> [d+1])))
 21209  	for {
 21210  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21211  			if v_0.Op != OpLeq8 {
 21212  				continue
 21213  			}
 21214  			x := v_0.Args[1]
 21215  			v_0_0 := v_0.Args[0]
 21216  			if v_0_0.Op != OpConst8 {
 21217  				continue
 21218  			}
 21219  			c := auxIntToInt8(v_0_0.AuxInt)
 21220  			if v_1.Op != OpLeq8 {
 21221  				continue
 21222  			}
 21223  			_ = v_1.Args[1]
 21224  			if x != v_1.Args[0] {
 21225  				continue
 21226  			}
 21227  			v_1_1 := v_1.Args[1]
 21228  			if v_1_1.Op != OpConst8 {
 21229  				continue
 21230  			}
 21231  			d := auxIntToInt8(v_1_1.AuxInt)
 21232  			if !(c >= d+1 && d+1 > d) {
 21233  				continue
 21234  			}
 21235  			v.reset(OpLeq8U)
 21236  			v0 := b.NewValue0(v.Pos, OpConst8, x.Type)
 21237  			v0.AuxInt = int8ToAuxInt(c - d - 1)
 21238  			v1 := b.NewValue0(v.Pos, OpSub8, x.Type)
 21239  			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
 21240  			v2.AuxInt = int8ToAuxInt(d + 1)
 21241  			v1.AddArg2(x, v2)
 21242  			v.AddArg2(v0, v1)
 21243  			return true
 21244  		}
 21245  		break
 21246  	}
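	// The remaining groups are the unsigned counterparts of the rules above:
	// the comparisons being combined are already unsigned, so the side
	// conditions compare c and d as unsigned values of the matching width.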
 21247  	// match: (OrB (Less64U (Const64 [c]) x) (Less64U x (Const64 [d])))
 21248  	// cond: uint64(c) >= uint64(d)
 21249  	// result: (Less64U (Const64 <x.Type> [c-d]) (Sub64 <x.Type> x (Const64 <x.Type> [d])))
 21250  	for {
 21251  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21252  			if v_0.Op != OpLess64U {
 21253  				continue
 21254  			}
 21255  			x := v_0.Args[1]
 21256  			v_0_0 := v_0.Args[0]
 21257  			if v_0_0.Op != OpConst64 {
 21258  				continue
 21259  			}
 21260  			c := auxIntToInt64(v_0_0.AuxInt)
 21261  			if v_1.Op != OpLess64U {
 21262  				continue
 21263  			}
 21264  			_ = v_1.Args[1]
 21265  			if x != v_1.Args[0] {
 21266  				continue
 21267  			}
 21268  			v_1_1 := v_1.Args[1]
 21269  			if v_1_1.Op != OpConst64 {
 21270  				continue
 21271  			}
 21272  			d := auxIntToInt64(v_1_1.AuxInt)
 21273  			if !(uint64(c) >= uint64(d)) {
 21274  				continue
 21275  			}
 21276  			v.reset(OpLess64U)
 21277  			v0 := b.NewValue0(v.Pos, OpConst64, x.Type)
 21278  			v0.AuxInt = int64ToAuxInt(c - d)
 21279  			v1 := b.NewValue0(v.Pos, OpSub64, x.Type)
 21280  			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
 21281  			v2.AuxInt = int64ToAuxInt(d)
 21282  			v1.AddArg2(x, v2)
 21283  			v.AddArg2(v0, v1)
 21284  			return true
 21285  		}
 21286  		break
 21287  	}
 21288  	// match: (OrB (Leq64U (Const64 [c]) x) (Less64U x (Const64 [d])))
 21289  	// cond: uint64(c) >= uint64(d)
 21290  	// result: (Leq64U (Const64 <x.Type> [c-d]) (Sub64 <x.Type> x (Const64 <x.Type> [d])))
 21291  	for {
 21292  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21293  			if v_0.Op != OpLeq64U {
 21294  				continue
 21295  			}
 21296  			x := v_0.Args[1]
 21297  			v_0_0 := v_0.Args[0]
 21298  			if v_0_0.Op != OpConst64 {
 21299  				continue
 21300  			}
 21301  			c := auxIntToInt64(v_0_0.AuxInt)
 21302  			if v_1.Op != OpLess64U {
 21303  				continue
 21304  			}
 21305  			_ = v_1.Args[1]
 21306  			if x != v_1.Args[0] {
 21307  				continue
 21308  			}
 21309  			v_1_1 := v_1.Args[1]
 21310  			if v_1_1.Op != OpConst64 {
 21311  				continue
 21312  			}
 21313  			d := auxIntToInt64(v_1_1.AuxInt)
 21314  			if !(uint64(c) >= uint64(d)) {
 21315  				continue
 21316  			}
 21317  			v.reset(OpLeq64U)
 21318  			v0 := b.NewValue0(v.Pos, OpConst64, x.Type)
 21319  			v0.AuxInt = int64ToAuxInt(c - d)
 21320  			v1 := b.NewValue0(v.Pos, OpSub64, x.Type)
 21321  			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
 21322  			v2.AuxInt = int64ToAuxInt(d)
 21323  			v1.AddArg2(x, v2)
 21324  			v.AddArg2(v0, v1)
 21325  			return true
 21326  		}
 21327  		break
 21328  	}
 21329  	// match: (OrB (Less32U (Const32 [c]) x) (Less32U x (Const32 [d])))
 21330  	// cond: uint32(c) >= uint32(d)
 21331  	// result: (Less32U (Const32 <x.Type> [c-d]) (Sub32 <x.Type> x (Const32 <x.Type> [d])))
 21332  	for {
 21333  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21334  			if v_0.Op != OpLess32U {
 21335  				continue
 21336  			}
 21337  			x := v_0.Args[1]
 21338  			v_0_0 := v_0.Args[0]
 21339  			if v_0_0.Op != OpConst32 {
 21340  				continue
 21341  			}
 21342  			c := auxIntToInt32(v_0_0.AuxInt)
 21343  			if v_1.Op != OpLess32U {
 21344  				continue
 21345  			}
 21346  			_ = v_1.Args[1]
 21347  			if x != v_1.Args[0] {
 21348  				continue
 21349  			}
 21350  			v_1_1 := v_1.Args[1]
 21351  			if v_1_1.Op != OpConst32 {
 21352  				continue
 21353  			}
 21354  			d := auxIntToInt32(v_1_1.AuxInt)
 21355  			if !(uint32(c) >= uint32(d)) {
 21356  				continue
 21357  			}
 21358  			v.reset(OpLess32U)
 21359  			v0 := b.NewValue0(v.Pos, OpConst32, x.Type)
 21360  			v0.AuxInt = int32ToAuxInt(c - d)
 21361  			v1 := b.NewValue0(v.Pos, OpSub32, x.Type)
 21362  			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
 21363  			v2.AuxInt = int32ToAuxInt(d)
 21364  			v1.AddArg2(x, v2)
 21365  			v.AddArg2(v0, v1)
 21366  			return true
 21367  		}
 21368  		break
 21369  	}
 21370  	// match: (OrB (Leq32U (Const32 [c]) x) (Less32U x (Const32 [d])))
 21371  	// cond: uint32(c) >= uint32(d)
 21372  	// result: (Leq32U (Const32 <x.Type> [c-d]) (Sub32 <x.Type> x (Const32 <x.Type> [d])))
 21373  	for {
 21374  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21375  			if v_0.Op != OpLeq32U {
 21376  				continue
 21377  			}
 21378  			x := v_0.Args[1]
 21379  			v_0_0 := v_0.Args[0]
 21380  			if v_0_0.Op != OpConst32 {
 21381  				continue
 21382  			}
 21383  			c := auxIntToInt32(v_0_0.AuxInt)
 21384  			if v_1.Op != OpLess32U {
 21385  				continue
 21386  			}
 21387  			_ = v_1.Args[1]
 21388  			if x != v_1.Args[0] {
 21389  				continue
 21390  			}
 21391  			v_1_1 := v_1.Args[1]
 21392  			if v_1_1.Op != OpConst32 {
 21393  				continue
 21394  			}
 21395  			d := auxIntToInt32(v_1_1.AuxInt)
 21396  			if !(uint32(c) >= uint32(d)) {
 21397  				continue
 21398  			}
 21399  			v.reset(OpLeq32U)
 21400  			v0 := b.NewValue0(v.Pos, OpConst32, x.Type)
 21401  			v0.AuxInt = int32ToAuxInt(c - d)
 21402  			v1 := b.NewValue0(v.Pos, OpSub32, x.Type)
 21403  			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
 21404  			v2.AuxInt = int32ToAuxInt(d)
 21405  			v1.AddArg2(x, v2)
 21406  			v.AddArg2(v0, v1)
 21407  			return true
 21408  		}
 21409  		break
 21410  	}
 21411  	// match: (OrB (Less16U (Const16 [c]) x) (Less16U x (Const16 [d])))
 21412  	// cond: uint16(c) >= uint16(d)
 21413  	// result: (Less16U (Const16 <x.Type> [c-d]) (Sub16 <x.Type> x (Const16 <x.Type> [d])))
 21414  	for {
 21415  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21416  			if v_0.Op != OpLess16U {
 21417  				continue
 21418  			}
 21419  			x := v_0.Args[1]
 21420  			v_0_0 := v_0.Args[0]
 21421  			if v_0_0.Op != OpConst16 {
 21422  				continue
 21423  			}
 21424  			c := auxIntToInt16(v_0_0.AuxInt)
 21425  			if v_1.Op != OpLess16U {
 21426  				continue
 21427  			}
 21428  			_ = v_1.Args[1]
 21429  			if x != v_1.Args[0] {
 21430  				continue
 21431  			}
 21432  			v_1_1 := v_1.Args[1]
 21433  			if v_1_1.Op != OpConst16 {
 21434  				continue
 21435  			}
 21436  			d := auxIntToInt16(v_1_1.AuxInt)
 21437  			if !(uint16(c) >= uint16(d)) {
 21438  				continue
 21439  			}
 21440  			v.reset(OpLess16U)
 21441  			v0 := b.NewValue0(v.Pos, OpConst16, x.Type)
 21442  			v0.AuxInt = int16ToAuxInt(c - d)
 21443  			v1 := b.NewValue0(v.Pos, OpSub16, x.Type)
 21444  			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
 21445  			v2.AuxInt = int16ToAuxInt(d)
 21446  			v1.AddArg2(x, v2)
 21447  			v.AddArg2(v0, v1)
 21448  			return true
 21449  		}
 21450  		break
 21451  	}
 21452  	// match: (OrB (Leq16U (Const16 [c]) x) (Less16U x (Const16 [d])))
 21453  	// cond: uint16(c) >= uint16(d)
 21454  	// result: (Leq16U (Const16 <x.Type> [c-d]) (Sub16 <x.Type> x (Const16 <x.Type> [d])))
 21455  	for {
 21456  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21457  			if v_0.Op != OpLeq16U {
 21458  				continue
 21459  			}
 21460  			x := v_0.Args[1]
 21461  			v_0_0 := v_0.Args[0]
 21462  			if v_0_0.Op != OpConst16 {
 21463  				continue
 21464  			}
 21465  			c := auxIntToInt16(v_0_0.AuxInt)
 21466  			if v_1.Op != OpLess16U {
 21467  				continue
 21468  			}
 21469  			_ = v_1.Args[1]
 21470  			if x != v_1.Args[0] {
 21471  				continue
 21472  			}
 21473  			v_1_1 := v_1.Args[1]
 21474  			if v_1_1.Op != OpConst16 {
 21475  				continue
 21476  			}
 21477  			d := auxIntToInt16(v_1_1.AuxInt)
 21478  			if !(uint16(c) >= uint16(d)) {
 21479  				continue
 21480  			}
 21481  			v.reset(OpLeq16U)
 21482  			v0 := b.NewValue0(v.Pos, OpConst16, x.Type)
 21483  			v0.AuxInt = int16ToAuxInt(c - d)
 21484  			v1 := b.NewValue0(v.Pos, OpSub16, x.Type)
 21485  			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
 21486  			v2.AuxInt = int16ToAuxInt(d)
 21487  			v1.AddArg2(x, v2)
 21488  			v.AddArg2(v0, v1)
 21489  			return true
 21490  		}
 21491  		break
 21492  	}
 21493  	// match: (OrB (Less8U (Const8 [c]) x) (Less8U x (Const8 [d])))
 21494  	// cond: uint8(c) >= uint8(d)
 21495  	// result: (Less8U (Const8 <x.Type> [c-d]) (Sub8 <x.Type> x (Const8 <x.Type> [d])))
 21496  	for {
 21497  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21498  			if v_0.Op != OpLess8U {
 21499  				continue
 21500  			}
 21501  			x := v_0.Args[1]
 21502  			v_0_0 := v_0.Args[0]
 21503  			if v_0_0.Op != OpConst8 {
 21504  				continue
 21505  			}
 21506  			c := auxIntToInt8(v_0_0.AuxInt)
 21507  			if v_1.Op != OpLess8U {
 21508  				continue
 21509  			}
 21510  			_ = v_1.Args[1]
 21511  			if x != v_1.Args[0] {
 21512  				continue
 21513  			}
 21514  			v_1_1 := v_1.Args[1]
 21515  			if v_1_1.Op != OpConst8 {
 21516  				continue
 21517  			}
 21518  			d := auxIntToInt8(v_1_1.AuxInt)
 21519  			if !(uint8(c) >= uint8(d)) {
 21520  				continue
 21521  			}
 21522  			v.reset(OpLess8U)
 21523  			v0 := b.NewValue0(v.Pos, OpConst8, x.Type)
 21524  			v0.AuxInt = int8ToAuxInt(c - d)
 21525  			v1 := b.NewValue0(v.Pos, OpSub8, x.Type)
 21526  			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
 21527  			v2.AuxInt = int8ToAuxInt(d)
 21528  			v1.AddArg2(x, v2)
 21529  			v.AddArg2(v0, v1)
 21530  			return true
 21531  		}
 21532  		break
 21533  	}
 21534  	// match: (OrB (Leq8U (Const8 [c]) x) (Less8U x (Const8 [d])))
 21535  	// cond: uint8(c) >= uint8(d)
 21536  	// result: (Leq8U (Const8 <x.Type> [c-d]) (Sub8 <x.Type> x (Const8 <x.Type> [d])))
 21537  	for {
 21538  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21539  			if v_0.Op != OpLeq8U {
 21540  				continue
 21541  			}
 21542  			x := v_0.Args[1]
 21543  			v_0_0 := v_0.Args[0]
 21544  			if v_0_0.Op != OpConst8 {
 21545  				continue
 21546  			}
 21547  			c := auxIntToInt8(v_0_0.AuxInt)
 21548  			if v_1.Op != OpLess8U {
 21549  				continue
 21550  			}
 21551  			_ = v_1.Args[1]
 21552  			if x != v_1.Args[0] {
 21553  				continue
 21554  			}
 21555  			v_1_1 := v_1.Args[1]
 21556  			if v_1_1.Op != OpConst8 {
 21557  				continue
 21558  			}
 21559  			d := auxIntToInt8(v_1_1.AuxInt)
 21560  			if !(uint8(c) >= uint8(d)) {
 21561  				continue
 21562  			}
 21563  			v.reset(OpLeq8U)
 21564  			v0 := b.NewValue0(v.Pos, OpConst8, x.Type)
 21565  			v0.AuxInt = int8ToAuxInt(c - d)
 21566  			v1 := b.NewValue0(v.Pos, OpSub8, x.Type)
 21567  			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
 21568  			v2.AuxInt = int8ToAuxInt(d)
 21569  			v1.AddArg2(x, v2)
 21570  			v.AddArg2(v0, v1)
 21571  			return true
 21572  		}
 21573  		break
 21574  	}
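	// As in the signed case, a Leq on the right-hand side is folded by
	// substituting d+1 for d; the guard uint(d+1) > uint(d) rules out the
	// all-ones value of d, where d+1 would wrap around to zero.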
 21575  	// match: (OrB (Less64U (Const64 [c]) x) (Leq64U x (Const64 [d])))
 21576  	// cond: uint64(c) >= uint64(d+1) && uint64(d+1) > uint64(d)
 21577  	// result: (Less64U (Const64 <x.Type> [c-d-1]) (Sub64 <x.Type> x (Const64 <x.Type> [d+1])))
 21578  	for {
 21579  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21580  			if v_0.Op != OpLess64U {
 21581  				continue
 21582  			}
 21583  			x := v_0.Args[1]
 21584  			v_0_0 := v_0.Args[0]
 21585  			if v_0_0.Op != OpConst64 {
 21586  				continue
 21587  			}
 21588  			c := auxIntToInt64(v_0_0.AuxInt)
 21589  			if v_1.Op != OpLeq64U {
 21590  				continue
 21591  			}
 21592  			_ = v_1.Args[1]
 21593  			if x != v_1.Args[0] {
 21594  				continue
 21595  			}
 21596  			v_1_1 := v_1.Args[1]
 21597  			if v_1_1.Op != OpConst64 {
 21598  				continue
 21599  			}
 21600  			d := auxIntToInt64(v_1_1.AuxInt)
 21601  			if !(uint64(c) >= uint64(d+1) && uint64(d+1) > uint64(d)) {
 21602  				continue
 21603  			}
 21604  			v.reset(OpLess64U)
 21605  			v0 := b.NewValue0(v.Pos, OpConst64, x.Type)
 21606  			v0.AuxInt = int64ToAuxInt(c - d - 1)
 21607  			v1 := b.NewValue0(v.Pos, OpSub64, x.Type)
 21608  			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
 21609  			v2.AuxInt = int64ToAuxInt(d + 1)
 21610  			v1.AddArg2(x, v2)
 21611  			v.AddArg2(v0, v1)
 21612  			return true
 21613  		}
 21614  		break
 21615  	}
 21616  	// match: (OrB (Leq64U (Const64 [c]) x) (Leq64U x (Const64 [d])))
 21617  	// cond: uint64(c) >= uint64(d+1) && uint64(d+1) > uint64(d)
 21618  	// result: (Leq64U (Const64 <x.Type> [c-d-1]) (Sub64 <x.Type> x (Const64 <x.Type> [d+1])))
 21619  	for {
 21620  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21621  			if v_0.Op != OpLeq64U {
 21622  				continue
 21623  			}
 21624  			x := v_0.Args[1]
 21625  			v_0_0 := v_0.Args[0]
 21626  			if v_0_0.Op != OpConst64 {
 21627  				continue
 21628  			}
 21629  			c := auxIntToInt64(v_0_0.AuxInt)
 21630  			if v_1.Op != OpLeq64U {
 21631  				continue
 21632  			}
 21633  			_ = v_1.Args[1]
 21634  			if x != v_1.Args[0] {
 21635  				continue
 21636  			}
 21637  			v_1_1 := v_1.Args[1]
 21638  			if v_1_1.Op != OpConst64 {
 21639  				continue
 21640  			}
 21641  			d := auxIntToInt64(v_1_1.AuxInt)
 21642  			if !(uint64(c) >= uint64(d+1) && uint64(d+1) > uint64(d)) {
 21643  				continue
 21644  			}
 21645  			v.reset(OpLeq64U)
 21646  			v0 := b.NewValue0(v.Pos, OpConst64, x.Type)
 21647  			v0.AuxInt = int64ToAuxInt(c - d - 1)
 21648  			v1 := b.NewValue0(v.Pos, OpSub64, x.Type)
 21649  			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
 21650  			v2.AuxInt = int64ToAuxInt(d + 1)
 21651  			v1.AddArg2(x, v2)
 21652  			v.AddArg2(v0, v1)
 21653  			return true
 21654  		}
 21655  		break
 21656  	}
 21657  	// match: (OrB (Less32U (Const32 [c]) x) (Leq32U x (Const32 [d])))
 21658  	// cond: uint32(c) >= uint32(d+1) && uint32(d+1) > uint32(d)
 21659  	// result: (Less32U (Const32 <x.Type> [c-d-1]) (Sub32 <x.Type> x (Const32 <x.Type> [d+1])))
 21660  	for {
 21661  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21662  			if v_0.Op != OpLess32U {
 21663  				continue
 21664  			}
 21665  			x := v_0.Args[1]
 21666  			v_0_0 := v_0.Args[0]
 21667  			if v_0_0.Op != OpConst32 {
 21668  				continue
 21669  			}
 21670  			c := auxIntToInt32(v_0_0.AuxInt)
 21671  			if v_1.Op != OpLeq32U {
 21672  				continue
 21673  			}
 21674  			_ = v_1.Args[1]
 21675  			if x != v_1.Args[0] {
 21676  				continue
 21677  			}
 21678  			v_1_1 := v_1.Args[1]
 21679  			if v_1_1.Op != OpConst32 {
 21680  				continue
 21681  			}
 21682  			d := auxIntToInt32(v_1_1.AuxInt)
 21683  			if !(uint32(c) >= uint32(d+1) && uint32(d+1) > uint32(d)) {
 21684  				continue
 21685  			}
 21686  			v.reset(OpLess32U)
 21687  			v0 := b.NewValue0(v.Pos, OpConst32, x.Type)
 21688  			v0.AuxInt = int32ToAuxInt(c - d - 1)
 21689  			v1 := b.NewValue0(v.Pos, OpSub32, x.Type)
 21690  			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
 21691  			v2.AuxInt = int32ToAuxInt(d + 1)
 21692  			v1.AddArg2(x, v2)
 21693  			v.AddArg2(v0, v1)
 21694  			return true
 21695  		}
 21696  		break
 21697  	}
 21698  	// match: (OrB (Leq32U (Const32 [c]) x) (Leq32U x (Const32 [d])))
 21699  	// cond: uint32(c) >= uint32(d+1) && uint32(d+1) > uint32(d)
 21700  	// result: (Leq32U (Const32 <x.Type> [c-d-1]) (Sub32 <x.Type> x (Const32 <x.Type> [d+1])))
 21701  	for {
 21702  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21703  			if v_0.Op != OpLeq32U {
 21704  				continue
 21705  			}
 21706  			x := v_0.Args[1]
 21707  			v_0_0 := v_0.Args[0]
 21708  			if v_0_0.Op != OpConst32 {
 21709  				continue
 21710  			}
 21711  			c := auxIntToInt32(v_0_0.AuxInt)
 21712  			if v_1.Op != OpLeq32U {
 21713  				continue
 21714  			}
 21715  			_ = v_1.Args[1]
 21716  			if x != v_1.Args[0] {
 21717  				continue
 21718  			}
 21719  			v_1_1 := v_1.Args[1]
 21720  			if v_1_1.Op != OpConst32 {
 21721  				continue
 21722  			}
 21723  			d := auxIntToInt32(v_1_1.AuxInt)
 21724  			if !(uint32(c) >= uint32(d+1) && uint32(d+1) > uint32(d)) {
 21725  				continue
 21726  			}
 21727  			v.reset(OpLeq32U)
 21728  			v0 := b.NewValue0(v.Pos, OpConst32, x.Type)
 21729  			v0.AuxInt = int32ToAuxInt(c - d - 1)
 21730  			v1 := b.NewValue0(v.Pos, OpSub32, x.Type)
 21731  			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
 21732  			v2.AuxInt = int32ToAuxInt(d + 1)
 21733  			v1.AddArg2(x, v2)
 21734  			v.AddArg2(v0, v1)
 21735  			return true
 21736  		}
 21737  		break
 21738  	}
 21739  	// match: (OrB (Less16U (Const16 [c]) x) (Leq16U x (Const16 [d])))
 21740  	// cond: uint16(c) >= uint16(d+1) && uint16(d+1) > uint16(d)
 21741  	// result: (Less16U (Const16 <x.Type> [c-d-1]) (Sub16 <x.Type> x (Const16 <x.Type> [d+1])))
 21742  	for {
 21743  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21744  			if v_0.Op != OpLess16U {
 21745  				continue
 21746  			}
 21747  			x := v_0.Args[1]
 21748  			v_0_0 := v_0.Args[0]
 21749  			if v_0_0.Op != OpConst16 {
 21750  				continue
 21751  			}
 21752  			c := auxIntToInt16(v_0_0.AuxInt)
 21753  			if v_1.Op != OpLeq16U {
 21754  				continue
 21755  			}
 21756  			_ = v_1.Args[1]
 21757  			if x != v_1.Args[0] {
 21758  				continue
 21759  			}
 21760  			v_1_1 := v_1.Args[1]
 21761  			if v_1_1.Op != OpConst16 {
 21762  				continue
 21763  			}
 21764  			d := auxIntToInt16(v_1_1.AuxInt)
 21765  			if !(uint16(c) >= uint16(d+1) && uint16(d+1) > uint16(d)) {
 21766  				continue
 21767  			}
 21768  			v.reset(OpLess16U)
 21769  			v0 := b.NewValue0(v.Pos, OpConst16, x.Type)
 21770  			v0.AuxInt = int16ToAuxInt(c - d - 1)
 21771  			v1 := b.NewValue0(v.Pos, OpSub16, x.Type)
 21772  			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
 21773  			v2.AuxInt = int16ToAuxInt(d + 1)
 21774  			v1.AddArg2(x, v2)
 21775  			v.AddArg2(v0, v1)
 21776  			return true
 21777  		}
 21778  		break
 21779  	}
 21780  	// match: (OrB (Leq16U (Const16 [c]) x) (Leq16U x (Const16 [d])))
 21781  	// cond: uint16(c) >= uint16(d+1) && uint16(d+1) > uint16(d)
 21782  	// result: (Leq16U (Const16 <x.Type> [c-d-1]) (Sub16 <x.Type> x (Const16 <x.Type> [d+1])))
 21783  	for {
 21784  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21785  			if v_0.Op != OpLeq16U {
 21786  				continue
 21787  			}
 21788  			x := v_0.Args[1]
 21789  			v_0_0 := v_0.Args[0]
 21790  			if v_0_0.Op != OpConst16 {
 21791  				continue
 21792  			}
 21793  			c := auxIntToInt16(v_0_0.AuxInt)
 21794  			if v_1.Op != OpLeq16U {
 21795  				continue
 21796  			}
 21797  			_ = v_1.Args[1]
 21798  			if x != v_1.Args[0] {
 21799  				continue
 21800  			}
 21801  			v_1_1 := v_1.Args[1]
 21802  			if v_1_1.Op != OpConst16 {
 21803  				continue
 21804  			}
 21805  			d := auxIntToInt16(v_1_1.AuxInt)
 21806  			if !(uint16(c) >= uint16(d+1) && uint16(d+1) > uint16(d)) {
 21807  				continue
 21808  			}
 21809  			v.reset(OpLeq16U)
 21810  			v0 := b.NewValue0(v.Pos, OpConst16, x.Type)
 21811  			v0.AuxInt = int16ToAuxInt(c - d - 1)
 21812  			v1 := b.NewValue0(v.Pos, OpSub16, x.Type)
 21813  			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
 21814  			v2.AuxInt = int16ToAuxInt(d + 1)
 21815  			v1.AddArg2(x, v2)
 21816  			v.AddArg2(v0, v1)
 21817  			return true
 21818  		}
 21819  		break
 21820  	}
 21821  	// match: (OrB (Less8U (Const8 [c]) x) (Leq8U x (Const8 [d])))
 21822  	// cond: uint8(c) >= uint8(d+1) && uint8(d+1) > uint8(d)
 21823  	// result: (Less8U (Const8 <x.Type> [c-d-1]) (Sub8 <x.Type> x (Const8 <x.Type> [d+1])))
 21824  	for {
 21825  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21826  			if v_0.Op != OpLess8U {
 21827  				continue
 21828  			}
 21829  			x := v_0.Args[1]
 21830  			v_0_0 := v_0.Args[0]
 21831  			if v_0_0.Op != OpConst8 {
 21832  				continue
 21833  			}
 21834  			c := auxIntToInt8(v_0_0.AuxInt)
 21835  			if v_1.Op != OpLeq8U {
 21836  				continue
 21837  			}
 21838  			_ = v_1.Args[1]
 21839  			if x != v_1.Args[0] {
 21840  				continue
 21841  			}
 21842  			v_1_1 := v_1.Args[1]
 21843  			if v_1_1.Op != OpConst8 {
 21844  				continue
 21845  			}
 21846  			d := auxIntToInt8(v_1_1.AuxInt)
 21847  			if !(uint8(c) >= uint8(d+1) && uint8(d+1) > uint8(d)) {
 21848  				continue
 21849  			}
 21850  			v.reset(OpLess8U)
 21851  			v0 := b.NewValue0(v.Pos, OpConst8, x.Type)
 21852  			v0.AuxInt = int8ToAuxInt(c - d - 1)
 21853  			v1 := b.NewValue0(v.Pos, OpSub8, x.Type)
 21854  			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
 21855  			v2.AuxInt = int8ToAuxInt(d + 1)
 21856  			v1.AddArg2(x, v2)
 21857  			v.AddArg2(v0, v1)
 21858  			return true
 21859  		}
 21860  		break
 21861  	}
 21862  	// match: (OrB (Leq8U (Const8 [c]) x) (Leq8U x (Const8 [d])))
 21863  	// cond: uint8(c) >= uint8(d+1) && uint8(d+1) > uint8(d)
 21864  	// result: (Leq8U (Const8 <x.Type> [c-d-1]) (Sub8 <x.Type> x (Const8 <x.Type> [d+1])))
 21865  	for {
 21866  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21867  			if v_0.Op != OpLeq8U {
 21868  				continue
 21869  			}
 21870  			x := v_0.Args[1]
 21871  			v_0_0 := v_0.Args[0]
 21872  			if v_0_0.Op != OpConst8 {
 21873  				continue
 21874  			}
 21875  			c := auxIntToInt8(v_0_0.AuxInt)
 21876  			if v_1.Op != OpLeq8U {
 21877  				continue
 21878  			}
 21879  			_ = v_1.Args[1]
 21880  			if x != v_1.Args[0] {
 21881  				continue
 21882  			}
 21883  			v_1_1 := v_1.Args[1]
 21884  			if v_1_1.Op != OpConst8 {
 21885  				continue
 21886  			}
 21887  			d := auxIntToInt8(v_1_1.AuxInt)
 21888  			if !(uint8(c) >= uint8(d+1) && uint8(d+1) > uint8(d)) {
 21889  				continue
 21890  			}
 21891  			v.reset(OpLeq8U)
 21892  			v0 := b.NewValue0(v.Pos, OpConst8, x.Type)
 21893  			v0.AuxInt = int8ToAuxInt(c - d - 1)
 21894  			v1 := b.NewValue0(v.Pos, OpSub8, x.Type)
 21895  			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
 21896  			v2.AuxInt = int8ToAuxInt(d + 1)
 21897  			v1.AddArg2(x, v2)
 21898  			v.AddArg2(v0, v1)
 21899  			return true
 21900  		}
 21901  		break
 21902  	}
 21903  	return false
 21904  }
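// rewriteValuegeneric_OpPhi simplifies two-argument Phis: a Phi whose
// arguments are the same constant is replaced by that constant, and a Phi of
// two single-use Not values is rewritten as Not of a Phi, since
// phi(!x, !y) and !phi(x, y) select the same value on either incoming edge.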
 21905  func rewriteValuegeneric_OpPhi(v *Value) bool {
 21906  	b := v.Block
 21907  	// match: (Phi (Const8 [c]) (Const8 [c]))
 21908  	// result: (Const8 [c])
 21909  	for {
 21910  		if len(v.Args) != 2 {
 21911  			break
 21912  		}
 21913  		_ = v.Args[1]
 21914  		v_0 := v.Args[0]
 21915  		if v_0.Op != OpConst8 {
 21916  			break
 21917  		}
 21918  		c := auxIntToInt8(v_0.AuxInt)
 21919  		v_1 := v.Args[1]
 21920  		if v_1.Op != OpConst8 || auxIntToInt8(v_1.AuxInt) != c {
 21921  			break
 21922  		}
 21923  		v.reset(OpConst8)
 21924  		v.AuxInt = int8ToAuxInt(c)
 21925  		return true
 21926  	}
 21927  	// match: (Phi (Const16 [c]) (Const16 [c]))
 21928  	// result: (Const16 [c])
 21929  	for {
 21930  		if len(v.Args) != 2 {
 21931  			break
 21932  		}
 21933  		_ = v.Args[1]
 21934  		v_0 := v.Args[0]
 21935  		if v_0.Op != OpConst16 {
 21936  			break
 21937  		}
 21938  		c := auxIntToInt16(v_0.AuxInt)
 21939  		v_1 := v.Args[1]
 21940  		if v_1.Op != OpConst16 || auxIntToInt16(v_1.AuxInt) != c {
 21941  			break
 21942  		}
 21943  		v.reset(OpConst16)
 21944  		v.AuxInt = int16ToAuxInt(c)
 21945  		return true
 21946  	}
 21947  	// match: (Phi (Const32 [c]) (Const32 [c]))
 21948  	// result: (Const32 [c])
 21949  	for {
 21950  		if len(v.Args) != 2 {
 21951  			break
 21952  		}
 21953  		_ = v.Args[1]
 21954  		v_0 := v.Args[0]
 21955  		if v_0.Op != OpConst32 {
 21956  			break
 21957  		}
 21958  		c := auxIntToInt32(v_0.AuxInt)
 21959  		v_1 := v.Args[1]
 21960  		if v_1.Op != OpConst32 || auxIntToInt32(v_1.AuxInt) != c {
 21961  			break
 21962  		}
 21963  		v.reset(OpConst32)
 21964  		v.AuxInt = int32ToAuxInt(c)
 21965  		return true
 21966  	}
 21967  	// match: (Phi (Const64 [c]) (Const64 [c]))
 21968  	// result: (Const64 [c])
 21969  	for {
 21970  		if len(v.Args) != 2 {
 21971  			break
 21972  		}
 21973  		_ = v.Args[1]
 21974  		v_0 := v.Args[0]
 21975  		if v_0.Op != OpConst64 {
 21976  			break
 21977  		}
 21978  		c := auxIntToInt64(v_0.AuxInt)
 21979  		v_1 := v.Args[1]
 21980  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c {
 21981  			break
 21982  		}
 21983  		v.reset(OpConst64)
 21984  		v.AuxInt = int64ToAuxInt(c)
 21985  		return true
 21986  	}
 21987  	// match: (Phi <t> nx:(Not x) ny:(Not y))
 21988  	// cond: nx.Uses == 1 && ny.Uses == 1
 21989  	// result: (Not (Phi <t> x y))
 21990  	for {
 21991  		if len(v.Args) != 2 {
 21992  			break
 21993  		}
 21994  		t := v.Type
 21995  		_ = v.Args[1]
 21996  		nx := v.Args[0]
 21997  		if nx.Op != OpNot {
 21998  			break
 21999  		}
 22000  		x := nx.Args[0]
 22001  		ny := v.Args[1]
 22002  		if ny.Op != OpNot {
 22003  			break
 22004  		}
 22005  		y := ny.Args[0]
 22006  		if !(nx.Uses == 1 && ny.Uses == 1) {
 22007  			break
 22008  		}
 22009  		v.reset(OpNot)
 22010  		v0 := b.NewValue0(v.Pos, OpPhi, t)
 22011  		v0.AddArg2(x, y)
 22012  		v.AddArg(v0)
 22013  		return true
 22014  	}
 22015  	return false
 22016  }
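// rewriteValuegeneric_OpPtrIndex lowers pointer indexing to explicit address
// arithmetic, ptr + idx*elemsize, using a 32-bit multiply on 4-byte-pointer
// targets (when the element size fits in 32 bits) and a 64-bit multiply on
// 8-byte-pointer targets.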
 22017  func rewriteValuegeneric_OpPtrIndex(v *Value) bool {
 22018  	v_1 := v.Args[1]
 22019  	v_0 := v.Args[0]
 22020  	b := v.Block
 22021  	config := b.Func.Config
 22022  	typ := &b.Func.Config.Types
 22023  	// match: (PtrIndex <t> ptr idx)
 22024  	// cond: config.PtrSize == 4 && is32Bit(t.Elem().Size())
 22025  	// result: (AddPtr ptr (Mul32 <typ.Int> idx (Const32 <typ.Int> [int32(t.Elem().Size())])))
 22026  	for {
 22027  		t := v.Type
 22028  		ptr := v_0
 22029  		idx := v_1
 22030  		if !(config.PtrSize == 4 && is32Bit(t.Elem().Size())) {
 22031  			break
 22032  		}
 22033  		v.reset(OpAddPtr)
 22034  		v0 := b.NewValue0(v.Pos, OpMul32, typ.Int)
 22035  		v1 := b.NewValue0(v.Pos, OpConst32, typ.Int)
 22036  		v1.AuxInt = int32ToAuxInt(int32(t.Elem().Size()))
 22037  		v0.AddArg2(idx, v1)
 22038  		v.AddArg2(ptr, v0)
 22039  		return true
 22040  	}
 22041  	// match: (PtrIndex <t> ptr idx)
 22042  	// cond: config.PtrSize == 8
 22043  	// result: (AddPtr ptr (Mul64 <typ.Int> idx (Const64 <typ.Int> [t.Elem().Size()])))
 22044  	for {
 22045  		t := v.Type
 22046  		ptr := v_0
 22047  		idx := v_1
 22048  		if !(config.PtrSize == 8) {
 22049  			break
 22050  		}
 22051  		v.reset(OpAddPtr)
 22052  		v0 := b.NewValue0(v.Pos, OpMul64, typ.Int)
 22053  		v1 := b.NewValue0(v.Pos, OpConst64, typ.Int)
 22054  		v1.AuxInt = int64ToAuxInt(t.Elem().Size())
 22055  		v0.AddArg2(idx, v1)
 22056  		v.AddArg2(ptr, v0)
 22057  		return true
 22058  	}
 22059  	return false
 22060  }
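// rewriteValuegeneric_OpRotateLeft16 normalizes the rotation count of a
// 16-bit rotate.  Because the rotate depends only on the count modulo 16,
// a constant count that is a multiple of 16 is the identity, masking the
// count with a constant whose low four bits are all set (c&15 == 15) is
// redundant, adding a multiple of 16 is a no-op, and subtracting the count
// from a multiple of 16 is the same as rotating by the negated count.
// Nested rotates are combined by adding their counts, and a 64-bit constant
// count is narrowed to 32 bits on 4-byte-pointer targets.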
 22061  func rewriteValuegeneric_OpRotateLeft16(v *Value) bool {
 22062  	v_1 := v.Args[1]
 22063  	v_0 := v.Args[0]
 22064  	b := v.Block
 22065  	config := b.Func.Config
 22066  	// match: (RotateLeft16 x (Const16 [c]))
 22067  	// cond: c%16 == 0
 22068  	// result: x
 22069  	for {
 22070  		x := v_0
 22071  		if v_1.Op != OpConst16 {
 22072  			break
 22073  		}
 22074  		c := auxIntToInt16(v_1.AuxInt)
 22075  		if !(c%16 == 0) {
 22076  			break
 22077  		}
 22078  		v.copyOf(x)
 22079  		return true
 22080  	}
 22081  	// match: (RotateLeft16 x (And64 y (Const64 [c])))
 22082  	// cond: c&15 == 15
 22083  	// result: (RotateLeft16 x y)
 22084  	for {
 22085  		x := v_0
 22086  		if v_1.Op != OpAnd64 {
 22087  			break
 22088  		}
 22089  		_ = v_1.Args[1]
 22090  		v_1_0 := v_1.Args[0]
 22091  		v_1_1 := v_1.Args[1]
 22092  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 22093  			y := v_1_0
 22094  			if v_1_1.Op != OpConst64 {
 22095  				continue
 22096  			}
 22097  			c := auxIntToInt64(v_1_1.AuxInt)
 22098  			if !(c&15 == 15) {
 22099  				continue
 22100  			}
 22101  			v.reset(OpRotateLeft16)
 22102  			v.AddArg2(x, y)
 22103  			return true
 22104  		}
 22105  		break
 22106  	}
 22107  	// match: (RotateLeft16 x (And32 y (Const32 [c])))
 22108  	// cond: c&15 == 15
 22109  	// result: (RotateLeft16 x y)
 22110  	for {
 22111  		x := v_0
 22112  		if v_1.Op != OpAnd32 {
 22113  			break
 22114  		}
 22115  		_ = v_1.Args[1]
 22116  		v_1_0 := v_1.Args[0]
 22117  		v_1_1 := v_1.Args[1]
 22118  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 22119  			y := v_1_0
 22120  			if v_1_1.Op != OpConst32 {
 22121  				continue
 22122  			}
 22123  			c := auxIntToInt32(v_1_1.AuxInt)
 22124  			if !(c&15 == 15) {
 22125  				continue
 22126  			}
 22127  			v.reset(OpRotateLeft16)
 22128  			v.AddArg2(x, y)
 22129  			return true
 22130  		}
 22131  		break
 22132  	}
 22133  	// match: (RotateLeft16 x (And16 y (Const16 [c])))
 22134  	// cond: c&15 == 15
 22135  	// result: (RotateLeft16 x y)
 22136  	for {
 22137  		x := v_0
 22138  		if v_1.Op != OpAnd16 {
 22139  			break
 22140  		}
 22141  		_ = v_1.Args[1]
 22142  		v_1_0 := v_1.Args[0]
 22143  		v_1_1 := v_1.Args[1]
 22144  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 22145  			y := v_1_0
 22146  			if v_1_1.Op != OpConst16 {
 22147  				continue
 22148  			}
 22149  			c := auxIntToInt16(v_1_1.AuxInt)
 22150  			if !(c&15 == 15) {
 22151  				continue
 22152  			}
 22153  			v.reset(OpRotateLeft16)
 22154  			v.AddArg2(x, y)
 22155  			return true
 22156  		}
 22157  		break
 22158  	}
 22159  	// match: (RotateLeft16 x (And8 y (Const8 [c])))
 22160  	// cond: c&15 == 15
 22161  	// result: (RotateLeft16 x y)
 22162  	for {
 22163  		x := v_0
 22164  		if v_1.Op != OpAnd8 {
 22165  			break
 22166  		}
 22167  		_ = v_1.Args[1]
 22168  		v_1_0 := v_1.Args[0]
 22169  		v_1_1 := v_1.Args[1]
 22170  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 22171  			y := v_1_0
 22172  			if v_1_1.Op != OpConst8 {
 22173  				continue
 22174  			}
 22175  			c := auxIntToInt8(v_1_1.AuxInt)
 22176  			if !(c&15 == 15) {
 22177  				continue
 22178  			}
 22179  			v.reset(OpRotateLeft16)
 22180  			v.AddArg2(x, y)
 22181  			return true
 22182  		}
 22183  		break
 22184  	}
 22185  	// match: (RotateLeft16 x (Neg64 (And64 y (Const64 [c]))))
 22186  	// cond: c&15 == 15
 22187  	// result: (RotateLeft16 x (Neg64 <y.Type> y))
 22188  	for {
 22189  		x := v_0
 22190  		if v_1.Op != OpNeg64 {
 22191  			break
 22192  		}
 22193  		v_1_0 := v_1.Args[0]
 22194  		if v_1_0.Op != OpAnd64 {
 22195  			break
 22196  		}
 22197  		_ = v_1_0.Args[1]
 22198  		v_1_0_0 := v_1_0.Args[0]
 22199  		v_1_0_1 := v_1_0.Args[1]
 22200  		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
 22201  			y := v_1_0_0
 22202  			if v_1_0_1.Op != OpConst64 {
 22203  				continue
 22204  			}
 22205  			c := auxIntToInt64(v_1_0_1.AuxInt)
 22206  			if !(c&15 == 15) {
 22207  				continue
 22208  			}
 22209  			v.reset(OpRotateLeft16)
 22210  			v0 := b.NewValue0(v.Pos, OpNeg64, y.Type)
 22211  			v0.AddArg(y)
 22212  			v.AddArg2(x, v0)
 22213  			return true
 22214  		}
 22215  		break
 22216  	}
 22217  	// match: (RotateLeft16 x (Neg32 (And32 y (Const32 [c]))))
 22218  	// cond: c&15 == 15
 22219  	// result: (RotateLeft16 x (Neg32 <y.Type> y))
 22220  	for {
 22221  		x := v_0
 22222  		if v_1.Op != OpNeg32 {
 22223  			break
 22224  		}
 22225  		v_1_0 := v_1.Args[0]
 22226  		if v_1_0.Op != OpAnd32 {
 22227  			break
 22228  		}
 22229  		_ = v_1_0.Args[1]
 22230  		v_1_0_0 := v_1_0.Args[0]
 22231  		v_1_0_1 := v_1_0.Args[1]
 22232  		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
 22233  			y := v_1_0_0
 22234  			if v_1_0_1.Op != OpConst32 {
 22235  				continue
 22236  			}
 22237  			c := auxIntToInt32(v_1_0_1.AuxInt)
 22238  			if !(c&15 == 15) {
 22239  				continue
 22240  			}
 22241  			v.reset(OpRotateLeft16)
 22242  			v0 := b.NewValue0(v.Pos, OpNeg32, y.Type)
 22243  			v0.AddArg(y)
 22244  			v.AddArg2(x, v0)
 22245  			return true
 22246  		}
 22247  		break
 22248  	}
 22249  	// match: (RotateLeft16 x (Neg16 (And16 y (Const16 [c]))))
 22250  	// cond: c&15 == 15
 22251  	// result: (RotateLeft16 x (Neg16 <y.Type> y))
 22252  	for {
 22253  		x := v_0
 22254  		if v_1.Op != OpNeg16 {
 22255  			break
 22256  		}
 22257  		v_1_0 := v_1.Args[0]
 22258  		if v_1_0.Op != OpAnd16 {
 22259  			break
 22260  		}
 22261  		_ = v_1_0.Args[1]
 22262  		v_1_0_0 := v_1_0.Args[0]
 22263  		v_1_0_1 := v_1_0.Args[1]
 22264  		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
 22265  			y := v_1_0_0
 22266  			if v_1_0_1.Op != OpConst16 {
 22267  				continue
 22268  			}
 22269  			c := auxIntToInt16(v_1_0_1.AuxInt)
 22270  			if !(c&15 == 15) {
 22271  				continue
 22272  			}
 22273  			v.reset(OpRotateLeft16)
 22274  			v0 := b.NewValue0(v.Pos, OpNeg16, y.Type)
 22275  			v0.AddArg(y)
 22276  			v.AddArg2(x, v0)
 22277  			return true
 22278  		}
 22279  		break
 22280  	}
 22281  	// match: (RotateLeft16 x (Neg8 (And8 y (Const8 [c]))))
 22282  	// cond: c&15 == 15
 22283  	// result: (RotateLeft16 x (Neg8 <y.Type> y))
 22284  	for {
 22285  		x := v_0
 22286  		if v_1.Op != OpNeg8 {
 22287  			break
 22288  		}
 22289  		v_1_0 := v_1.Args[0]
 22290  		if v_1_0.Op != OpAnd8 {
 22291  			break
 22292  		}
 22293  		_ = v_1_0.Args[1]
 22294  		v_1_0_0 := v_1_0.Args[0]
 22295  		v_1_0_1 := v_1_0.Args[1]
 22296  		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
 22297  			y := v_1_0_0
 22298  			if v_1_0_1.Op != OpConst8 {
 22299  				continue
 22300  			}
 22301  			c := auxIntToInt8(v_1_0_1.AuxInt)
 22302  			if !(c&15 == 15) {
 22303  				continue
 22304  			}
 22305  			v.reset(OpRotateLeft16)
 22306  			v0 := b.NewValue0(v.Pos, OpNeg8, y.Type)
 22307  			v0.AddArg(y)
 22308  			v.AddArg2(x, v0)
 22309  			return true
 22310  		}
 22311  		break
 22312  	}
 22313  	// match: (RotateLeft16 x (Add64 y (Const64 [c])))
 22314  	// cond: c&15 == 0
 22315  	// result: (RotateLeft16 x y)
 22316  	for {
 22317  		x := v_0
 22318  		if v_1.Op != OpAdd64 {
 22319  			break
 22320  		}
 22321  		_ = v_1.Args[1]
 22322  		v_1_0 := v_1.Args[0]
 22323  		v_1_1 := v_1.Args[1]
 22324  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 22325  			y := v_1_0
 22326  			if v_1_1.Op != OpConst64 {
 22327  				continue
 22328  			}
 22329  			c := auxIntToInt64(v_1_1.AuxInt)
 22330  			if !(c&15 == 0) {
 22331  				continue
 22332  			}
 22333  			v.reset(OpRotateLeft16)
 22334  			v.AddArg2(x, y)
 22335  			return true
 22336  		}
 22337  		break
 22338  	}
 22339  	// match: (RotateLeft16 x (Add32 y (Const32 [c])))
 22340  	// cond: c&15 == 0
 22341  	// result: (RotateLeft16 x y)
 22342  	for {
 22343  		x := v_0
 22344  		if v_1.Op != OpAdd32 {
 22345  			break
 22346  		}
 22347  		_ = v_1.Args[1]
 22348  		v_1_0 := v_1.Args[0]
 22349  		v_1_1 := v_1.Args[1]
 22350  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 22351  			y := v_1_0
 22352  			if v_1_1.Op != OpConst32 {
 22353  				continue
 22354  			}
 22355  			c := auxIntToInt32(v_1_1.AuxInt)
 22356  			if !(c&15 == 0) {
 22357  				continue
 22358  			}
 22359  			v.reset(OpRotateLeft16)
 22360  			v.AddArg2(x, y)
 22361  			return true
 22362  		}
 22363  		break
 22364  	}
 22365  	// match: (RotateLeft16 x (Add16 y (Const16 [c])))
 22366  	// cond: c&15 == 0
 22367  	// result: (RotateLeft16 x y)
 22368  	for {
 22369  		x := v_0
 22370  		if v_1.Op != OpAdd16 {
 22371  			break
 22372  		}
 22373  		_ = v_1.Args[1]
 22374  		v_1_0 := v_1.Args[0]
 22375  		v_1_1 := v_1.Args[1]
 22376  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 22377  			y := v_1_0
 22378  			if v_1_1.Op != OpConst16 {
 22379  				continue
 22380  			}
 22381  			c := auxIntToInt16(v_1_1.AuxInt)
 22382  			if !(c&15 == 0) {
 22383  				continue
 22384  			}
 22385  			v.reset(OpRotateLeft16)
 22386  			v.AddArg2(x, y)
 22387  			return true
 22388  		}
 22389  		break
 22390  	}
 22391  	// match: (RotateLeft16 x (Add8 y (Const8 [c])))
 22392  	// cond: c&15 == 0
 22393  	// result: (RotateLeft16 x y)
 22394  	for {
 22395  		x := v_0
 22396  		if v_1.Op != OpAdd8 {
 22397  			break
 22398  		}
 22399  		_ = v_1.Args[1]
 22400  		v_1_0 := v_1.Args[0]
 22401  		v_1_1 := v_1.Args[1]
 22402  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 22403  			y := v_1_0
 22404  			if v_1_1.Op != OpConst8 {
 22405  				continue
 22406  			}
 22407  			c := auxIntToInt8(v_1_1.AuxInt)
 22408  			if !(c&15 == 0) {
 22409  				continue
 22410  			}
 22411  			v.reset(OpRotateLeft16)
 22412  			v.AddArg2(x, y)
 22413  			return true
 22414  		}
 22415  		break
 22416  	}
 22417  	// match: (RotateLeft16 x (Sub64 (Const64 [c]) y))
 22418  	// cond: c&15 == 0
 22419  	// result: (RotateLeft16 x (Neg64 <y.Type> y))
 22420  	for {
 22421  		x := v_0
 22422  		if v_1.Op != OpSub64 {
 22423  			break
 22424  		}
 22425  		y := v_1.Args[1]
 22426  		v_1_0 := v_1.Args[0]
 22427  		if v_1_0.Op != OpConst64 {
 22428  			break
 22429  		}
 22430  		c := auxIntToInt64(v_1_0.AuxInt)
 22431  		if !(c&15 == 0) {
 22432  			break
 22433  		}
 22434  		v.reset(OpRotateLeft16)
 22435  		v0 := b.NewValue0(v.Pos, OpNeg64, y.Type)
 22436  		v0.AddArg(y)
 22437  		v.AddArg2(x, v0)
 22438  		return true
 22439  	}
 22440  	// match: (RotateLeft16 x (Sub32 (Const32 [c]) y))
 22441  	// cond: c&15 == 0
 22442  	// result: (RotateLeft16 x (Neg32 <y.Type> y))
 22443  	for {
 22444  		x := v_0
 22445  		if v_1.Op != OpSub32 {
 22446  			break
 22447  		}
 22448  		y := v_1.Args[1]
 22449  		v_1_0 := v_1.Args[0]
 22450  		if v_1_0.Op != OpConst32 {
 22451  			break
 22452  		}
 22453  		c := auxIntToInt32(v_1_0.AuxInt)
 22454  		if !(c&15 == 0) {
 22455  			break
 22456  		}
 22457  		v.reset(OpRotateLeft16)
 22458  		v0 := b.NewValue0(v.Pos, OpNeg32, y.Type)
 22459  		v0.AddArg(y)
 22460  		v.AddArg2(x, v0)
 22461  		return true
 22462  	}
 22463  	// match: (RotateLeft16 x (Sub16 (Const16 [c]) y))
 22464  	// cond: c&15 == 0
 22465  	// result: (RotateLeft16 x (Neg16 <y.Type> y))
 22466  	for {
 22467  		x := v_0
 22468  		if v_1.Op != OpSub16 {
 22469  			break
 22470  		}
 22471  		y := v_1.Args[1]
 22472  		v_1_0 := v_1.Args[0]
 22473  		if v_1_0.Op != OpConst16 {
 22474  			break
 22475  		}
 22476  		c := auxIntToInt16(v_1_0.AuxInt)
 22477  		if !(c&15 == 0) {
 22478  			break
 22479  		}
 22480  		v.reset(OpRotateLeft16)
 22481  		v0 := b.NewValue0(v.Pos, OpNeg16, y.Type)
 22482  		v0.AddArg(y)
 22483  		v.AddArg2(x, v0)
 22484  		return true
 22485  	}
 22486  	// match: (RotateLeft16 x (Sub8 (Const8 [c]) y))
 22487  	// cond: c&15 == 0
 22488  	// result: (RotateLeft16 x (Neg8 <y.Type> y))
 22489  	for {
 22490  		x := v_0
 22491  		if v_1.Op != OpSub8 {
 22492  			break
 22493  		}
 22494  		y := v_1.Args[1]
 22495  		v_1_0 := v_1.Args[0]
 22496  		if v_1_0.Op != OpConst8 {
 22497  			break
 22498  		}
 22499  		c := auxIntToInt8(v_1_0.AuxInt)
 22500  		if !(c&15 == 0) {
 22501  			break
 22502  		}
 22503  		v.reset(OpRotateLeft16)
 22504  		v0 := b.NewValue0(v.Pos, OpNeg8, y.Type)
 22505  		v0.AddArg(y)
 22506  		v.AddArg2(x, v0)
 22507  		return true
 22508  	}
 22509  	// match: (RotateLeft16 x (Const64 <t> [c]))
 22510  	// cond: config.PtrSize == 4
 22511  	// result: (RotateLeft16 x (Const32 <t> [int32(c)]))
 22512  	for {
 22513  		x := v_0
 22514  		if v_1.Op != OpConst64 {
 22515  			break
 22516  		}
 22517  		t := v_1.Type
 22518  		c := auxIntToInt64(v_1.AuxInt)
 22519  		if !(config.PtrSize == 4) {
 22520  			break
 22521  		}
 22522  		v.reset(OpRotateLeft16)
 22523  		v0 := b.NewValue0(v.Pos, OpConst32, t)
 22524  		v0.AuxInt = int32ToAuxInt(int32(c))
 22525  		v.AddArg2(x, v0)
 22526  		return true
 22527  	}
 22528  	// match: (RotateLeft16 (RotateLeft16 x c) d)
 22529  	// cond: c.Type.Size() == 8 && d.Type.Size() == 8
 22530  	// result: (RotateLeft16 x (Add64 <c.Type> c d))
 22531  	for {
 22532  		if v_0.Op != OpRotateLeft16 {
 22533  			break
 22534  		}
 22535  		c := v_0.Args[1]
 22536  		x := v_0.Args[0]
 22537  		d := v_1
 22538  		if !(c.Type.Size() == 8 && d.Type.Size() == 8) {
 22539  			break
 22540  		}
 22541  		v.reset(OpRotateLeft16)
 22542  		v0 := b.NewValue0(v.Pos, OpAdd64, c.Type)
 22543  		v0.AddArg2(c, d)
 22544  		v.AddArg2(x, v0)
 22545  		return true
 22546  	}
 22547  	// match: (RotateLeft16 (RotateLeft16 x c) d)
 22548  	// cond: c.Type.Size() == 4 && d.Type.Size() == 4
 22549  	// result: (RotateLeft16 x (Add32 <c.Type> c d))
 22550  	for {
 22551  		if v_0.Op != OpRotateLeft16 {
 22552  			break
 22553  		}
 22554  		c := v_0.Args[1]
 22555  		x := v_0.Args[0]
 22556  		d := v_1
 22557  		if !(c.Type.Size() == 4 && d.Type.Size() == 4) {
 22558  			break
 22559  		}
 22560  		v.reset(OpRotateLeft16)
 22561  		v0 := b.NewValue0(v.Pos, OpAdd32, c.Type)
 22562  		v0.AddArg2(c, d)
 22563  		v.AddArg2(x, v0)
 22564  		return true
 22565  	}
 22566  	// match: (RotateLeft16 (RotateLeft16 x c) d)
 22567  	// cond: c.Type.Size() == 2 && d.Type.Size() == 2
 22568  	// result: (RotateLeft16 x (Add16 <c.Type> c d))
 22569  	for {
 22570  		if v_0.Op != OpRotateLeft16 {
 22571  			break
 22572  		}
 22573  		c := v_0.Args[1]
 22574  		x := v_0.Args[0]
 22575  		d := v_1
 22576  		if !(c.Type.Size() == 2 && d.Type.Size() == 2) {
 22577  			break
 22578  		}
 22579  		v.reset(OpRotateLeft16)
 22580  		v0 := b.NewValue0(v.Pos, OpAdd16, c.Type)
 22581  		v0.AddArg2(c, d)
 22582  		v.AddArg2(x, v0)
 22583  		return true
 22584  	}
 22585  	// match: (RotateLeft16 (RotateLeft16 x c) d)
 22586  	// cond: c.Type.Size() == 1 && d.Type.Size() == 1
 22587  	// result: (RotateLeft16 x (Add8 <c.Type> c d))
 22588  	for {
 22589  		if v_0.Op != OpRotateLeft16 {
 22590  			break
 22591  		}
 22592  		c := v_0.Args[1]
 22593  		x := v_0.Args[0]
 22594  		d := v_1
 22595  		if !(c.Type.Size() == 1 && d.Type.Size() == 1) {
 22596  			break
 22597  		}
 22598  		v.reset(OpRotateLeft16)
 22599  		v0 := b.NewValue0(v.Pos, OpAdd8, c.Type)
 22600  		v0.AddArg2(c, d)
 22601  		v.AddArg2(x, v0)
 22602  		return true
 22603  	}
 22604  	return false
 22605  }
 22606  func rewriteValuegeneric_OpRotateLeft32(v *Value) bool {
 22607  	v_1 := v.Args[1]
 22608  	v_0 := v.Args[0]
 22609  	b := v.Block
 22610  	config := b.Func.Config
 22611  	// match: (RotateLeft32 x (Const32 [c]))
 22612  	// cond: c%32 == 0
 22613  	// result: x
 22614  	for {
 22615  		x := v_0
 22616  		if v_1.Op != OpConst32 {
 22617  			break
 22618  		}
 22619  		c := auxIntToInt32(v_1.AuxInt)
 22620  		if !(c%32 == 0) {
 22621  			break
 22622  		}
 22623  		v.copyOf(x)
 22624  		return true
 22625  	}
 22626  	// match: (RotateLeft32 x (And64 y (Const64 [c])))
 22627  	// cond: c&31 == 31
 22628  	// result: (RotateLeft32 x y)
 22629  	for {
 22630  		x := v_0
 22631  		if v_1.Op != OpAnd64 {
 22632  			break
 22633  		}
 22634  		_ = v_1.Args[1]
 22635  		v_1_0 := v_1.Args[0]
 22636  		v_1_1 := v_1.Args[1]
 22637  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 22638  			y := v_1_0
 22639  			if v_1_1.Op != OpConst64 {
 22640  				continue
 22641  			}
 22642  			c := auxIntToInt64(v_1_1.AuxInt)
 22643  			if !(c&31 == 31) {
 22644  				continue
 22645  			}
 22646  			v.reset(OpRotateLeft32)
 22647  			v.AddArg2(x, y)
 22648  			return true
 22649  		}
 22650  		break
 22651  	}
 22652  	// match: (RotateLeft32 x (And32 y (Const32 [c])))
 22653  	// cond: c&31 == 31
 22654  	// result: (RotateLeft32 x y)
 22655  	for {
 22656  		x := v_0
 22657  		if v_1.Op != OpAnd32 {
 22658  			break
 22659  		}
 22660  		_ = v_1.Args[1]
 22661  		v_1_0 := v_1.Args[0]
 22662  		v_1_1 := v_1.Args[1]
 22663  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 22664  			y := v_1_0
 22665  			if v_1_1.Op != OpConst32 {
 22666  				continue
 22667  			}
 22668  			c := auxIntToInt32(v_1_1.AuxInt)
 22669  			if !(c&31 == 31) {
 22670  				continue
 22671  			}
 22672  			v.reset(OpRotateLeft32)
 22673  			v.AddArg2(x, y)
 22674  			return true
 22675  		}
 22676  		break
 22677  	}
 22678  	// match: (RotateLeft32 x (And16 y (Const16 [c])))
 22679  	// cond: c&31 == 31
 22680  	// result: (RotateLeft32 x y)
 22681  	for {
 22682  		x := v_0
 22683  		if v_1.Op != OpAnd16 {
 22684  			break
 22685  		}
 22686  		_ = v_1.Args[1]
 22687  		v_1_0 := v_1.Args[0]
 22688  		v_1_1 := v_1.Args[1]
 22689  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 22690  			y := v_1_0
 22691  			if v_1_1.Op != OpConst16 {
 22692  				continue
 22693  			}
 22694  			c := auxIntToInt16(v_1_1.AuxInt)
 22695  			if !(c&31 == 31) {
 22696  				continue
 22697  			}
 22698  			v.reset(OpRotateLeft32)
 22699  			v.AddArg2(x, y)
 22700  			return true
 22701  		}
 22702  		break
 22703  	}
 22704  	// match: (RotateLeft32 x (And8 y (Const8 [c])))
 22705  	// cond: c&31 == 31
 22706  	// result: (RotateLeft32 x y)
 22707  	for {
 22708  		x := v_0
 22709  		if v_1.Op != OpAnd8 {
 22710  			break
 22711  		}
 22712  		_ = v_1.Args[1]
 22713  		v_1_0 := v_1.Args[0]
 22714  		v_1_1 := v_1.Args[1]
 22715  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 22716  			y := v_1_0
 22717  			if v_1_1.Op != OpConst8 {
 22718  				continue
 22719  			}
 22720  			c := auxIntToInt8(v_1_1.AuxInt)
 22721  			if !(c&31 == 31) {
 22722  				continue
 22723  			}
 22724  			v.reset(OpRotateLeft32)
 22725  			v.AddArg2(x, y)
 22726  			return true
 22727  		}
 22728  		break
 22729  	}
 22730  	// match: (RotateLeft32 x (Neg64 (And64 y (Const64 [c]))))
 22731  	// cond: c&31 == 31
 22732  	// result: (RotateLeft32 x (Neg64 <y.Type> y))
 22733  	for {
 22734  		x := v_0
 22735  		if v_1.Op != OpNeg64 {
 22736  			break
 22737  		}
 22738  		v_1_0 := v_1.Args[0]
 22739  		if v_1_0.Op != OpAnd64 {
 22740  			break
 22741  		}
 22742  		_ = v_1_0.Args[1]
 22743  		v_1_0_0 := v_1_0.Args[0]
 22744  		v_1_0_1 := v_1_0.Args[1]
 22745  		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
 22746  			y := v_1_0_0
 22747  			if v_1_0_1.Op != OpConst64 {
 22748  				continue
 22749  			}
 22750  			c := auxIntToInt64(v_1_0_1.AuxInt)
 22751  			if !(c&31 == 31) {
 22752  				continue
 22753  			}
 22754  			v.reset(OpRotateLeft32)
 22755  			v0 := b.NewValue0(v.Pos, OpNeg64, y.Type)
 22756  			v0.AddArg(y)
 22757  			v.AddArg2(x, v0)
 22758  			return true
 22759  		}
 22760  		break
 22761  	}
 22762  	// match: (RotateLeft32 x (Neg32 (And32 y (Const32 [c]))))
 22763  	// cond: c&31 == 31
 22764  	// result: (RotateLeft32 x (Neg32 <y.Type> y))
 22765  	for {
 22766  		x := v_0
 22767  		if v_1.Op != OpNeg32 {
 22768  			break
 22769  		}
 22770  		v_1_0 := v_1.Args[0]
 22771  		if v_1_0.Op != OpAnd32 {
 22772  			break
 22773  		}
 22774  		_ = v_1_0.Args[1]
 22775  		v_1_0_0 := v_1_0.Args[0]
 22776  		v_1_0_1 := v_1_0.Args[1]
 22777  		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
 22778  			y := v_1_0_0
 22779  			if v_1_0_1.Op != OpConst32 {
 22780  				continue
 22781  			}
 22782  			c := auxIntToInt32(v_1_0_1.AuxInt)
 22783  			if !(c&31 == 31) {
 22784  				continue
 22785  			}
 22786  			v.reset(OpRotateLeft32)
 22787  			v0 := b.NewValue0(v.Pos, OpNeg32, y.Type)
 22788  			v0.AddArg(y)
 22789  			v.AddArg2(x, v0)
 22790  			return true
 22791  		}
 22792  		break
 22793  	}
 22794  	// match: (RotateLeft32 x (Neg16 (And16 y (Const16 [c]))))
 22795  	// cond: c&31 == 31
 22796  	// result: (RotateLeft32 x (Neg16 <y.Type> y))
 22797  	for {
 22798  		x := v_0
 22799  		if v_1.Op != OpNeg16 {
 22800  			break
 22801  		}
 22802  		v_1_0 := v_1.Args[0]
 22803  		if v_1_0.Op != OpAnd16 {
 22804  			break
 22805  		}
 22806  		_ = v_1_0.Args[1]
 22807  		v_1_0_0 := v_1_0.Args[0]
 22808  		v_1_0_1 := v_1_0.Args[1]
 22809  		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
 22810  			y := v_1_0_0
 22811  			if v_1_0_1.Op != OpConst16 {
 22812  				continue
 22813  			}
 22814  			c := auxIntToInt16(v_1_0_1.AuxInt)
 22815  			if !(c&31 == 31) {
 22816  				continue
 22817  			}
 22818  			v.reset(OpRotateLeft32)
 22819  			v0 := b.NewValue0(v.Pos, OpNeg16, y.Type)
 22820  			v0.AddArg(y)
 22821  			v.AddArg2(x, v0)
 22822  			return true
 22823  		}
 22824  		break
 22825  	}
 22826  	// match: (RotateLeft32 x (Neg8 (And8 y (Const8 [c]))))
 22827  	// cond: c&31 == 31
 22828  	// result: (RotateLeft32 x (Neg8 <y.Type> y))
 22829  	for {
 22830  		x := v_0
 22831  		if v_1.Op != OpNeg8 {
 22832  			break
 22833  		}
 22834  		v_1_0 := v_1.Args[0]
 22835  		if v_1_0.Op != OpAnd8 {
 22836  			break
 22837  		}
 22838  		_ = v_1_0.Args[1]
 22839  		v_1_0_0 := v_1_0.Args[0]
 22840  		v_1_0_1 := v_1_0.Args[1]
 22841  		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
 22842  			y := v_1_0_0
 22843  			if v_1_0_1.Op != OpConst8 {
 22844  				continue
 22845  			}
 22846  			c := auxIntToInt8(v_1_0_1.AuxInt)
 22847  			if !(c&31 == 31) {
 22848  				continue
 22849  			}
 22850  			v.reset(OpRotateLeft32)
 22851  			v0 := b.NewValue0(v.Pos, OpNeg8, y.Type)
 22852  			v0.AddArg(y)
 22853  			v.AddArg2(x, v0)
 22854  			return true
 22855  		}
 22856  		break
 22857  	}
 22858  	// match: (RotateLeft32 x (Add64 y (Const64 [c])))
 22859  	// cond: c&31 == 0
 22860  	// result: (RotateLeft32 x y)
 22861  	for {
 22862  		x := v_0
 22863  		if v_1.Op != OpAdd64 {
 22864  			break
 22865  		}
 22866  		_ = v_1.Args[1]
 22867  		v_1_0 := v_1.Args[0]
 22868  		v_1_1 := v_1.Args[1]
 22869  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 22870  			y := v_1_0
 22871  			if v_1_1.Op != OpConst64 {
 22872  				continue
 22873  			}
 22874  			c := auxIntToInt64(v_1_1.AuxInt)
 22875  			if !(c&31 == 0) {
 22876  				continue
 22877  			}
 22878  			v.reset(OpRotateLeft32)
 22879  			v.AddArg2(x, y)
 22880  			return true
 22881  		}
 22882  		break
 22883  	}
 22884  	// match: (RotateLeft32 x (Add32 y (Const32 [c])))
 22885  	// cond: c&31 == 0
 22886  	// result: (RotateLeft32 x y)
 22887  	for {
 22888  		x := v_0
 22889  		if v_1.Op != OpAdd32 {
 22890  			break
 22891  		}
 22892  		_ = v_1.Args[1]
 22893  		v_1_0 := v_1.Args[0]
 22894  		v_1_1 := v_1.Args[1]
 22895  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 22896  			y := v_1_0
 22897  			if v_1_1.Op != OpConst32 {
 22898  				continue
 22899  			}
 22900  			c := auxIntToInt32(v_1_1.AuxInt)
 22901  			if !(c&31 == 0) {
 22902  				continue
 22903  			}
 22904  			v.reset(OpRotateLeft32)
 22905  			v.AddArg2(x, y)
 22906  			return true
 22907  		}
 22908  		break
 22909  	}
 22910  	// match: (RotateLeft32 x (Add16 y (Const16 [c])))
 22911  	// cond: c&31 == 0
 22912  	// result: (RotateLeft32 x y)
 22913  	for {
 22914  		x := v_0
 22915  		if v_1.Op != OpAdd16 {
 22916  			break
 22917  		}
 22918  		_ = v_1.Args[1]
 22919  		v_1_0 := v_1.Args[0]
 22920  		v_1_1 := v_1.Args[1]
 22921  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 22922  			y := v_1_0
 22923  			if v_1_1.Op != OpConst16 {
 22924  				continue
 22925  			}
 22926  			c := auxIntToInt16(v_1_1.AuxInt)
 22927  			if !(c&31 == 0) {
 22928  				continue
 22929  			}
 22930  			v.reset(OpRotateLeft32)
 22931  			v.AddArg2(x, y)
 22932  			return true
 22933  		}
 22934  		break
 22935  	}
 22936  	// match: (RotateLeft32 x (Add8 y (Const8 [c])))
 22937  	// cond: c&31 == 0
 22938  	// result: (RotateLeft32 x y)
 22939  	for {
 22940  		x := v_0
 22941  		if v_1.Op != OpAdd8 {
 22942  			break
 22943  		}
 22944  		_ = v_1.Args[1]
 22945  		v_1_0 := v_1.Args[0]
 22946  		v_1_1 := v_1.Args[1]
 22947  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 22948  			y := v_1_0
 22949  			if v_1_1.Op != OpConst8 {
 22950  				continue
 22951  			}
 22952  			c := auxIntToInt8(v_1_1.AuxInt)
 22953  			if !(c&31 == 0) {
 22954  				continue
 22955  			}
 22956  			v.reset(OpRotateLeft32)
 22957  			v.AddArg2(x, y)
 22958  			return true
 22959  		}
 22960  		break
 22961  	}
 22962  	// match: (RotateLeft32 x (Sub64 (Const64 [c]) y))
 22963  	// cond: c&31 == 0
 22964  	// result: (RotateLeft32 x (Neg64 <y.Type> y))
 22965  	for {
 22966  		x := v_0
 22967  		if v_1.Op != OpSub64 {
 22968  			break
 22969  		}
 22970  		y := v_1.Args[1]
 22971  		v_1_0 := v_1.Args[0]
 22972  		if v_1_0.Op != OpConst64 {
 22973  			break
 22974  		}
 22975  		c := auxIntToInt64(v_1_0.AuxInt)
 22976  		if !(c&31 == 0) {
 22977  			break
 22978  		}
 22979  		v.reset(OpRotateLeft32)
 22980  		v0 := b.NewValue0(v.Pos, OpNeg64, y.Type)
 22981  		v0.AddArg(y)
 22982  		v.AddArg2(x, v0)
 22983  		return true
 22984  	}
 22985  	// match: (RotateLeft32 x (Sub32 (Const32 [c]) y))
 22986  	// cond: c&31 == 0
 22987  	// result: (RotateLeft32 x (Neg32 <y.Type> y))
 22988  	for {
 22989  		x := v_0
 22990  		if v_1.Op != OpSub32 {
 22991  			break
 22992  		}
 22993  		y := v_1.Args[1]
 22994  		v_1_0 := v_1.Args[0]
 22995  		if v_1_0.Op != OpConst32 {
 22996  			break
 22997  		}
 22998  		c := auxIntToInt32(v_1_0.AuxInt)
 22999  		if !(c&31 == 0) {
 23000  			break
 23001  		}
 23002  		v.reset(OpRotateLeft32)
 23003  		v0 := b.NewValue0(v.Pos, OpNeg32, y.Type)
 23004  		v0.AddArg(y)
 23005  		v.AddArg2(x, v0)
 23006  		return true
 23007  	}
 23008  	// match: (RotateLeft32 x (Sub16 (Const16 [c]) y))
 23009  	// cond: c&31 == 0
 23010  	// result: (RotateLeft32 x (Neg16 <y.Type> y))
 23011  	for {
 23012  		x := v_0
 23013  		if v_1.Op != OpSub16 {
 23014  			break
 23015  		}
 23016  		y := v_1.Args[1]
 23017  		v_1_0 := v_1.Args[0]
 23018  		if v_1_0.Op != OpConst16 {
 23019  			break
 23020  		}
 23021  		c := auxIntToInt16(v_1_0.AuxInt)
 23022  		if !(c&31 == 0) {
 23023  			break
 23024  		}
 23025  		v.reset(OpRotateLeft32)
 23026  		v0 := b.NewValue0(v.Pos, OpNeg16, y.Type)
 23027  		v0.AddArg(y)
 23028  		v.AddArg2(x, v0)
 23029  		return true
 23030  	}
 23031  	// match: (RotateLeft32 x (Sub8 (Const8 [c]) y))
 23032  	// cond: c&31 == 0
 23033  	// result: (RotateLeft32 x (Neg8 <y.Type> y))
 23034  	for {
 23035  		x := v_0
 23036  		if v_1.Op != OpSub8 {
 23037  			break
 23038  		}
 23039  		y := v_1.Args[1]
 23040  		v_1_0 := v_1.Args[0]
 23041  		if v_1_0.Op != OpConst8 {
 23042  			break
 23043  		}
 23044  		c := auxIntToInt8(v_1_0.AuxInt)
 23045  		if !(c&31 == 0) {
 23046  			break
 23047  		}
 23048  		v.reset(OpRotateLeft32)
 23049  		v0 := b.NewValue0(v.Pos, OpNeg8, y.Type)
 23050  		v0.AddArg(y)
 23051  		v.AddArg2(x, v0)
 23052  		return true
 23053  	}
 23054  	// match: (RotateLeft32 x (Const64 <t> [c]))
 23055  	// cond: config.PtrSize == 4
 23056  	// result: (RotateLeft32 x (Const32 <t> [int32(c)]))
 23057  	for {
 23058  		x := v_0
 23059  		if v_1.Op != OpConst64 {
 23060  			break
 23061  		}
 23062  		t := v_1.Type
 23063  		c := auxIntToInt64(v_1.AuxInt)
 23064  		if !(config.PtrSize == 4) {
 23065  			break
 23066  		}
 23067  		v.reset(OpRotateLeft32)
 23068  		v0 := b.NewValue0(v.Pos, OpConst32, t)
 23069  		v0.AuxInt = int32ToAuxInt(int32(c))
 23070  		v.AddArg2(x, v0)
 23071  		return true
 23072  	}
 23073  	// match: (RotateLeft32 (RotateLeft32 x c) d)
 23074  	// cond: c.Type.Size() == 8 && d.Type.Size() == 8
 23075  	// result: (RotateLeft32 x (Add64 <c.Type> c d))
 23076  	for {
 23077  		if v_0.Op != OpRotateLeft32 {
 23078  			break
 23079  		}
 23080  		c := v_0.Args[1]
 23081  		x := v_0.Args[0]
 23082  		d := v_1
 23083  		if !(c.Type.Size() == 8 && d.Type.Size() == 8) {
 23084  			break
 23085  		}
 23086  		v.reset(OpRotateLeft32)
 23087  		v0 := b.NewValue0(v.Pos, OpAdd64, c.Type)
 23088  		v0.AddArg2(c, d)
 23089  		v.AddArg2(x, v0)
 23090  		return true
 23091  	}
 23092  	// match: (RotateLeft32 (RotateLeft32 x c) d)
 23093  	// cond: c.Type.Size() == 4 && d.Type.Size() == 4
 23094  	// result: (RotateLeft32 x (Add32 <c.Type> c d))
 23095  	for {
 23096  		if v_0.Op != OpRotateLeft32 {
 23097  			break
 23098  		}
 23099  		c := v_0.Args[1]
 23100  		x := v_0.Args[0]
 23101  		d := v_1
 23102  		if !(c.Type.Size() == 4 && d.Type.Size() == 4) {
 23103  			break
 23104  		}
 23105  		v.reset(OpRotateLeft32)
 23106  		v0 := b.NewValue0(v.Pos, OpAdd32, c.Type)
 23107  		v0.AddArg2(c, d)
 23108  		v.AddArg2(x, v0)
 23109  		return true
 23110  	}
 23111  	// match: (RotateLeft32 (RotateLeft32 x c) d)
 23112  	// cond: c.Type.Size() == 2 && d.Type.Size() == 2
 23113  	// result: (RotateLeft32 x (Add16 <c.Type> c d))
 23114  	for {
 23115  		if v_0.Op != OpRotateLeft32 {
 23116  			break
 23117  		}
 23118  		c := v_0.Args[1]
 23119  		x := v_0.Args[0]
 23120  		d := v_1
 23121  		if !(c.Type.Size() == 2 && d.Type.Size() == 2) {
 23122  			break
 23123  		}
 23124  		v.reset(OpRotateLeft32)
 23125  		v0 := b.NewValue0(v.Pos, OpAdd16, c.Type)
 23126  		v0.AddArg2(c, d)
 23127  		v.AddArg2(x, v0)
 23128  		return true
 23129  	}
 23130  	// match: (RotateLeft32 (RotateLeft32 x c) d)
 23131  	// cond: c.Type.Size() == 1 && d.Type.Size() == 1
 23132  	// result: (RotateLeft32 x (Add8 <c.Type> c d))
 23133  	for {
 23134  		if v_0.Op != OpRotateLeft32 {
 23135  			break
 23136  		}
 23137  		c := v_0.Args[1]
 23138  		x := v_0.Args[0]
 23139  		d := v_1
 23140  		if !(c.Type.Size() == 1 && d.Type.Size() == 1) {
 23141  			break
 23142  		}
 23143  		v.reset(OpRotateLeft32)
 23144  		v0 := b.NewValue0(v.Pos, OpAdd8, c.Type)
 23145  		v0.AddArg2(c, d)
 23146  		v.AddArg2(x, v0)
 23147  		return true
 23148  	}
 23149  	return false
 23150  }
 23151  func rewriteValuegeneric_OpRotateLeft64(v *Value) bool {
 23152  	v_1 := v.Args[1]
 23153  	v_0 := v.Args[0]
 23154  	b := v.Block
 23155  	config := b.Func.Config
 23156  	// match: (RotateLeft64 x (Const64 [c]))
 23157  	// cond: c%64 == 0
 23158  	// result: x
 23159  	for {
 23160  		x := v_0
 23161  		if v_1.Op != OpConst64 {
 23162  			break
 23163  		}
 23164  		c := auxIntToInt64(v_1.AuxInt)
 23165  		if !(c%64 == 0) {
 23166  			break
 23167  		}
 23168  		v.copyOf(x)
 23169  		return true
 23170  	}
 23171  	// match: (RotateLeft64 x (And64 y (Const64 [c])))
 23172  	// cond: c&63 == 63
 23173  	// result: (RotateLeft64 x y)
 23174  	for {
 23175  		x := v_0
 23176  		if v_1.Op != OpAnd64 {
 23177  			break
 23178  		}
 23179  		_ = v_1.Args[1]
 23180  		v_1_0 := v_1.Args[0]
 23181  		v_1_1 := v_1.Args[1]
 23182  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 23183  			y := v_1_0
 23184  			if v_1_1.Op != OpConst64 {
 23185  				continue
 23186  			}
 23187  			c := auxIntToInt64(v_1_1.AuxInt)
 23188  			if !(c&63 == 63) {
 23189  				continue
 23190  			}
 23191  			v.reset(OpRotateLeft64)
 23192  			v.AddArg2(x, y)
 23193  			return true
 23194  		}
 23195  		break
 23196  	}
 23197  	// match: (RotateLeft64 x (And32 y (Const32 [c])))
 23198  	// cond: c&63 == 63
 23199  	// result: (RotateLeft64 x y)
 23200  	for {
 23201  		x := v_0
 23202  		if v_1.Op != OpAnd32 {
 23203  			break
 23204  		}
 23205  		_ = v_1.Args[1]
 23206  		v_1_0 := v_1.Args[0]
 23207  		v_1_1 := v_1.Args[1]
 23208  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 23209  			y := v_1_0
 23210  			if v_1_1.Op != OpConst32 {
 23211  				continue
 23212  			}
 23213  			c := auxIntToInt32(v_1_1.AuxInt)
 23214  			if !(c&63 == 63) {
 23215  				continue
 23216  			}
 23217  			v.reset(OpRotateLeft64)
 23218  			v.AddArg2(x, y)
 23219  			return true
 23220  		}
 23221  		break
 23222  	}
 23223  	// match: (RotateLeft64 x (And16 y (Const16 [c])))
 23224  	// cond: c&63 == 63
 23225  	// result: (RotateLeft64 x y)
 23226  	for {
 23227  		x := v_0
 23228  		if v_1.Op != OpAnd16 {
 23229  			break
 23230  		}
 23231  		_ = v_1.Args[1]
 23232  		v_1_0 := v_1.Args[0]
 23233  		v_1_1 := v_1.Args[1]
 23234  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 23235  			y := v_1_0
 23236  			if v_1_1.Op != OpConst16 {
 23237  				continue
 23238  			}
 23239  			c := auxIntToInt16(v_1_1.AuxInt)
 23240  			if !(c&63 == 63) {
 23241  				continue
 23242  			}
 23243  			v.reset(OpRotateLeft64)
 23244  			v.AddArg2(x, y)
 23245  			return true
 23246  		}
 23247  		break
 23248  	}
 23249  	// match: (RotateLeft64 x (And8 y (Const8 [c])))
 23250  	// cond: c&63 == 63
 23251  	// result: (RotateLeft64 x y)
 23252  	for {
 23253  		x := v_0
 23254  		if v_1.Op != OpAnd8 {
 23255  			break
 23256  		}
 23257  		_ = v_1.Args[1]
 23258  		v_1_0 := v_1.Args[0]
 23259  		v_1_1 := v_1.Args[1]
 23260  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 23261  			y := v_1_0
 23262  			if v_1_1.Op != OpConst8 {
 23263  				continue
 23264  			}
 23265  			c := auxIntToInt8(v_1_1.AuxInt)
 23266  			if !(c&63 == 63) {
 23267  				continue
 23268  			}
 23269  			v.reset(OpRotateLeft64)
 23270  			v.AddArg2(x, y)
 23271  			return true
 23272  		}
 23273  		break
 23274  	}
 23275  	// match: (RotateLeft64 x (Neg64 (And64 y (Const64 [c]))))
 23276  	// cond: c&63 == 63
 23277  	// result: (RotateLeft64 x (Neg64 <y.Type> y))
 23278  	for {
 23279  		x := v_0
 23280  		if v_1.Op != OpNeg64 {
 23281  			break
 23282  		}
 23283  		v_1_0 := v_1.Args[0]
 23284  		if v_1_0.Op != OpAnd64 {
 23285  			break
 23286  		}
 23287  		_ = v_1_0.Args[1]
 23288  		v_1_0_0 := v_1_0.Args[0]
 23289  		v_1_0_1 := v_1_0.Args[1]
 23290  		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
 23291  			y := v_1_0_0
 23292  			if v_1_0_1.Op != OpConst64 {
 23293  				continue
 23294  			}
 23295  			c := auxIntToInt64(v_1_0_1.AuxInt)
 23296  			if !(c&63 == 63) {
 23297  				continue
 23298  			}
 23299  			v.reset(OpRotateLeft64)
 23300  			v0 := b.NewValue0(v.Pos, OpNeg64, y.Type)
 23301  			v0.AddArg(y)
 23302  			v.AddArg2(x, v0)
 23303  			return true
 23304  		}
 23305  		break
 23306  	}
 23307  	// match: (RotateLeft64 x (Neg32 (And32 y (Const32 [c]))))
 23308  	// cond: c&63 == 63
 23309  	// result: (RotateLeft64 x (Neg32 <y.Type> y))
 23310  	for {
 23311  		x := v_0
 23312  		if v_1.Op != OpNeg32 {
 23313  			break
 23314  		}
 23315  		v_1_0 := v_1.Args[0]
 23316  		if v_1_0.Op != OpAnd32 {
 23317  			break
 23318  		}
 23319  		_ = v_1_0.Args[1]
 23320  		v_1_0_0 := v_1_0.Args[0]
 23321  		v_1_0_1 := v_1_0.Args[1]
 23322  		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
 23323  			y := v_1_0_0
 23324  			if v_1_0_1.Op != OpConst32 {
 23325  				continue
 23326  			}
 23327  			c := auxIntToInt32(v_1_0_1.AuxInt)
 23328  			if !(c&63 == 63) {
 23329  				continue
 23330  			}
 23331  			v.reset(OpRotateLeft64)
 23332  			v0 := b.NewValue0(v.Pos, OpNeg32, y.Type)
 23333  			v0.AddArg(y)
 23334  			v.AddArg2(x, v0)
 23335  			return true
 23336  		}
 23337  		break
 23338  	}
 23339  	// match: (RotateLeft64 x (Neg16 (And16 y (Const16 [c]))))
 23340  	// cond: c&63 == 63
 23341  	// result: (RotateLeft64 x (Neg16 <y.Type> y))
 23342  	for {
 23343  		x := v_0
 23344  		if v_1.Op != OpNeg16 {
 23345  			break
 23346  		}
 23347  		v_1_0 := v_1.Args[0]
 23348  		if v_1_0.Op != OpAnd16 {
 23349  			break
 23350  		}
 23351  		_ = v_1_0.Args[1]
 23352  		v_1_0_0 := v_1_0.Args[0]
 23353  		v_1_0_1 := v_1_0.Args[1]
 23354  		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
 23355  			y := v_1_0_0
 23356  			if v_1_0_1.Op != OpConst16 {
 23357  				continue
 23358  			}
 23359  			c := auxIntToInt16(v_1_0_1.AuxInt)
 23360  			if !(c&63 == 63) {
 23361  				continue
 23362  			}
 23363  			v.reset(OpRotateLeft64)
 23364  			v0 := b.NewValue0(v.Pos, OpNeg16, y.Type)
 23365  			v0.AddArg(y)
 23366  			v.AddArg2(x, v0)
 23367  			return true
 23368  		}
 23369  		break
 23370  	}
 23371  	// match: (RotateLeft64 x (Neg8 (And8 y (Const8 [c]))))
 23372  	// cond: c&63 == 63
 23373  	// result: (RotateLeft64 x (Neg8 <y.Type> y))
 23374  	for {
 23375  		x := v_0
 23376  		if v_1.Op != OpNeg8 {
 23377  			break
 23378  		}
 23379  		v_1_0 := v_1.Args[0]
 23380  		if v_1_0.Op != OpAnd8 {
 23381  			break
 23382  		}
 23383  		_ = v_1_0.Args[1]
 23384  		v_1_0_0 := v_1_0.Args[0]
 23385  		v_1_0_1 := v_1_0.Args[1]
 23386  		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
 23387  			y := v_1_0_0
 23388  			if v_1_0_1.Op != OpConst8 {
 23389  				continue
 23390  			}
 23391  			c := auxIntToInt8(v_1_0_1.AuxInt)
 23392  			if !(c&63 == 63) {
 23393  				continue
 23394  			}
 23395  			v.reset(OpRotateLeft64)
 23396  			v0 := b.NewValue0(v.Pos, OpNeg8, y.Type)
 23397  			v0.AddArg(y)
 23398  			v.AddArg2(x, v0)
 23399  			return true
 23400  		}
 23401  		break
 23402  	}
 23403  	// match: (RotateLeft64 x (Add64 y (Const64 [c])))
 23404  	// cond: c&63 == 0
 23405  	// result: (RotateLeft64 x y)
 23406  	for {
 23407  		x := v_0
 23408  		if v_1.Op != OpAdd64 {
 23409  			break
 23410  		}
 23411  		_ = v_1.Args[1]
 23412  		v_1_0 := v_1.Args[0]
 23413  		v_1_1 := v_1.Args[1]
 23414  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 23415  			y := v_1_0
 23416  			if v_1_1.Op != OpConst64 {
 23417  				continue
 23418  			}
 23419  			c := auxIntToInt64(v_1_1.AuxInt)
 23420  			if !(c&63 == 0) {
 23421  				continue
 23422  			}
 23423  			v.reset(OpRotateLeft64)
 23424  			v.AddArg2(x, y)
 23425  			return true
 23426  		}
 23427  		break
 23428  	}
 23429  	// match: (RotateLeft64 x (Add32 y (Const32 [c])))
 23430  	// cond: c&63 == 0
 23431  	// result: (RotateLeft64 x y)
 23432  	for {
 23433  		x := v_0
 23434  		if v_1.Op != OpAdd32 {
 23435  			break
 23436  		}
 23437  		_ = v_1.Args[1]
 23438  		v_1_0 := v_1.Args[0]
 23439  		v_1_1 := v_1.Args[1]
 23440  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 23441  			y := v_1_0
 23442  			if v_1_1.Op != OpConst32 {
 23443  				continue
 23444  			}
 23445  			c := auxIntToInt32(v_1_1.AuxInt)
 23446  			if !(c&63 == 0) {
 23447  				continue
 23448  			}
 23449  			v.reset(OpRotateLeft64)
 23450  			v.AddArg2(x, y)
 23451  			return true
 23452  		}
 23453  		break
 23454  	}
 23455  	// match: (RotateLeft64 x (Add16 y (Const16 [c])))
 23456  	// cond: c&63 == 0
 23457  	// result: (RotateLeft64 x y)
 23458  	for {
 23459  		x := v_0
 23460  		if v_1.Op != OpAdd16 {
 23461  			break
 23462  		}
 23463  		_ = v_1.Args[1]
 23464  		v_1_0 := v_1.Args[0]
 23465  		v_1_1 := v_1.Args[1]
 23466  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 23467  			y := v_1_0
 23468  			if v_1_1.Op != OpConst16 {
 23469  				continue
 23470  			}
 23471  			c := auxIntToInt16(v_1_1.AuxInt)
 23472  			if !(c&63 == 0) {
 23473  				continue
 23474  			}
 23475  			v.reset(OpRotateLeft64)
 23476  			v.AddArg2(x, y)
 23477  			return true
 23478  		}
 23479  		break
 23480  	}
 23481  	// match: (RotateLeft64 x (Add8 y (Const8 [c])))
 23482  	// cond: c&63 == 0
 23483  	// result: (RotateLeft64 x y)
 23484  	for {
 23485  		x := v_0
 23486  		if v_1.Op != OpAdd8 {
 23487  			break
 23488  		}
 23489  		_ = v_1.Args[1]
 23490  		v_1_0 := v_1.Args[0]
 23491  		v_1_1 := v_1.Args[1]
 23492  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 23493  			y := v_1_0
 23494  			if v_1_1.Op != OpConst8 {
 23495  				continue
 23496  			}
 23497  			c := auxIntToInt8(v_1_1.AuxInt)
 23498  			if !(c&63 == 0) {
 23499  				continue
 23500  			}
 23501  			v.reset(OpRotateLeft64)
 23502  			v.AddArg2(x, y)
 23503  			return true
 23504  		}
 23505  		break
 23506  	}
 23507  	// match: (RotateLeft64 x (Sub64 (Const64 [c]) y))
 23508  	// cond: c&63 == 0
 23509  	// result: (RotateLeft64 x (Neg64 <y.Type> y))
 23510  	for {
 23511  		x := v_0
 23512  		if v_1.Op != OpSub64 {
 23513  			break
 23514  		}
 23515  		y := v_1.Args[1]
 23516  		v_1_0 := v_1.Args[0]
 23517  		if v_1_0.Op != OpConst64 {
 23518  			break
 23519  		}
 23520  		c := auxIntToInt64(v_1_0.AuxInt)
 23521  		if !(c&63 == 0) {
 23522  			break
 23523  		}
 23524  		v.reset(OpRotateLeft64)
 23525  		v0 := b.NewValue0(v.Pos, OpNeg64, y.Type)
 23526  		v0.AddArg(y)
 23527  		v.AddArg2(x, v0)
 23528  		return true
 23529  	}
 23530  	// match: (RotateLeft64 x (Sub32 (Const32 [c]) y))
 23531  	// cond: c&63 == 0
 23532  	// result: (RotateLeft64 x (Neg32 <y.Type> y))
 23533  	for {
 23534  		x := v_0
 23535  		if v_1.Op != OpSub32 {
 23536  			break
 23537  		}
 23538  		y := v_1.Args[1]
 23539  		v_1_0 := v_1.Args[0]
 23540  		if v_1_0.Op != OpConst32 {
 23541  			break
 23542  		}
 23543  		c := auxIntToInt32(v_1_0.AuxInt)
 23544  		if !(c&63 == 0) {
 23545  			break
 23546  		}
 23547  		v.reset(OpRotateLeft64)
 23548  		v0 := b.NewValue0(v.Pos, OpNeg32, y.Type)
 23549  		v0.AddArg(y)
 23550  		v.AddArg2(x, v0)
 23551  		return true
 23552  	}
 23553  	// match: (RotateLeft64 x (Sub16 (Const16 [c]) y))
 23554  	// cond: c&63 == 0
 23555  	// result: (RotateLeft64 x (Neg16 <y.Type> y))
 23556  	for {
 23557  		x := v_0
 23558  		if v_1.Op != OpSub16 {
 23559  			break
 23560  		}
 23561  		y := v_1.Args[1]
 23562  		v_1_0 := v_1.Args[0]
 23563  		if v_1_0.Op != OpConst16 {
 23564  			break
 23565  		}
 23566  		c := auxIntToInt16(v_1_0.AuxInt)
 23567  		if !(c&63 == 0) {
 23568  			break
 23569  		}
 23570  		v.reset(OpRotateLeft64)
 23571  		v0 := b.NewValue0(v.Pos, OpNeg16, y.Type)
 23572  		v0.AddArg(y)
 23573  		v.AddArg2(x, v0)
 23574  		return true
 23575  	}
 23576  	// match: (RotateLeft64 x (Sub8 (Const8 [c]) y))
 23577  	// cond: c&63 == 0
 23578  	// result: (RotateLeft64 x (Neg8 <y.Type> y))
 23579  	for {
 23580  		x := v_0
 23581  		if v_1.Op != OpSub8 {
 23582  			break
 23583  		}
 23584  		y := v_1.Args[1]
 23585  		v_1_0 := v_1.Args[0]
 23586  		if v_1_0.Op != OpConst8 {
 23587  			break
 23588  		}
 23589  		c := auxIntToInt8(v_1_0.AuxInt)
 23590  		if !(c&63 == 0) {
 23591  			break
 23592  		}
 23593  		v.reset(OpRotateLeft64)
 23594  		v0 := b.NewValue0(v.Pos, OpNeg8, y.Type)
 23595  		v0.AddArg(y)
 23596  		v.AddArg2(x, v0)
 23597  		return true
 23598  	}
 23599  	// match: (RotateLeft64 x (Const64 <t> [c]))
 23600  	// cond: config.PtrSize == 4
 23601  	// result: (RotateLeft64 x (Const32 <t> [int32(c)]))
 23602  	for {
 23603  		x := v_0
 23604  		if v_1.Op != OpConst64 {
 23605  			break
 23606  		}
 23607  		t := v_1.Type
 23608  		c := auxIntToInt64(v_1.AuxInt)
 23609  		if !(config.PtrSize == 4) {
 23610  			break
 23611  		}
 23612  		v.reset(OpRotateLeft64)
 23613  		v0 := b.NewValue0(v.Pos, OpConst32, t)
 23614  		v0.AuxInt = int32ToAuxInt(int32(c))
 23615  		v.AddArg2(x, v0)
 23616  		return true
 23617  	}
 23618  	// match: (RotateLeft64 (RotateLeft64 x c) d)
 23619  	// cond: c.Type.Size() == 8 && d.Type.Size() == 8
 23620  	// result: (RotateLeft64 x (Add64 <c.Type> c d))
 23621  	for {
 23622  		if v_0.Op != OpRotateLeft64 {
 23623  			break
 23624  		}
 23625  		c := v_0.Args[1]
 23626  		x := v_0.Args[0]
 23627  		d := v_1
 23628  		if !(c.Type.Size() == 8 && d.Type.Size() == 8) {
 23629  			break
 23630  		}
 23631  		v.reset(OpRotateLeft64)
 23632  		v0 := b.NewValue0(v.Pos, OpAdd64, c.Type)
 23633  		v0.AddArg2(c, d)
 23634  		v.AddArg2(x, v0)
 23635  		return true
 23636  	}
 23637  	// match: (RotateLeft64 (RotateLeft64 x c) d)
 23638  	// cond: c.Type.Size() == 4 && d.Type.Size() == 4
 23639  	// result: (RotateLeft64 x (Add32 <c.Type> c d))
 23640  	for {
 23641  		if v_0.Op != OpRotateLeft64 {
 23642  			break
 23643  		}
 23644  		c := v_0.Args[1]
 23645  		x := v_0.Args[0]
 23646  		d := v_1
 23647  		if !(c.Type.Size() == 4 && d.Type.Size() == 4) {
 23648  			break
 23649  		}
 23650  		v.reset(OpRotateLeft64)
 23651  		v0 := b.NewValue0(v.Pos, OpAdd32, c.Type)
 23652  		v0.AddArg2(c, d)
 23653  		v.AddArg2(x, v0)
 23654  		return true
 23655  	}
 23656  	// match: (RotateLeft64 (RotateLeft64 x c) d)
 23657  	// cond: c.Type.Size() == 2 && d.Type.Size() == 2
 23658  	// result: (RotateLeft64 x (Add16 <c.Type> c d))
 23659  	for {
 23660  		if v_0.Op != OpRotateLeft64 {
 23661  			break
 23662  		}
 23663  		c := v_0.Args[1]
 23664  		x := v_0.Args[0]
 23665  		d := v_1
 23666  		if !(c.Type.Size() == 2 && d.Type.Size() == 2) {
 23667  			break
 23668  		}
 23669  		v.reset(OpRotateLeft64)
 23670  		v0 := b.NewValue0(v.Pos, OpAdd16, c.Type)
 23671  		v0.AddArg2(c, d)
 23672  		v.AddArg2(x, v0)
 23673  		return true
 23674  	}
 23675  	// match: (RotateLeft64 (RotateLeft64 x c) d)
 23676  	// cond: c.Type.Size() == 1 && d.Type.Size() == 1
 23677  	// result: (RotateLeft64 x (Add8 <c.Type> c d))
 23678  	for {
 23679  		if v_0.Op != OpRotateLeft64 {
 23680  			break
 23681  		}
 23682  		c := v_0.Args[1]
 23683  		x := v_0.Args[0]
 23684  		d := v_1
 23685  		if !(c.Type.Size() == 1 && d.Type.Size() == 1) {
 23686  			break
 23687  		}
 23688  		v.reset(OpRotateLeft64)
 23689  		v0 := b.NewValue0(v.Pos, OpAdd8, c.Type)
 23690  		v0.AddArg2(c, d)
 23691  		v.AddArg2(x, v0)
 23692  		return true
 23693  	}
 23694  	return false
 23695  }
 23696  func rewriteValuegeneric_OpRotateLeft8(v *Value) bool {
 23697  	v_1 := v.Args[1]
 23698  	v_0 := v.Args[0]
 23699  	b := v.Block
 23700  	config := b.Func.Config
 23701  	// match: (RotateLeft8 x (Const8 [c]))
 23702  	// cond: c%8 == 0
 23703  	// result: x
 23704  	for {
 23705  		x := v_0
 23706  		if v_1.Op != OpConst8 {
 23707  			break
 23708  		}
 23709  		c := auxIntToInt8(v_1.AuxInt)
 23710  		if !(c%8 == 0) {
 23711  			break
 23712  		}
 23713  		v.copyOf(x)
 23714  		return true
 23715  	}
 23716  	// match: (RotateLeft8 x (And64 y (Const64 [c])))
 23717  	// cond: c&7 == 7
 23718  	// result: (RotateLeft8 x y)
 23719  	for {
 23720  		x := v_0
 23721  		if v_1.Op != OpAnd64 {
 23722  			break
 23723  		}
 23724  		_ = v_1.Args[1]
 23725  		v_1_0 := v_1.Args[0]
 23726  		v_1_1 := v_1.Args[1]
 23727  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 23728  			y := v_1_0
 23729  			if v_1_1.Op != OpConst64 {
 23730  				continue
 23731  			}
 23732  			c := auxIntToInt64(v_1_1.AuxInt)
 23733  			if !(c&7 == 7) {
 23734  				continue
 23735  			}
 23736  			v.reset(OpRotateLeft8)
 23737  			v.AddArg2(x, y)
 23738  			return true
 23739  		}
 23740  		break
 23741  	}
 23742  	// match: (RotateLeft8 x (And32 y (Const32 [c])))
 23743  	// cond: c&7 == 7
 23744  	// result: (RotateLeft8 x y)
 23745  	for {
 23746  		x := v_0
 23747  		if v_1.Op != OpAnd32 {
 23748  			break
 23749  		}
 23750  		_ = v_1.Args[1]
 23751  		v_1_0 := v_1.Args[0]
 23752  		v_1_1 := v_1.Args[1]
 23753  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 23754  			y := v_1_0
 23755  			if v_1_1.Op != OpConst32 {
 23756  				continue
 23757  			}
 23758  			c := auxIntToInt32(v_1_1.AuxInt)
 23759  			if !(c&7 == 7) {
 23760  				continue
 23761  			}
 23762  			v.reset(OpRotateLeft8)
 23763  			v.AddArg2(x, y)
 23764  			return true
 23765  		}
 23766  		break
 23767  	}
 23768  	// match: (RotateLeft8 x (And16 y (Const16 [c])))
 23769  	// cond: c&7 == 7
 23770  	// result: (RotateLeft8 x y)
 23771  	for {
 23772  		x := v_0
 23773  		if v_1.Op != OpAnd16 {
 23774  			break
 23775  		}
 23776  		_ = v_1.Args[1]
 23777  		v_1_0 := v_1.Args[0]
 23778  		v_1_1 := v_1.Args[1]
 23779  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 23780  			y := v_1_0
 23781  			if v_1_1.Op != OpConst16 {
 23782  				continue
 23783  			}
 23784  			c := auxIntToInt16(v_1_1.AuxInt)
 23785  			if !(c&7 == 7) {
 23786  				continue
 23787  			}
 23788  			v.reset(OpRotateLeft8)
 23789  			v.AddArg2(x, y)
 23790  			return true
 23791  		}
 23792  		break
 23793  	}
 23794  	// match: (RotateLeft8 x (And8 y (Const8 [c])))
 23795  	// cond: c&7 == 7
 23796  	// result: (RotateLeft8 x y)
 23797  	for {
 23798  		x := v_0
 23799  		if v_1.Op != OpAnd8 {
 23800  			break
 23801  		}
 23802  		_ = v_1.Args[1]
 23803  		v_1_0 := v_1.Args[0]
 23804  		v_1_1 := v_1.Args[1]
 23805  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 23806  			y := v_1_0
 23807  			if v_1_1.Op != OpConst8 {
 23808  				continue
 23809  			}
 23810  			c := auxIntToInt8(v_1_1.AuxInt)
 23811  			if !(c&7 == 7) {
 23812  				continue
 23813  			}
 23814  			v.reset(OpRotateLeft8)
 23815  			v.AddArg2(x, y)
 23816  			return true
 23817  		}
 23818  		break
 23819  	}
 23820  	// match: (RotateLeft8 x (Neg64 (And64 y (Const64 [c]))))
 23821  	// cond: c&7 == 7
 23822  	// result: (RotateLeft8 x (Neg64 <y.Type> y))
 23823  	for {
 23824  		x := v_0
 23825  		if v_1.Op != OpNeg64 {
 23826  			break
 23827  		}
 23828  		v_1_0 := v_1.Args[0]
 23829  		if v_1_0.Op != OpAnd64 {
 23830  			break
 23831  		}
 23832  		_ = v_1_0.Args[1]
 23833  		v_1_0_0 := v_1_0.Args[0]
 23834  		v_1_0_1 := v_1_0.Args[1]
 23835  		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
 23836  			y := v_1_0_0
 23837  			if v_1_0_1.Op != OpConst64 {
 23838  				continue
 23839  			}
 23840  			c := auxIntToInt64(v_1_0_1.AuxInt)
 23841  			if !(c&7 == 7) {
 23842  				continue
 23843  			}
 23844  			v.reset(OpRotateLeft8)
 23845  			v0 := b.NewValue0(v.Pos, OpNeg64, y.Type)
 23846  			v0.AddArg(y)
 23847  			v.AddArg2(x, v0)
 23848  			return true
 23849  		}
 23850  		break
 23851  	}
 23852  	// match: (RotateLeft8 x (Neg32 (And32 y (Const32 [c]))))
 23853  	// cond: c&7 == 7
 23854  	// result: (RotateLeft8 x (Neg32 <y.Type> y))
 23855  	for {
 23856  		x := v_0
 23857  		if v_1.Op != OpNeg32 {
 23858  			break
 23859  		}
 23860  		v_1_0 := v_1.Args[0]
 23861  		if v_1_0.Op != OpAnd32 {
 23862  			break
 23863  		}
 23864  		_ = v_1_0.Args[1]
 23865  		v_1_0_0 := v_1_0.Args[0]
 23866  		v_1_0_1 := v_1_0.Args[1]
 23867  		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
 23868  			y := v_1_0_0
 23869  			if v_1_0_1.Op != OpConst32 {
 23870  				continue
 23871  			}
 23872  			c := auxIntToInt32(v_1_0_1.AuxInt)
 23873  			if !(c&7 == 7) {
 23874  				continue
 23875  			}
 23876  			v.reset(OpRotateLeft8)
 23877  			v0 := b.NewValue0(v.Pos, OpNeg32, y.Type)
 23878  			v0.AddArg(y)
 23879  			v.AddArg2(x, v0)
 23880  			return true
 23881  		}
 23882  		break
 23883  	}
 23884  	// match: (RotateLeft8 x (Neg16 (And16 y (Const16 [c]))))
 23885  	// cond: c&7 == 7
 23886  	// result: (RotateLeft8 x (Neg16 <y.Type> y))
 23887  	for {
 23888  		x := v_0
 23889  		if v_1.Op != OpNeg16 {
 23890  			break
 23891  		}
 23892  		v_1_0 := v_1.Args[0]
 23893  		if v_1_0.Op != OpAnd16 {
 23894  			break
 23895  		}
 23896  		_ = v_1_0.Args[1]
 23897  		v_1_0_0 := v_1_0.Args[0]
 23898  		v_1_0_1 := v_1_0.Args[1]
 23899  		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
 23900  			y := v_1_0_0
 23901  			if v_1_0_1.Op != OpConst16 {
 23902  				continue
 23903  			}
 23904  			c := auxIntToInt16(v_1_0_1.AuxInt)
 23905  			if !(c&7 == 7) {
 23906  				continue
 23907  			}
 23908  			v.reset(OpRotateLeft8)
 23909  			v0 := b.NewValue0(v.Pos, OpNeg16, y.Type)
 23910  			v0.AddArg(y)
 23911  			v.AddArg2(x, v0)
 23912  			return true
 23913  		}
 23914  		break
 23915  	}
 23916  	// match: (RotateLeft8 x (Neg8 (And8 y (Const8 [c]))))
 23917  	// cond: c&7 == 7
 23918  	// result: (RotateLeft8 x (Neg8 <y.Type> y))
 23919  	for {
 23920  		x := v_0
 23921  		if v_1.Op != OpNeg8 {
 23922  			break
 23923  		}
 23924  		v_1_0 := v_1.Args[0]
 23925  		if v_1_0.Op != OpAnd8 {
 23926  			break
 23927  		}
 23928  		_ = v_1_0.Args[1]
 23929  		v_1_0_0 := v_1_0.Args[0]
 23930  		v_1_0_1 := v_1_0.Args[1]
 23931  		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
 23932  			y := v_1_0_0
 23933  			if v_1_0_1.Op != OpConst8 {
 23934  				continue
 23935  			}
 23936  			c := auxIntToInt8(v_1_0_1.AuxInt)
 23937  			if !(c&7 == 7) {
 23938  				continue
 23939  			}
 23940  			v.reset(OpRotateLeft8)
 23941  			v0 := b.NewValue0(v.Pos, OpNeg8, y.Type)
 23942  			v0.AddArg(y)
 23943  			v.AddArg2(x, v0)
 23944  			return true
 23945  		}
 23946  		break
 23947  	}
 23948  	// match: (RotateLeft8 x (Add64 y (Const64 [c])))
 23949  	// cond: c&7 == 0
 23950  	// result: (RotateLeft8 x y)
 23951  	for {
 23952  		x := v_0
 23953  		if v_1.Op != OpAdd64 {
 23954  			break
 23955  		}
 23956  		_ = v_1.Args[1]
 23957  		v_1_0 := v_1.Args[0]
 23958  		v_1_1 := v_1.Args[1]
 23959  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 23960  			y := v_1_0
 23961  			if v_1_1.Op != OpConst64 {
 23962  				continue
 23963  			}
 23964  			c := auxIntToInt64(v_1_1.AuxInt)
 23965  			if !(c&7 == 0) {
 23966  				continue
 23967  			}
 23968  			v.reset(OpRotateLeft8)
 23969  			v.AddArg2(x, y)
 23970  			return true
 23971  		}
 23972  		break
 23973  	}
 23974  	// match: (RotateLeft8 x (Add32 y (Const32 [c])))
 23975  	// cond: c&7 == 0
 23976  	// result: (RotateLeft8 x y)
 23977  	for {
 23978  		x := v_0
 23979  		if v_1.Op != OpAdd32 {
 23980  			break
 23981  		}
 23982  		_ = v_1.Args[1]
 23983  		v_1_0 := v_1.Args[0]
 23984  		v_1_1 := v_1.Args[1]
 23985  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 23986  			y := v_1_0
 23987  			if v_1_1.Op != OpConst32 {
 23988  				continue
 23989  			}
 23990  			c := auxIntToInt32(v_1_1.AuxInt)
 23991  			if !(c&7 == 0) {
 23992  				continue
 23993  			}
 23994  			v.reset(OpRotateLeft8)
 23995  			v.AddArg2(x, y)
 23996  			return true
 23997  		}
 23998  		break
 23999  	}
 24000  	// match: (RotateLeft8 x (Add16 y (Const16 [c])))
 24001  	// cond: c&7 == 0
 24002  	// result: (RotateLeft8 x y)
 24003  	for {
 24004  		x := v_0
 24005  		if v_1.Op != OpAdd16 {
 24006  			break
 24007  		}
 24008  		_ = v_1.Args[1]
 24009  		v_1_0 := v_1.Args[0]
 24010  		v_1_1 := v_1.Args[1]
 24011  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 24012  			y := v_1_0
 24013  			if v_1_1.Op != OpConst16 {
 24014  				continue
 24015  			}
 24016  			c := auxIntToInt16(v_1_1.AuxInt)
 24017  			if !(c&7 == 0) {
 24018  				continue
 24019  			}
 24020  			v.reset(OpRotateLeft8)
 24021  			v.AddArg2(x, y)
 24022  			return true
 24023  		}
 24024  		break
 24025  	}
 24026  	// match: (RotateLeft8 x (Add8 y (Const8 [c])))
 24027  	// cond: c&7 == 0
 24028  	// result: (RotateLeft8 x y)
 24029  	for {
 24030  		x := v_0
 24031  		if v_1.Op != OpAdd8 {
 24032  			break
 24033  		}
 24034  		_ = v_1.Args[1]
 24035  		v_1_0 := v_1.Args[0]
 24036  		v_1_1 := v_1.Args[1]
 24037  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 24038  			y := v_1_0
 24039  			if v_1_1.Op != OpConst8 {
 24040  				continue
 24041  			}
 24042  			c := auxIntToInt8(v_1_1.AuxInt)
 24043  			if !(c&7 == 0) {
 24044  				continue
 24045  			}
 24046  			v.reset(OpRotateLeft8)
 24047  			v.AddArg2(x, y)
 24048  			return true
 24049  		}
 24050  		break
 24051  	}
 24052  	// match: (RotateLeft8 x (Sub64 (Const64 [c]) y))
 24053  	// cond: c&7 == 0
 24054  	// result: (RotateLeft8 x (Neg64 <y.Type> y))
 24055  	for {
 24056  		x := v_0
 24057  		if v_1.Op != OpSub64 {
 24058  			break
 24059  		}
 24060  		y := v_1.Args[1]
 24061  		v_1_0 := v_1.Args[0]
 24062  		if v_1_0.Op != OpConst64 {
 24063  			break
 24064  		}
 24065  		c := auxIntToInt64(v_1_0.AuxInt)
 24066  		if !(c&7 == 0) {
 24067  			break
 24068  		}
 24069  		v.reset(OpRotateLeft8)
 24070  		v0 := b.NewValue0(v.Pos, OpNeg64, y.Type)
 24071  		v0.AddArg(y)
 24072  		v.AddArg2(x, v0)
 24073  		return true
 24074  	}
 24075  	// match: (RotateLeft8 x (Sub32 (Const32 [c]) y))
 24076  	// cond: c&7 == 0
 24077  	// result: (RotateLeft8 x (Neg32 <y.Type> y))
 24078  	for {
 24079  		x := v_0
 24080  		if v_1.Op != OpSub32 {
 24081  			break
 24082  		}
 24083  		y := v_1.Args[1]
 24084  		v_1_0 := v_1.Args[0]
 24085  		if v_1_0.Op != OpConst32 {
 24086  			break
 24087  		}
 24088  		c := auxIntToInt32(v_1_0.AuxInt)
 24089  		if !(c&7 == 0) {
 24090  			break
 24091  		}
 24092  		v.reset(OpRotateLeft8)
 24093  		v0 := b.NewValue0(v.Pos, OpNeg32, y.Type)
 24094  		v0.AddArg(y)
 24095  		v.AddArg2(x, v0)
 24096  		return true
 24097  	}
 24098  	// match: (RotateLeft8 x (Sub16 (Const16 [c]) y))
 24099  	// cond: c&7 == 0
 24100  	// result: (RotateLeft8 x (Neg16 <y.Type> y))
 24101  	for {
 24102  		x := v_0
 24103  		if v_1.Op != OpSub16 {
 24104  			break
 24105  		}
 24106  		y := v_1.Args[1]
 24107  		v_1_0 := v_1.Args[0]
 24108  		if v_1_0.Op != OpConst16 {
 24109  			break
 24110  		}
 24111  		c := auxIntToInt16(v_1_0.AuxInt)
 24112  		if !(c&7 == 0) {
 24113  			break
 24114  		}
 24115  		v.reset(OpRotateLeft8)
 24116  		v0 := b.NewValue0(v.Pos, OpNeg16, y.Type)
 24117  		v0.AddArg(y)
 24118  		v.AddArg2(x, v0)
 24119  		return true
 24120  	}
 24121  	// match: (RotateLeft8 x (Sub8 (Const8 [c]) y))
 24122  	// cond: c&7 == 0
 24123  	// result: (RotateLeft8 x (Neg8 <y.Type> y))
 24124  	for {
 24125  		x := v_0
 24126  		if v_1.Op != OpSub8 {
 24127  			break
 24128  		}
 24129  		y := v_1.Args[1]
 24130  		v_1_0 := v_1.Args[0]
 24131  		if v_1_0.Op != OpConst8 {
 24132  			break
 24133  		}
 24134  		c := auxIntToInt8(v_1_0.AuxInt)
 24135  		if !(c&7 == 0) {
 24136  			break
 24137  		}
 24138  		v.reset(OpRotateLeft8)
 24139  		v0 := b.NewValue0(v.Pos, OpNeg8, y.Type)
 24140  		v0.AddArg(y)
 24141  		v.AddArg2(x, v0)
 24142  		return true
 24143  	}
 24144  	// match: (RotateLeft8 x (Const64 <t> [c]))
 24145  	// cond: config.PtrSize == 4
 24146  	// result: (RotateLeft8 x (Const32 <t> [int32(c)]))
 24147  	for {
 24148  		x := v_0
 24149  		if v_1.Op != OpConst64 {
 24150  			break
 24151  		}
 24152  		t := v_1.Type
 24153  		c := auxIntToInt64(v_1.AuxInt)
 24154  		if !(config.PtrSize == 4) {
 24155  			break
 24156  		}
 24157  		v.reset(OpRotateLeft8)
 24158  		v0 := b.NewValue0(v.Pos, OpConst32, t)
 24159  		v0.AuxInt = int32ToAuxInt(int32(c))
 24160  		v.AddArg2(x, v0)
 24161  		return true
 24162  	}
 24163  	// match: (RotateLeft8 (RotateLeft8 x c) d)
 24164  	// cond: c.Type.Size() == 8 && d.Type.Size() == 8
 24165  	// result: (RotateLeft8 x (Add64 <c.Type> c d))
 24166  	for {
 24167  		if v_0.Op != OpRotateLeft8 {
 24168  			break
 24169  		}
 24170  		c := v_0.Args[1]
 24171  		x := v_0.Args[0]
 24172  		d := v_1
 24173  		if !(c.Type.Size() == 8 && d.Type.Size() == 8) {
 24174  			break
 24175  		}
 24176  		v.reset(OpRotateLeft8)
 24177  		v0 := b.NewValue0(v.Pos, OpAdd64, c.Type)
 24178  		v0.AddArg2(c, d)
 24179  		v.AddArg2(x, v0)
 24180  		return true
 24181  	}
 24182  	// match: (RotateLeft8 (RotateLeft8 x c) d)
 24183  	// cond: c.Type.Size() == 4 && d.Type.Size() == 4
 24184  	// result: (RotateLeft8 x (Add32 <c.Type> c d))
 24185  	for {
 24186  		if v_0.Op != OpRotateLeft8 {
 24187  			break
 24188  		}
 24189  		c := v_0.Args[1]
 24190  		x := v_0.Args[0]
 24191  		d := v_1
 24192  		if !(c.Type.Size() == 4 && d.Type.Size() == 4) {
 24193  			break
 24194  		}
 24195  		v.reset(OpRotateLeft8)
 24196  		v0 := b.NewValue0(v.Pos, OpAdd32, c.Type)
 24197  		v0.AddArg2(c, d)
 24198  		v.AddArg2(x, v0)
 24199  		return true
 24200  	}
 24201  	// match: (RotateLeft8 (RotateLeft8 x c) d)
 24202  	// cond: c.Type.Size() == 2 && d.Type.Size() == 2
 24203  	// result: (RotateLeft8 x (Add16 <c.Type> c d))
 24204  	for {
 24205  		if v_0.Op != OpRotateLeft8 {
 24206  			break
 24207  		}
 24208  		c := v_0.Args[1]
 24209  		x := v_0.Args[0]
 24210  		d := v_1
 24211  		if !(c.Type.Size() == 2 && d.Type.Size() == 2) {
 24212  			break
 24213  		}
 24214  		v.reset(OpRotateLeft8)
 24215  		v0 := b.NewValue0(v.Pos, OpAdd16, c.Type)
 24216  		v0.AddArg2(c, d)
 24217  		v.AddArg2(x, v0)
 24218  		return true
 24219  	}
 24220  	// match: (RotateLeft8 (RotateLeft8 x c) d)
 24221  	// cond: c.Type.Size() == 1 && d.Type.Size() == 1
 24222  	// result: (RotateLeft8 x (Add8 <c.Type> c d))
 24223  	for {
 24224  		if v_0.Op != OpRotateLeft8 {
 24225  			break
 24226  		}
 24227  		c := v_0.Args[1]
 24228  		x := v_0.Args[0]
 24229  		d := v_1
 24230  		if !(c.Type.Size() == 1 && d.Type.Size() == 1) {
 24231  			break
 24232  		}
 24233  		v.reset(OpRotateLeft8)
 24234  		v0 := b.NewValue0(v.Pos, OpAdd8, c.Type)
 24235  		v0.AddArg2(c, d)
 24236  		v.AddArg2(x, v0)
 24237  		return true
 24238  	}
 24239  	return false
 24240  }
 24241  func rewriteValuegeneric_OpRound32F(v *Value) bool {
 24242  	v_0 := v.Args[0]
 24243  	// match: (Round32F x:(Const32F))
 24244  	// result: x
 24245  	for {
 24246  		x := v_0
 24247  		if x.Op != OpConst32F {
 24248  			break
 24249  		}
 24250  		v.copyOf(x)
 24251  		return true
 24252  	}
 24253  	return false
 24254  }
 24255  func rewriteValuegeneric_OpRound64F(v *Value) bool {
 24256  	v_0 := v.Args[0]
 24257  	// match: (Round64F x:(Const64F))
 24258  	// result: x
 24259  	for {
 24260  		x := v_0
 24261  		if x.Op != OpConst64F {
 24262  			break
 24263  		}
 24264  		v.copyOf(x)
 24265  		return true
 24266  	}
 24267  	return false
 24268  }
 24269  func rewriteValuegeneric_OpRoundToEven(v *Value) bool {
 24270  	v_0 := v.Args[0]
 24271  	// match: (RoundToEven (Const64F [c]))
 24272  	// result: (Const64F [math.RoundToEven(c)])
 24273  	for {
 24274  		if v_0.Op != OpConst64F {
 24275  			break
 24276  		}
 24277  		c := auxIntToFloat64(v_0.AuxInt)
 24278  		v.reset(OpConst64F)
 24279  		v.AuxInt = float64ToAuxInt(math.RoundToEven(c))
 24280  		return true
 24281  	}
 24282  	return false
 24283  }
 24284  func rewriteValuegeneric_OpRsh16Ux16(v *Value) bool {
 24285  	v_1 := v.Args[1]
 24286  	v_0 := v.Args[0]
 24287  	b := v.Block
 24288  	// match: (Rsh16Ux16 <t> x (Const16 [c]))
 24289  	// result: (Rsh16Ux64 x (Const64 <t> [int64(uint16(c))]))
 24290  	for {
 24291  		t := v.Type
 24292  		x := v_0
 24293  		if v_1.Op != OpConst16 {
 24294  			break
 24295  		}
 24296  		c := auxIntToInt16(v_1.AuxInt)
 24297  		v.reset(OpRsh16Ux64)
 24298  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 24299  		v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
 24300  		v.AddArg2(x, v0)
 24301  		return true
 24302  	}
 24303  	// match: (Rsh16Ux16 (Const16 [0]) _)
 24304  	// result: (Const16 [0])
 24305  	for {
 24306  		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
 24307  			break
 24308  		}
 24309  		v.reset(OpConst16)
 24310  		v.AuxInt = int16ToAuxInt(0)
 24311  		return true
 24312  	}
 24313  	return false
 24314  }
 24315  func rewriteValuegeneric_OpRsh16Ux32(v *Value) bool {
 24316  	v_1 := v.Args[1]
 24317  	v_0 := v.Args[0]
 24318  	b := v.Block
 24319  	// match: (Rsh16Ux32 <t> x (Const32 [c]))
 24320  	// result: (Rsh16Ux64 x (Const64 <t> [int64(uint32(c))]))
 24321  	for {
 24322  		t := v.Type
 24323  		x := v_0
 24324  		if v_1.Op != OpConst32 {
 24325  			break
 24326  		}
 24327  		c := auxIntToInt32(v_1.AuxInt)
 24328  		v.reset(OpRsh16Ux64)
 24329  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 24330  		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
 24331  		v.AddArg2(x, v0)
 24332  		return true
 24333  	}
 24334  	// match: (Rsh16Ux32 (Const16 [0]) _)
 24335  	// result: (Const16 [0])
 24336  	for {
 24337  		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
 24338  			break
 24339  		}
 24340  		v.reset(OpConst16)
 24341  		v.AuxInt = int16ToAuxInt(0)
 24342  		return true
 24343  	}
 24344  	return false
 24345  }
 24346  func rewriteValuegeneric_OpRsh16Ux64(v *Value) bool {
 24347  	v_1 := v.Args[1]
 24348  	v_0 := v.Args[0]
 24349  	b := v.Block
 24350  	typ := &b.Func.Config.Types
 24351  	// match: (Rsh16Ux64 (Const16 [c]) (Const64 [d]))
 24352  	// result: (Const16 [int16(uint16(c) >> uint64(d))])
 24353  	for {
 24354  		if v_0.Op != OpConst16 {
 24355  			break
 24356  		}
 24357  		c := auxIntToInt16(v_0.AuxInt)
 24358  		if v_1.Op != OpConst64 {
 24359  			break
 24360  		}
 24361  		d := auxIntToInt64(v_1.AuxInt)
 24362  		v.reset(OpConst16)
 24363  		v.AuxInt = int16ToAuxInt(int16(uint16(c) >> uint64(d)))
 24364  		return true
 24365  	}
 24366  	// match: (Rsh16Ux64 x (Const64 [0]))
 24367  	// result: x
 24368  	for {
 24369  		x := v_0
 24370  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
 24371  			break
 24372  		}
 24373  		v.copyOf(x)
 24374  		return true
 24375  	}
 24376  	// match: (Rsh16Ux64 (Const16 [0]) _)
 24377  	// result: (Const16 [0])
 24378  	for {
 24379  		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
 24380  			break
 24381  		}
 24382  		v.reset(OpConst16)
 24383  		v.AuxInt = int16ToAuxInt(0)
 24384  		return true
 24385  	}
 24386  	// match: (Rsh16Ux64 _ (Const64 [c]))
 24387  	// cond: uint64(c) >= 16
 24388  	// result: (Const16 [0])
 24389  	for {
 24390  		if v_1.Op != OpConst64 {
 24391  			break
 24392  		}
 24393  		c := auxIntToInt64(v_1.AuxInt)
 24394  		if !(uint64(c) >= 16) {
 24395  			break
 24396  		}
 24397  		v.reset(OpConst16)
 24398  		v.AuxInt = int16ToAuxInt(0)
 24399  		return true
 24400  	}
 24401  	// match: (Rsh16Ux64 <t> (Rsh16Ux64 x (Const64 [c])) (Const64 [d]))
 24402  	// cond: !uaddOvf(c,d)
 24403  	// result: (Rsh16Ux64 x (Const64 <t> [c+d]))
 24404  	for {
 24405  		t := v.Type
 24406  		if v_0.Op != OpRsh16Ux64 {
 24407  			break
 24408  		}
 24409  		_ = v_0.Args[1]
 24410  		x := v_0.Args[0]
 24411  		v_0_1 := v_0.Args[1]
 24412  		if v_0_1.Op != OpConst64 {
 24413  			break
 24414  		}
 24415  		c := auxIntToInt64(v_0_1.AuxInt)
 24416  		if v_1.Op != OpConst64 {
 24417  			break
 24418  		}
 24419  		d := auxIntToInt64(v_1.AuxInt)
 24420  		if !(!uaddOvf(c, d)) {
 24421  			break
 24422  		}
 24423  		v.reset(OpRsh16Ux64)
 24424  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 24425  		v0.AuxInt = int64ToAuxInt(c + d)
 24426  		v.AddArg2(x, v0)
 24427  		return true
 24428  	}
 24429  	// match: (Rsh16Ux64 (Rsh16x64 x _) (Const64 <t> [15]))
 24430  	// result: (Rsh16Ux64 x (Const64 <t> [15]))
 24431  	for {
 24432  		if v_0.Op != OpRsh16x64 {
 24433  			break
 24434  		}
 24435  		x := v_0.Args[0]
 24436  		if v_1.Op != OpConst64 {
 24437  			break
 24438  		}
 24439  		t := v_1.Type
 24440  		if auxIntToInt64(v_1.AuxInt) != 15 {
 24441  			break
 24442  		}
 24443  		v.reset(OpRsh16Ux64)
 24444  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 24445  		v0.AuxInt = int64ToAuxInt(15)
 24446  		v.AddArg2(x, v0)
 24447  		return true
 24448  	}
 24449  	// match: (Rsh16Ux64 i:(Lsh16x64 x (Const64 [c])) (Const64 [c]))
 24450  	// cond: c >= 0 && c < 16 && i.Uses == 1
 24451  	// result: (And16 x (Const16 <v.Type> [int16(^uint16(0)>>c)]))
 24452  	for {
 24453  		i := v_0
 24454  		if i.Op != OpLsh16x64 {
 24455  			break
 24456  		}
 24457  		_ = i.Args[1]
 24458  		x := i.Args[0]
 24459  		i_1 := i.Args[1]
 24460  		if i_1.Op != OpConst64 {
 24461  			break
 24462  		}
 24463  		c := auxIntToInt64(i_1.AuxInt)
 24464  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 16 && i.Uses == 1) {
 24465  			break
 24466  		}
 24467  		v.reset(OpAnd16)
 24468  		v0 := b.NewValue0(v.Pos, OpConst16, v.Type)
 24469  		v0.AuxInt = int16ToAuxInt(int16(^uint16(0) >> c))
 24470  		v.AddArg2(x, v0)
 24471  		return true
 24472  	}
 24473  	// match: (Rsh16Ux64 (Lsh16x64 (Rsh16Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
 24474  	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
 24475  	// result: (Rsh16Ux64 x (Const64 <typ.UInt64> [c1-c2+c3]))
 24476  	for {
 24477  		if v_0.Op != OpLsh16x64 {
 24478  			break
 24479  		}
 24480  		_ = v_0.Args[1]
 24481  		v_0_0 := v_0.Args[0]
 24482  		if v_0_0.Op != OpRsh16Ux64 {
 24483  			break
 24484  		}
 24485  		_ = v_0_0.Args[1]
 24486  		x := v_0_0.Args[0]
 24487  		v_0_0_1 := v_0_0.Args[1]
 24488  		if v_0_0_1.Op != OpConst64 {
 24489  			break
 24490  		}
 24491  		c1 := auxIntToInt64(v_0_0_1.AuxInt)
 24492  		v_0_1 := v_0.Args[1]
 24493  		if v_0_1.Op != OpConst64 {
 24494  			break
 24495  		}
 24496  		c2 := auxIntToInt64(v_0_1.AuxInt)
 24497  		if v_1.Op != OpConst64 {
 24498  			break
 24499  		}
 24500  		c3 := auxIntToInt64(v_1.AuxInt)
 24501  		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
 24502  			break
 24503  		}
 24504  		v.reset(OpRsh16Ux64)
 24505  		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
 24506  		v0.AuxInt = int64ToAuxInt(c1 - c2 + c3)
 24507  		v.AddArg2(x, v0)
 24508  		return true
 24509  	}
 24510  	// match: (Rsh16Ux64 (Lsh16x64 x (Const64 [8])) (Const64 [8]))
 24511  	// result: (ZeroExt8to16 (Trunc16to8 <typ.UInt8> x))
 24512  	for {
 24513  		if v_0.Op != OpLsh16x64 {
 24514  			break
 24515  		}
 24516  		_ = v_0.Args[1]
 24517  		x := v_0.Args[0]
 24518  		v_0_1 := v_0.Args[1]
 24519  		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 8 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 8 {
 24520  			break
 24521  		}
 24522  		v.reset(OpZeroExt8to16)
 24523  		v0 := b.NewValue0(v.Pos, OpTrunc16to8, typ.UInt8)
 24524  		v0.AddArg(x)
 24525  		v.AddArg(v0)
 24526  		return true
 24527  	}
 24528  	return false
 24529  }
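// exampleLshRshAsMask16 is a hypothetical sketch, not generated code: it
// mirrors the (Rsh16Ux64 i:(Lsh16x64 x (Const64 [c])) (Const64 [c])) rule
// above, which replaces a left shift followed by an unsigned right shift by
// the same amount c (0 <= c < 16) with a mask of the low 16-c bits.
func exampleLshRshAsMask16(x uint16, c uint) uint16 {
	if c >= 16 {
		return 0
	}
	viaShifts := (x << c) >> c
	viaMask := x & (^uint16(0) >> c)
	if viaShifts != viaMask {
		panic("unreachable: both forms keep exactly the low 16-c bits")
	}
	return viaMask
}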
 24530  func rewriteValuegeneric_OpRsh16Ux8(v *Value) bool {
 24531  	v_1 := v.Args[1]
 24532  	v_0 := v.Args[0]
 24533  	b := v.Block
 24534  	// match: (Rsh16Ux8 <t> x (Const8 [c]))
 24535  	// result: (Rsh16Ux64 x (Const64 <t> [int64(uint8(c))]))
 24536  	for {
 24537  		t := v.Type
 24538  		x := v_0
 24539  		if v_1.Op != OpConst8 {
 24540  			break
 24541  		}
 24542  		c := auxIntToInt8(v_1.AuxInt)
 24543  		v.reset(OpRsh16Ux64)
 24544  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 24545  		v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
 24546  		v.AddArg2(x, v0)
 24547  		return true
 24548  	}
 24549  	// match: (Rsh16Ux8 (Const16 [0]) _)
 24550  	// result: (Const16 [0])
 24551  	for {
 24552  		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
 24553  			break
 24554  		}
 24555  		v.reset(OpConst16)
 24556  		v.AuxInt = int16ToAuxInt(0)
 24557  		return true
 24558  	}
 24559  	return false
 24560  }
 24561  func rewriteValuegeneric_OpRsh16x16(v *Value) bool {
 24562  	v_1 := v.Args[1]
 24563  	v_0 := v.Args[0]
 24564  	b := v.Block
 24565  	// match: (Rsh16x16 <t> x (Const16 [c]))
 24566  	// result: (Rsh16x64 x (Const64 <t> [int64(uint16(c))]))
 24567  	for {
 24568  		t := v.Type
 24569  		x := v_0
 24570  		if v_1.Op != OpConst16 {
 24571  			break
 24572  		}
 24573  		c := auxIntToInt16(v_1.AuxInt)
 24574  		v.reset(OpRsh16x64)
 24575  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 24576  		v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
 24577  		v.AddArg2(x, v0)
 24578  		return true
 24579  	}
 24580  	// match: (Rsh16x16 (Const16 [0]) _)
 24581  	// result: (Const16 [0])
 24582  	for {
 24583  		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
 24584  			break
 24585  		}
 24586  		v.reset(OpConst16)
 24587  		v.AuxInt = int16ToAuxInt(0)
 24588  		return true
 24589  	}
 24590  	return false
 24591  }
 24592  func rewriteValuegeneric_OpRsh16x32(v *Value) bool {
 24593  	v_1 := v.Args[1]
 24594  	v_0 := v.Args[0]
 24595  	b := v.Block
 24596  	// match: (Rsh16x32 <t> x (Const32 [c]))
 24597  	// result: (Rsh16x64 x (Const64 <t> [int64(uint32(c))]))
 24598  	for {
 24599  		t := v.Type
 24600  		x := v_0
 24601  		if v_1.Op != OpConst32 {
 24602  			break
 24603  		}
 24604  		c := auxIntToInt32(v_1.AuxInt)
 24605  		v.reset(OpRsh16x64)
 24606  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 24607  		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
 24608  		v.AddArg2(x, v0)
 24609  		return true
 24610  	}
 24611  	// match: (Rsh16x32 (Const16 [0]) _)
 24612  	// result: (Const16 [0])
 24613  	for {
 24614  		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
 24615  			break
 24616  		}
 24617  		v.reset(OpConst16)
 24618  		v.AuxInt = int16ToAuxInt(0)
 24619  		return true
 24620  	}
 24621  	return false
 24622  }
 24623  func rewriteValuegeneric_OpRsh16x64(v *Value) bool {
 24624  	v_1 := v.Args[1]
 24625  	v_0 := v.Args[0]
 24626  	b := v.Block
 24627  	typ := &b.Func.Config.Types
 24628  	// match: (Rsh16x64 (Const16 [c]) (Const64 [d]))
 24629  	// result: (Const16 [c >> uint64(d)])
 24630  	for {
 24631  		if v_0.Op != OpConst16 {
 24632  			break
 24633  		}
 24634  		c := auxIntToInt16(v_0.AuxInt)
 24635  		if v_1.Op != OpConst64 {
 24636  			break
 24637  		}
 24638  		d := auxIntToInt64(v_1.AuxInt)
 24639  		v.reset(OpConst16)
 24640  		v.AuxInt = int16ToAuxInt(c >> uint64(d))
 24641  		return true
 24642  	}
 24643  	// match: (Rsh16x64 x (Const64 [0]))
 24644  	// result: x
 24645  	for {
 24646  		x := v_0
 24647  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
 24648  			break
 24649  		}
 24650  		v.copyOf(x)
 24651  		return true
 24652  	}
 24653  	// match: (Rsh16x64 (Const16 [0]) _)
 24654  	// result: (Const16 [0])
 24655  	for {
 24656  		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
 24657  			break
 24658  		}
 24659  		v.reset(OpConst16)
 24660  		v.AuxInt = int16ToAuxInt(0)
 24661  		return true
 24662  	}
 24663  	// match: (Rsh16x64 <t> (Rsh16x64 x (Const64 [c])) (Const64 [d]))
 24664  	// cond: !uaddOvf(c,d)
 24665  	// result: (Rsh16x64 x (Const64 <t> [c+d]))
 24666  	for {
 24667  		t := v.Type
 24668  		if v_0.Op != OpRsh16x64 {
 24669  			break
 24670  		}
 24671  		_ = v_0.Args[1]
 24672  		x := v_0.Args[0]
 24673  		v_0_1 := v_0.Args[1]
 24674  		if v_0_1.Op != OpConst64 {
 24675  			break
 24676  		}
 24677  		c := auxIntToInt64(v_0_1.AuxInt)
 24678  		if v_1.Op != OpConst64 {
 24679  			break
 24680  		}
 24681  		d := auxIntToInt64(v_1.AuxInt)
 24682  		if !(!uaddOvf(c, d)) {
 24683  			break
 24684  		}
 24685  		v.reset(OpRsh16x64)
 24686  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 24687  		v0.AuxInt = int64ToAuxInt(c + d)
 24688  		v.AddArg2(x, v0)
 24689  		return true
 24690  	}
 24691  	// match: (Rsh16x64 (Lsh16x64 x (Const64 [8])) (Const64 [8]))
 24692  	// result: (SignExt8to16 (Trunc16to8 <typ.Int8> x))
 24693  	for {
 24694  		if v_0.Op != OpLsh16x64 {
 24695  			break
 24696  		}
 24697  		_ = v_0.Args[1]
 24698  		x := v_0.Args[0]
 24699  		v_0_1 := v_0.Args[1]
 24700  		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 8 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 8 {
 24701  			break
 24702  		}
 24703  		v.reset(OpSignExt8to16)
 24704  		v0 := b.NewValue0(v.Pos, OpTrunc16to8, typ.Int8)
 24705  		v0.AddArg(x)
 24706  		v.AddArg(v0)
 24707  		return true
 24708  	}
 24709  	return false
 24710  }
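// exampleShiftPairAsSignExt16 is an illustrative sketch, not generated code:
// the (Rsh16x64 (Lsh16x64 x (Const64 [8])) (Const64 [8])) rule above uses the
// identity that shifting a 16-bit value left and then arithmetically right by
// 8 bits sign-extends its low byte, i.e. SignExt8to16(Trunc16to8 x).
func exampleShiftPairAsSignExt16(x int16) int16 {
	viaShifts := (x << 8) >> 8   // arithmetic right shift on a signed type
	viaConvert := int16(int8(x)) // truncate to 8 bits, then sign-extend
	if viaShifts != viaConvert {
		panic("unreachable: both forms sign-extend the low 8 bits")
	}
	return viaConvert
}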
 24711  func rewriteValuegeneric_OpRsh16x8(v *Value) bool {
 24712  	v_1 := v.Args[1]
 24713  	v_0 := v.Args[0]
 24714  	b := v.Block
 24715  	// match: (Rsh16x8 <t> x (Const8 [c]))
 24716  	// result: (Rsh16x64 x (Const64 <t> [int64(uint8(c))]))
 24717  	for {
 24718  		t := v.Type
 24719  		x := v_0
 24720  		if v_1.Op != OpConst8 {
 24721  			break
 24722  		}
 24723  		c := auxIntToInt8(v_1.AuxInt)
 24724  		v.reset(OpRsh16x64)
 24725  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 24726  		v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
 24727  		v.AddArg2(x, v0)
 24728  		return true
 24729  	}
 24730  	// match: (Rsh16x8 (Const16 [0]) _)
 24731  	// result: (Const16 [0])
 24732  	for {
 24733  		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
 24734  			break
 24735  		}
 24736  		v.reset(OpConst16)
 24737  		v.AuxInt = int16ToAuxInt(0)
 24738  		return true
 24739  	}
 24740  	return false
 24741  }
 24742  func rewriteValuegeneric_OpRsh32Ux16(v *Value) bool {
 24743  	v_1 := v.Args[1]
 24744  	v_0 := v.Args[0]
 24745  	b := v.Block
 24746  	// match: (Rsh32Ux16 <t> x (Const16 [c]))
 24747  	// result: (Rsh32Ux64 x (Const64 <t> [int64(uint16(c))]))
 24748  	for {
 24749  		t := v.Type
 24750  		x := v_0
 24751  		if v_1.Op != OpConst16 {
 24752  			break
 24753  		}
 24754  		c := auxIntToInt16(v_1.AuxInt)
 24755  		v.reset(OpRsh32Ux64)
 24756  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 24757  		v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
 24758  		v.AddArg2(x, v0)
 24759  		return true
 24760  	}
 24761  	// match: (Rsh32Ux16 (Const32 [0]) _)
 24762  	// result: (Const32 [0])
 24763  	for {
 24764  		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 24765  			break
 24766  		}
 24767  		v.reset(OpConst32)
 24768  		v.AuxInt = int32ToAuxInt(0)
 24769  		return true
 24770  	}
 24771  	return false
 24772  }
 24773  func rewriteValuegeneric_OpRsh32Ux32(v *Value) bool {
 24774  	v_1 := v.Args[1]
 24775  	v_0 := v.Args[0]
 24776  	b := v.Block
 24777  	// match: (Rsh32Ux32 <t> x (Const32 [c]))
 24778  	// result: (Rsh32Ux64 x (Const64 <t> [int64(uint32(c))]))
 24779  	for {
 24780  		t := v.Type
 24781  		x := v_0
 24782  		if v_1.Op != OpConst32 {
 24783  			break
 24784  		}
 24785  		c := auxIntToInt32(v_1.AuxInt)
 24786  		v.reset(OpRsh32Ux64)
 24787  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 24788  		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
 24789  		v.AddArg2(x, v0)
 24790  		return true
 24791  	}
 24792  	// match: (Rsh32Ux32 (Const32 [0]) _)
 24793  	// result: (Const32 [0])
 24794  	for {
 24795  		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 24796  			break
 24797  		}
 24798  		v.reset(OpConst32)
 24799  		v.AuxInt = int32ToAuxInt(0)
 24800  		return true
 24801  	}
 24802  	return false
 24803  }
 24804  func rewriteValuegeneric_OpRsh32Ux64(v *Value) bool {
 24805  	v_1 := v.Args[1]
 24806  	v_0 := v.Args[0]
 24807  	b := v.Block
 24808  	typ := &b.Func.Config.Types
 24809  	// match: (Rsh32Ux64 (Const32 [c]) (Const64 [d]))
 24810  	// result: (Const32 [int32(uint32(c) >> uint64(d))])
 24811  	for {
 24812  		if v_0.Op != OpConst32 {
 24813  			break
 24814  		}
 24815  		c := auxIntToInt32(v_0.AuxInt)
 24816  		if v_1.Op != OpConst64 {
 24817  			break
 24818  		}
 24819  		d := auxIntToInt64(v_1.AuxInt)
 24820  		v.reset(OpConst32)
 24821  		v.AuxInt = int32ToAuxInt(int32(uint32(c) >> uint64(d)))
 24822  		return true
 24823  	}
 24824  	// match: (Rsh32Ux64 x (Const64 [0]))
 24825  	// result: x
 24826  	for {
 24827  		x := v_0
 24828  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
 24829  			break
 24830  		}
 24831  		v.copyOf(x)
 24832  		return true
 24833  	}
 24834  	// match: (Rsh32Ux64 (Const32 [0]) _)
 24835  	// result: (Const32 [0])
 24836  	for {
 24837  		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 24838  			break
 24839  		}
 24840  		v.reset(OpConst32)
 24841  		v.AuxInt = int32ToAuxInt(0)
 24842  		return true
 24843  	}
 24844  	// match: (Rsh32Ux64 _ (Const64 [c]))
 24845  	// cond: uint64(c) >= 32
 24846  	// result: (Const32 [0])
 24847  	for {
 24848  		if v_1.Op != OpConst64 {
 24849  			break
 24850  		}
 24851  		c := auxIntToInt64(v_1.AuxInt)
 24852  		if !(uint64(c) >= 32) {
 24853  			break
 24854  		}
 24855  		v.reset(OpConst32)
 24856  		v.AuxInt = int32ToAuxInt(0)
 24857  		return true
 24858  	}
 24859  	// match: (Rsh32Ux64 <t> (Rsh32Ux64 x (Const64 [c])) (Const64 [d]))
 24860  	// cond: !uaddOvf(c,d)
 24861  	// result: (Rsh32Ux64 x (Const64 <t> [c+d]))
 24862  	for {
 24863  		t := v.Type
 24864  		if v_0.Op != OpRsh32Ux64 {
 24865  			break
 24866  		}
 24867  		_ = v_0.Args[1]
 24868  		x := v_0.Args[0]
 24869  		v_0_1 := v_0.Args[1]
 24870  		if v_0_1.Op != OpConst64 {
 24871  			break
 24872  		}
 24873  		c := auxIntToInt64(v_0_1.AuxInt)
 24874  		if v_1.Op != OpConst64 {
 24875  			break
 24876  		}
 24877  		d := auxIntToInt64(v_1.AuxInt)
 24878  		if !(!uaddOvf(c, d)) {
 24879  			break
 24880  		}
 24881  		v.reset(OpRsh32Ux64)
 24882  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 24883  		v0.AuxInt = int64ToAuxInt(c + d)
 24884  		v.AddArg2(x, v0)
 24885  		return true
 24886  	}
 24887  	// match: (Rsh32Ux64 (Rsh32x64 x _) (Const64 <t> [31]))
 24888  	// result: (Rsh32Ux64 x (Const64 <t> [31]))
 24889  	for {
 24890  		if v_0.Op != OpRsh32x64 {
 24891  			break
 24892  		}
 24893  		x := v_0.Args[0]
 24894  		if v_1.Op != OpConst64 {
 24895  			break
 24896  		}
 24897  		t := v_1.Type
 24898  		if auxIntToInt64(v_1.AuxInt) != 31 {
 24899  			break
 24900  		}
 24901  		v.reset(OpRsh32Ux64)
 24902  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 24903  		v0.AuxInt = int64ToAuxInt(31)
 24904  		v.AddArg2(x, v0)
 24905  		return true
 24906  	}
 24907  	// match: (Rsh32Ux64 i:(Lsh32x64 x (Const64 [c])) (Const64 [c]))
 24908  	// cond: c >= 0 && c < 32 && i.Uses == 1
 24909  	// result: (And32 x (Const32 <v.Type> [int32(^uint32(0)>>c)]))
 24910  	for {
 24911  		i := v_0
 24912  		if i.Op != OpLsh32x64 {
 24913  			break
 24914  		}
 24915  		_ = i.Args[1]
 24916  		x := i.Args[0]
 24917  		i_1 := i.Args[1]
 24918  		if i_1.Op != OpConst64 {
 24919  			break
 24920  		}
 24921  		c := auxIntToInt64(i_1.AuxInt)
 24922  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 32 && i.Uses == 1) {
 24923  			break
 24924  		}
 24925  		v.reset(OpAnd32)
 24926  		v0 := b.NewValue0(v.Pos, OpConst32, v.Type)
 24927  		v0.AuxInt = int32ToAuxInt(int32(^uint32(0) >> c))
 24928  		v.AddArg2(x, v0)
 24929  		return true
 24930  	}
 24931  	// match: (Rsh32Ux64 (Lsh32x64 (Rsh32Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
 24932  	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
 24933  	// result: (Rsh32Ux64 x (Const64 <typ.UInt64> [c1-c2+c3]))
 24934  	for {
 24935  		if v_0.Op != OpLsh32x64 {
 24936  			break
 24937  		}
 24938  		_ = v_0.Args[1]
 24939  		v_0_0 := v_0.Args[0]
 24940  		if v_0_0.Op != OpRsh32Ux64 {
 24941  			break
 24942  		}
 24943  		_ = v_0_0.Args[1]
 24944  		x := v_0_0.Args[0]
 24945  		v_0_0_1 := v_0_0.Args[1]
 24946  		if v_0_0_1.Op != OpConst64 {
 24947  			break
 24948  		}
 24949  		c1 := auxIntToInt64(v_0_0_1.AuxInt)
 24950  		v_0_1 := v_0.Args[1]
 24951  		if v_0_1.Op != OpConst64 {
 24952  			break
 24953  		}
 24954  		c2 := auxIntToInt64(v_0_1.AuxInt)
 24955  		if v_1.Op != OpConst64 {
 24956  			break
 24957  		}
 24958  		c3 := auxIntToInt64(v_1.AuxInt)
 24959  		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
 24960  			break
 24961  		}
 24962  		v.reset(OpRsh32Ux64)
 24963  		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
 24964  		v0.AuxInt = int64ToAuxInt(c1 - c2 + c3)
 24965  		v.AddArg2(x, v0)
 24966  		return true
 24967  	}
 24968  	// match: (Rsh32Ux64 (Lsh32x64 x (Const64 [24])) (Const64 [24]))
 24969  	// result: (ZeroExt8to32 (Trunc32to8 <typ.UInt8> x))
 24970  	for {
 24971  		if v_0.Op != OpLsh32x64 {
 24972  			break
 24973  		}
 24974  		_ = v_0.Args[1]
 24975  		x := v_0.Args[0]
 24976  		v_0_1 := v_0.Args[1]
 24977  		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 24 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 24 {
 24978  			break
 24979  		}
 24980  		v.reset(OpZeroExt8to32)
 24981  		v0 := b.NewValue0(v.Pos, OpTrunc32to8, typ.UInt8)
 24982  		v0.AddArg(x)
 24983  		v.AddArg(v0)
 24984  		return true
 24985  	}
 24986  	// match: (Rsh32Ux64 (Lsh32x64 x (Const64 [16])) (Const64 [16]))
 24987  	// result: (ZeroExt16to32 (Trunc32to16 <typ.UInt16> x))
 24988  	for {
 24989  		if v_0.Op != OpLsh32x64 {
 24990  			break
 24991  		}
 24992  		_ = v_0.Args[1]
 24993  		x := v_0.Args[0]
 24994  		v_0_1 := v_0.Args[1]
 24995  		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 16 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 16 {
 24996  			break
 24997  		}
 24998  		v.reset(OpZeroExt16to32)
 24999  		v0 := b.NewValue0(v.Pos, OpTrunc32to16, typ.UInt16)
 25000  		v0.AddArg(x)
 25001  		v.AddArg(v0)
 25002  		return true
 25003  	}
 25004  	return false
 25005  }
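// exampleShiftPairAsZeroExt32 is an illustrative sketch, not generated code:
// the last two Rsh32Ux64 rules above rely on the identity that an unsigned
// left-then-right shift by 32-k bits keeps only the low k bits, which is the
// same as truncating and zero-extending.
func exampleShiftPairAsZeroExt32(x uint32) (lowByte, lowHalf uint32) {
	lowByte = (x << 24) >> 24 // == uint32(uint8(x)),  ZeroExt8to32(Trunc32to8 x)
	lowHalf = (x << 16) >> 16 // == uint32(uint16(x)), ZeroExt16to32(Trunc32to16 x)
	return lowByte, lowHalf
}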
 25006  func rewriteValuegeneric_OpRsh32Ux8(v *Value) bool {
 25007  	v_1 := v.Args[1]
 25008  	v_0 := v.Args[0]
 25009  	b := v.Block
 25010  	// match: (Rsh32Ux8 <t> x (Const8 [c]))
 25011  	// result: (Rsh32Ux64 x (Const64 <t> [int64(uint8(c))]))
 25012  	for {
 25013  		t := v.Type
 25014  		x := v_0
 25015  		if v_1.Op != OpConst8 {
 25016  			break
 25017  		}
 25018  		c := auxIntToInt8(v_1.AuxInt)
 25019  		v.reset(OpRsh32Ux64)
 25020  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 25021  		v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
 25022  		v.AddArg2(x, v0)
 25023  		return true
 25024  	}
 25025  	// match: (Rsh32Ux8 (Const32 [0]) _)
 25026  	// result: (Const32 [0])
 25027  	for {
 25028  		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 25029  			break
 25030  		}
 25031  		v.reset(OpConst32)
 25032  		v.AuxInt = int32ToAuxInt(0)
 25033  		return true
 25034  	}
 25035  	return false
 25036  }
 25037  func rewriteValuegeneric_OpRsh32x16(v *Value) bool {
 25038  	v_1 := v.Args[1]
 25039  	v_0 := v.Args[0]
 25040  	b := v.Block
 25041  	// match: (Rsh32x16 <t> x (Const16 [c]))
 25042  	// result: (Rsh32x64 x (Const64 <t> [int64(uint16(c))]))
 25043  	for {
 25044  		t := v.Type
 25045  		x := v_0
 25046  		if v_1.Op != OpConst16 {
 25047  			break
 25048  		}
 25049  		c := auxIntToInt16(v_1.AuxInt)
 25050  		v.reset(OpRsh32x64)
 25051  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 25052  		v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
 25053  		v.AddArg2(x, v0)
 25054  		return true
 25055  	}
 25056  	// match: (Rsh32x16 (Const32 [0]) _)
 25057  	// result: (Const32 [0])
 25058  	for {
 25059  		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 25060  			break
 25061  		}
 25062  		v.reset(OpConst32)
 25063  		v.AuxInt = int32ToAuxInt(0)
 25064  		return true
 25065  	}
 25066  	return false
 25067  }
 25068  func rewriteValuegeneric_OpRsh32x32(v *Value) bool {
 25069  	v_1 := v.Args[1]
 25070  	v_0 := v.Args[0]
 25071  	b := v.Block
 25072  	// match: (Rsh32x32 <t> x (Const32 [c]))
 25073  	// result: (Rsh32x64 x (Const64 <t> [int64(uint32(c))]))
 25074  	for {
 25075  		t := v.Type
 25076  		x := v_0
 25077  		if v_1.Op != OpConst32 {
 25078  			break
 25079  		}
 25080  		c := auxIntToInt32(v_1.AuxInt)
 25081  		v.reset(OpRsh32x64)
 25082  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 25083  		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
 25084  		v.AddArg2(x, v0)
 25085  		return true
 25086  	}
 25087  	// match: (Rsh32x32 (Const32 [0]) _)
 25088  	// result: (Const32 [0])
 25089  	for {
 25090  		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 25091  			break
 25092  		}
 25093  		v.reset(OpConst32)
 25094  		v.AuxInt = int32ToAuxInt(0)
 25095  		return true
 25096  	}
 25097  	return false
 25098  }
 25099  func rewriteValuegeneric_OpRsh32x64(v *Value) bool {
 25100  	v_1 := v.Args[1]
 25101  	v_0 := v.Args[0]
 25102  	b := v.Block
 25103  	typ := &b.Func.Config.Types
 25104  	// match: (Rsh32x64 (Const32 [c]) (Const64 [d]))
 25105  	// result: (Const32 [c >> uint64(d)])
 25106  	for {
 25107  		if v_0.Op != OpConst32 {
 25108  			break
 25109  		}
 25110  		c := auxIntToInt32(v_0.AuxInt)
 25111  		if v_1.Op != OpConst64 {
 25112  			break
 25113  		}
 25114  		d := auxIntToInt64(v_1.AuxInt)
 25115  		v.reset(OpConst32)
 25116  		v.AuxInt = int32ToAuxInt(c >> uint64(d))
 25117  		return true
 25118  	}
 25119  	// match: (Rsh32x64 x (Const64 [0]))
 25120  	// result: x
 25121  	for {
 25122  		x := v_0
 25123  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
 25124  			break
 25125  		}
 25126  		v.copyOf(x)
 25127  		return true
 25128  	}
 25129  	// match: (Rsh32x64 (Const32 [0]) _)
 25130  	// result: (Const32 [0])
 25131  	for {
 25132  		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 25133  			break
 25134  		}
 25135  		v.reset(OpConst32)
 25136  		v.AuxInt = int32ToAuxInt(0)
 25137  		return true
 25138  	}
 25139  	// match: (Rsh32x64 <t> (Rsh32x64 x (Const64 [c])) (Const64 [d]))
 25140  	// cond: !uaddOvf(c,d)
 25141  	// result: (Rsh32x64 x (Const64 <t> [c+d]))
 25142  	for {
 25143  		t := v.Type
 25144  		if v_0.Op != OpRsh32x64 {
 25145  			break
 25146  		}
 25147  		_ = v_0.Args[1]
 25148  		x := v_0.Args[0]
 25149  		v_0_1 := v_0.Args[1]
 25150  		if v_0_1.Op != OpConst64 {
 25151  			break
 25152  		}
 25153  		c := auxIntToInt64(v_0_1.AuxInt)
 25154  		if v_1.Op != OpConst64 {
 25155  			break
 25156  		}
 25157  		d := auxIntToInt64(v_1.AuxInt)
 25158  		if !(!uaddOvf(c, d)) {
 25159  			break
 25160  		}
 25161  		v.reset(OpRsh32x64)
 25162  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 25163  		v0.AuxInt = int64ToAuxInt(c + d)
 25164  		v.AddArg2(x, v0)
 25165  		return true
 25166  	}
 25167  	// match: (Rsh32x64 (Lsh32x64 x (Const64 [24])) (Const64 [24]))
 25168  	// result: (SignExt8to32 (Trunc32to8 <typ.Int8> x))
 25169  	for {
 25170  		if v_0.Op != OpLsh32x64 {
 25171  			break
 25172  		}
 25173  		_ = v_0.Args[1]
 25174  		x := v_0.Args[0]
 25175  		v_0_1 := v_0.Args[1]
 25176  		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 24 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 24 {
 25177  			break
 25178  		}
 25179  		v.reset(OpSignExt8to32)
 25180  		v0 := b.NewValue0(v.Pos, OpTrunc32to8, typ.Int8)
 25181  		v0.AddArg(x)
 25182  		v.AddArg(v0)
 25183  		return true
 25184  	}
 25185  	// match: (Rsh32x64 (Lsh32x64 x (Const64 [16])) (Const64 [16]))
 25186  	// result: (SignExt16to32 (Trunc32to16 <typ.Int16> x))
 25187  	for {
 25188  		if v_0.Op != OpLsh32x64 {
 25189  			break
 25190  		}
 25191  		_ = v_0.Args[1]
 25192  		x := v_0.Args[0]
 25193  		v_0_1 := v_0.Args[1]
 25194  		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 16 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 16 {
 25195  			break
 25196  		}
 25197  		v.reset(OpSignExt16to32)
 25198  		v0 := b.NewValue0(v.Pos, OpTrunc32to16, typ.Int16)
 25199  		v0.AddArg(x)
 25200  		v.AddArg(v0)
 25201  		return true
 25202  	}
 25203  	return false
 25204  }
 25205  func rewriteValuegeneric_OpRsh32x8(v *Value) bool {
 25206  	v_1 := v.Args[1]
 25207  	v_0 := v.Args[0]
 25208  	b := v.Block
 25209  	// match: (Rsh32x8 <t> x (Const8 [c]))
 25210  	// result: (Rsh32x64 x (Const64 <t> [int64(uint8(c))]))
 25211  	for {
 25212  		t := v.Type
 25213  		x := v_0
 25214  		if v_1.Op != OpConst8 {
 25215  			break
 25216  		}
 25217  		c := auxIntToInt8(v_1.AuxInt)
 25218  		v.reset(OpRsh32x64)
 25219  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 25220  		v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
 25221  		v.AddArg2(x, v0)
 25222  		return true
 25223  	}
 25224  	// match: (Rsh32x8 (Const32 [0]) _)
 25225  	// result: (Const32 [0])
 25226  	for {
 25227  		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 25228  			break
 25229  		}
 25230  		v.reset(OpConst32)
 25231  		v.AuxInt = int32ToAuxInt(0)
 25232  		return true
 25233  	}
 25234  	return false
 25235  }
 25236  func rewriteValuegeneric_OpRsh64Ux16(v *Value) bool {
 25237  	v_1 := v.Args[1]
 25238  	v_0 := v.Args[0]
 25239  	b := v.Block
 25240  	// match: (Rsh64Ux16 <t> x (Const16 [c]))
 25241  	// result: (Rsh64Ux64 x (Const64 <t> [int64(uint16(c))]))
 25242  	for {
 25243  		t := v.Type
 25244  		x := v_0
 25245  		if v_1.Op != OpConst16 {
 25246  			break
 25247  		}
 25248  		c := auxIntToInt16(v_1.AuxInt)
 25249  		v.reset(OpRsh64Ux64)
 25250  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 25251  		v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
 25252  		v.AddArg2(x, v0)
 25253  		return true
 25254  	}
 25255  	// match: (Rsh64Ux16 (Const64 [0]) _)
 25256  	// result: (Const64 [0])
 25257  	for {
 25258  		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 25259  			break
 25260  		}
 25261  		v.reset(OpConst64)
 25262  		v.AuxInt = int64ToAuxInt(0)
 25263  		return true
 25264  	}
 25265  	return false
 25266  }
 25267  func rewriteValuegeneric_OpRsh64Ux32(v *Value) bool {
 25268  	v_1 := v.Args[1]
 25269  	v_0 := v.Args[0]
 25270  	b := v.Block
 25271  	// match: (Rsh64Ux32 <t> x (Const32 [c]))
 25272  	// result: (Rsh64Ux64 x (Const64 <t> [int64(uint32(c))]))
 25273  	for {
 25274  		t := v.Type
 25275  		x := v_0
 25276  		if v_1.Op != OpConst32 {
 25277  			break
 25278  		}
 25279  		c := auxIntToInt32(v_1.AuxInt)
 25280  		v.reset(OpRsh64Ux64)
 25281  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 25282  		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
 25283  		v.AddArg2(x, v0)
 25284  		return true
 25285  	}
 25286  	// match: (Rsh64Ux32 (Const64 [0]) _)
 25287  	// result: (Const64 [0])
 25288  	for {
 25289  		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 25290  			break
 25291  		}
 25292  		v.reset(OpConst64)
 25293  		v.AuxInt = int64ToAuxInt(0)
 25294  		return true
 25295  	}
 25296  	return false
 25297  }
 25298  func rewriteValuegeneric_OpRsh64Ux64(v *Value) bool {
 25299  	v_1 := v.Args[1]
 25300  	v_0 := v.Args[0]
 25301  	b := v.Block
 25302  	typ := &b.Func.Config.Types
 25303  	// match: (Rsh64Ux64 (Const64 [c]) (Const64 [d]))
 25304  	// result: (Const64 [int64(uint64(c) >> uint64(d))])
 25305  	for {
 25306  		if v_0.Op != OpConst64 {
 25307  			break
 25308  		}
 25309  		c := auxIntToInt64(v_0.AuxInt)
 25310  		if v_1.Op != OpConst64 {
 25311  			break
 25312  		}
 25313  		d := auxIntToInt64(v_1.AuxInt)
 25314  		v.reset(OpConst64)
 25315  		v.AuxInt = int64ToAuxInt(int64(uint64(c) >> uint64(d)))
 25316  		return true
 25317  	}
 25318  	// match: (Rsh64Ux64 x (Const64 [0]))
 25319  	// result: x
 25320  	for {
 25321  		x := v_0
 25322  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
 25323  			break
 25324  		}
 25325  		v.copyOf(x)
 25326  		return true
 25327  	}
 25328  	// match: (Rsh64Ux64 (Const64 [0]) _)
 25329  	// result: (Const64 [0])
 25330  	for {
 25331  		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 25332  			break
 25333  		}
 25334  		v.reset(OpConst64)
 25335  		v.AuxInt = int64ToAuxInt(0)
 25336  		return true
 25337  	}
 25338  	// match: (Rsh64Ux64 _ (Const64 [c]))
 25339  	// cond: uint64(c) >= 64
 25340  	// result: (Const64 [0])
 25341  	for {
 25342  		if v_1.Op != OpConst64 {
 25343  			break
 25344  		}
 25345  		c := auxIntToInt64(v_1.AuxInt)
 25346  		if !(uint64(c) >= 64) {
 25347  			break
 25348  		}
 25349  		v.reset(OpConst64)
 25350  		v.AuxInt = int64ToAuxInt(0)
 25351  		return true
 25352  	}
 25353  	// match: (Rsh64Ux64 <t> (Rsh64Ux64 x (Const64 [c])) (Const64 [d]))
 25354  	// cond: !uaddOvf(c,d)
 25355  	// result: (Rsh64Ux64 x (Const64 <t> [c+d]))
 25356  	for {
 25357  		t := v.Type
 25358  		if v_0.Op != OpRsh64Ux64 {
 25359  			break
 25360  		}
 25361  		_ = v_0.Args[1]
 25362  		x := v_0.Args[0]
 25363  		v_0_1 := v_0.Args[1]
 25364  		if v_0_1.Op != OpConst64 {
 25365  			break
 25366  		}
 25367  		c := auxIntToInt64(v_0_1.AuxInt)
 25368  		if v_1.Op != OpConst64 {
 25369  			break
 25370  		}
 25371  		d := auxIntToInt64(v_1.AuxInt)
 25372  		if !(!uaddOvf(c, d)) {
 25373  			break
 25374  		}
 25375  		v.reset(OpRsh64Ux64)
 25376  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 25377  		v0.AuxInt = int64ToAuxInt(c + d)
 25378  		v.AddArg2(x, v0)
 25379  		return true
 25380  	}
 25381  	// match: (Rsh64Ux64 (Rsh64x64 x _) (Const64 <t> [63]))
 25382  	// result: (Rsh64Ux64 x (Const64 <t> [63]))
 25383  	for {
 25384  		if v_0.Op != OpRsh64x64 {
 25385  			break
 25386  		}
 25387  		x := v_0.Args[0]
 25388  		if v_1.Op != OpConst64 {
 25389  			break
 25390  		}
 25391  		t := v_1.Type
 25392  		if auxIntToInt64(v_1.AuxInt) != 63 {
 25393  			break
 25394  		}
 25395  		v.reset(OpRsh64Ux64)
 25396  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 25397  		v0.AuxInt = int64ToAuxInt(63)
 25398  		v.AddArg2(x, v0)
 25399  		return true
 25400  	}
 25401  	// match: (Rsh64Ux64 i:(Lsh64x64 x (Const64 [c])) (Const64 [c]))
 25402  	// cond: c >= 0 && c < 64 && i.Uses == 1
 25403  	// result: (And64 x (Const64 <v.Type> [int64(^uint64(0)>>c)]))
 25404  	for {
 25405  		i := v_0
 25406  		if i.Op != OpLsh64x64 {
 25407  			break
 25408  		}
 25409  		_ = i.Args[1]
 25410  		x := i.Args[0]
 25411  		i_1 := i.Args[1]
 25412  		if i_1.Op != OpConst64 {
 25413  			break
 25414  		}
 25415  		c := auxIntToInt64(i_1.AuxInt)
 25416  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 64 && i.Uses == 1) {
 25417  			break
 25418  		}
 25419  		v.reset(OpAnd64)
 25420  		v0 := b.NewValue0(v.Pos, OpConst64, v.Type)
 25421  		v0.AuxInt = int64ToAuxInt(int64(^uint64(0) >> c))
 25422  		v.AddArg2(x, v0)
 25423  		return true
 25424  	}
 25425  	// match: (Rsh64Ux64 (Lsh64x64 (Rsh64Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
 25426  	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
 25427  	// result: (Rsh64Ux64 x (Const64 <typ.UInt64> [c1-c2+c3]))
 25428  	for {
 25429  		if v_0.Op != OpLsh64x64 {
 25430  			break
 25431  		}
 25432  		_ = v_0.Args[1]
 25433  		v_0_0 := v_0.Args[0]
 25434  		if v_0_0.Op != OpRsh64Ux64 {
 25435  			break
 25436  		}
 25437  		_ = v_0_0.Args[1]
 25438  		x := v_0_0.Args[0]
 25439  		v_0_0_1 := v_0_0.Args[1]
 25440  		if v_0_0_1.Op != OpConst64 {
 25441  			break
 25442  		}
 25443  		c1 := auxIntToInt64(v_0_0_1.AuxInt)
 25444  		v_0_1 := v_0.Args[1]
 25445  		if v_0_1.Op != OpConst64 {
 25446  			break
 25447  		}
 25448  		c2 := auxIntToInt64(v_0_1.AuxInt)
 25449  		if v_1.Op != OpConst64 {
 25450  			break
 25451  		}
 25452  		c3 := auxIntToInt64(v_1.AuxInt)
 25453  		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
 25454  			break
 25455  		}
 25456  		v.reset(OpRsh64Ux64)
 25457  		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
 25458  		v0.AuxInt = int64ToAuxInt(c1 - c2 + c3)
 25459  		v.AddArg2(x, v0)
 25460  		return true
 25461  	}
 25462  	// match: (Rsh64Ux64 (Lsh64x64 x (Const64 [56])) (Const64 [56]))
 25463  	// result: (ZeroExt8to64 (Trunc64to8 <typ.UInt8> x))
 25464  	for {
 25465  		if v_0.Op != OpLsh64x64 {
 25466  			break
 25467  		}
 25468  		_ = v_0.Args[1]
 25469  		x := v_0.Args[0]
 25470  		v_0_1 := v_0.Args[1]
 25471  		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 56 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 56 {
 25472  			break
 25473  		}
 25474  		v.reset(OpZeroExt8to64)
 25475  		v0 := b.NewValue0(v.Pos, OpTrunc64to8, typ.UInt8)
 25476  		v0.AddArg(x)
 25477  		v.AddArg(v0)
 25478  		return true
 25479  	}
 25480  	// match: (Rsh64Ux64 (Lsh64x64 x (Const64 [48])) (Const64 [48]))
 25481  	// result: (ZeroExt16to64 (Trunc64to16 <typ.UInt16> x))
 25482  	for {
 25483  		if v_0.Op != OpLsh64x64 {
 25484  			break
 25485  		}
 25486  		_ = v_0.Args[1]
 25487  		x := v_0.Args[0]
 25488  		v_0_1 := v_0.Args[1]
 25489  		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 48 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 48 {
 25490  			break
 25491  		}
 25492  		v.reset(OpZeroExt16to64)
 25493  		v0 := b.NewValue0(v.Pos, OpTrunc64to16, typ.UInt16)
 25494  		v0.AddArg(x)
 25495  		v.AddArg(v0)
 25496  		return true
 25497  	}
 25498  	// match: (Rsh64Ux64 (Lsh64x64 x (Const64 [32])) (Const64 [32]))
 25499  	// result: (ZeroExt32to64 (Trunc64to32 <typ.UInt32> x))
 25500  	for {
 25501  		if v_0.Op != OpLsh64x64 {
 25502  			break
 25503  		}
 25504  		_ = v_0.Args[1]
 25505  		x := v_0.Args[0]
 25506  		v_0_1 := v_0.Args[1]
 25507  		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 32 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 32 {
 25508  			break
 25509  		}
 25510  		v.reset(OpZeroExt32to64)
 25511  		v0 := b.NewValue0(v.Pos, OpTrunc64to32, typ.UInt32)
 25512  		v0.AddArg(x)
 25513  		v.AddArg(v0)
 25514  		return true
 25515  	}
 25516  	return false
 25517  }
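// exampleMergeUnsignedShifts64 is a hypothetical sketch, not generated code:
// it mirrors the rule above that folds two nested unsigned right shifts into
// a single shift by the summed count, provided the sum does not overflow.
func exampleMergeUnsignedShifts64(x uint64, c, d uint) uint64 {
	sum := c + d // the rule requires !uaddOvf(c, d); assume small counts here
	if sum >= 64 {
		return 0 // mirrors the "shift count >= width" rule
	}
	return x >> sum // same value as (x >> c) >> d
}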
 25518  func rewriteValuegeneric_OpRsh64Ux8(v *Value) bool {
 25519  	v_1 := v.Args[1]
 25520  	v_0 := v.Args[0]
 25521  	b := v.Block
 25522  	// match: (Rsh64Ux8 <t> x (Const8 [c]))
 25523  	// result: (Rsh64Ux64 x (Const64 <t> [int64(uint8(c))]))
 25524  	for {
 25525  		t := v.Type
 25526  		x := v_0
 25527  		if v_1.Op != OpConst8 {
 25528  			break
 25529  		}
 25530  		c := auxIntToInt8(v_1.AuxInt)
 25531  		v.reset(OpRsh64Ux64)
 25532  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 25533  		v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
 25534  		v.AddArg2(x, v0)
 25535  		return true
 25536  	}
 25537  	// match: (Rsh64Ux8 (Const64 [0]) _)
 25538  	// result: (Const64 [0])
 25539  	for {
 25540  		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 25541  			break
 25542  		}
 25543  		v.reset(OpConst64)
 25544  		v.AuxInt = int64ToAuxInt(0)
 25545  		return true
 25546  	}
 25547  	return false
 25548  }
 25549  func rewriteValuegeneric_OpRsh64x16(v *Value) bool {
 25550  	v_1 := v.Args[1]
 25551  	v_0 := v.Args[0]
 25552  	b := v.Block
 25553  	// match: (Rsh64x16 <t> x (Const16 [c]))
 25554  	// result: (Rsh64x64 x (Const64 <t> [int64(uint16(c))]))
 25555  	for {
 25556  		t := v.Type
 25557  		x := v_0
 25558  		if v_1.Op != OpConst16 {
 25559  			break
 25560  		}
 25561  		c := auxIntToInt16(v_1.AuxInt)
 25562  		v.reset(OpRsh64x64)
 25563  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 25564  		v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
 25565  		v.AddArg2(x, v0)
 25566  		return true
 25567  	}
 25568  	// match: (Rsh64x16 (Const64 [0]) _)
 25569  	// result: (Const64 [0])
 25570  	for {
 25571  		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 25572  			break
 25573  		}
 25574  		v.reset(OpConst64)
 25575  		v.AuxInt = int64ToAuxInt(0)
 25576  		return true
 25577  	}
 25578  	return false
 25579  }
 25580  func rewriteValuegeneric_OpRsh64x32(v *Value) bool {
 25581  	v_1 := v.Args[1]
 25582  	v_0 := v.Args[0]
 25583  	b := v.Block
 25584  	// match: (Rsh64x32 <t> x (Const32 [c]))
 25585  	// result: (Rsh64x64 x (Const64 <t> [int64(uint32(c))]))
 25586  	for {
 25587  		t := v.Type
 25588  		x := v_0
 25589  		if v_1.Op != OpConst32 {
 25590  			break
 25591  		}
 25592  		c := auxIntToInt32(v_1.AuxInt)
 25593  		v.reset(OpRsh64x64)
 25594  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 25595  		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
 25596  		v.AddArg2(x, v0)
 25597  		return true
 25598  	}
 25599  	// match: (Rsh64x32 (Const64 [0]) _)
 25600  	// result: (Const64 [0])
 25601  	for {
 25602  		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 25603  			break
 25604  		}
 25605  		v.reset(OpConst64)
 25606  		v.AuxInt = int64ToAuxInt(0)
 25607  		return true
 25608  	}
 25609  	return false
 25610  }
 25611  func rewriteValuegeneric_OpRsh64x64(v *Value) bool {
 25612  	v_1 := v.Args[1]
 25613  	v_0 := v.Args[0]
 25614  	b := v.Block
 25615  	typ := &b.Func.Config.Types
 25616  	// match: (Rsh64x64 (Const64 [c]) (Const64 [d]))
 25617  	// result: (Const64 [c >> uint64(d)])
 25618  	for {
 25619  		if v_0.Op != OpConst64 {
 25620  			break
 25621  		}
 25622  		c := auxIntToInt64(v_0.AuxInt)
 25623  		if v_1.Op != OpConst64 {
 25624  			break
 25625  		}
 25626  		d := auxIntToInt64(v_1.AuxInt)
 25627  		v.reset(OpConst64)
 25628  		v.AuxInt = int64ToAuxInt(c >> uint64(d))
 25629  		return true
 25630  	}
 25631  	// match: (Rsh64x64 x (Const64 [0]))
 25632  	// result: x
 25633  	for {
 25634  		x := v_0
 25635  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
 25636  			break
 25637  		}
 25638  		v.copyOf(x)
 25639  		return true
 25640  	}
 25641  	// match: (Rsh64x64 (Const64 [0]) _)
 25642  	// result: (Const64 [0])
 25643  	for {
 25644  		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 25645  			break
 25646  		}
 25647  		v.reset(OpConst64)
 25648  		v.AuxInt = int64ToAuxInt(0)
 25649  		return true
 25650  	}
 25651  	// match: (Rsh64x64 <t> (Rsh64x64 x (Const64 [c])) (Const64 [d]))
 25652  	// cond: !uaddOvf(c,d)
 25653  	// result: (Rsh64x64 x (Const64 <t> [c+d]))
 25654  	for {
 25655  		t := v.Type
 25656  		if v_0.Op != OpRsh64x64 {
 25657  			break
 25658  		}
 25659  		_ = v_0.Args[1]
 25660  		x := v_0.Args[0]
 25661  		v_0_1 := v_0.Args[1]
 25662  		if v_0_1.Op != OpConst64 {
 25663  			break
 25664  		}
 25665  		c := auxIntToInt64(v_0_1.AuxInt)
 25666  		if v_1.Op != OpConst64 {
 25667  			break
 25668  		}
 25669  		d := auxIntToInt64(v_1.AuxInt)
 25670  		if !(!uaddOvf(c, d)) {
 25671  			break
 25672  		}
 25673  		v.reset(OpRsh64x64)
 25674  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 25675  		v0.AuxInt = int64ToAuxInt(c + d)
 25676  		v.AddArg2(x, v0)
 25677  		return true
 25678  	}
 25679  	// match: (Rsh64x64 (Lsh64x64 x (Const64 [56])) (Const64 [56]))
 25680  	// result: (SignExt8to64 (Trunc64to8 <typ.Int8> x))
 25681  	for {
 25682  		if v_0.Op != OpLsh64x64 {
 25683  			break
 25684  		}
 25685  		_ = v_0.Args[1]
 25686  		x := v_0.Args[0]
 25687  		v_0_1 := v_0.Args[1]
 25688  		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 56 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 56 {
 25689  			break
 25690  		}
 25691  		v.reset(OpSignExt8to64)
 25692  		v0 := b.NewValue0(v.Pos, OpTrunc64to8, typ.Int8)
 25693  		v0.AddArg(x)
 25694  		v.AddArg(v0)
 25695  		return true
 25696  	}
 25697  	// match: (Rsh64x64 (Lsh64x64 x (Const64 [48])) (Const64 [48]))
 25698  	// result: (SignExt16to64 (Trunc64to16 <typ.Int16> x))
 25699  	for {
 25700  		if v_0.Op != OpLsh64x64 {
 25701  			break
 25702  		}
 25703  		_ = v_0.Args[1]
 25704  		x := v_0.Args[0]
 25705  		v_0_1 := v_0.Args[1]
 25706  		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 48 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 48 {
 25707  			break
 25708  		}
 25709  		v.reset(OpSignExt16to64)
 25710  		v0 := b.NewValue0(v.Pos, OpTrunc64to16, typ.Int16)
 25711  		v0.AddArg(x)
 25712  		v.AddArg(v0)
 25713  		return true
 25714  	}
 25715  	// match: (Rsh64x64 (Lsh64x64 x (Const64 [32])) (Const64 [32]))
 25716  	// result: (SignExt32to64 (Trunc64to32 <typ.Int32> x))
 25717  	for {
 25718  		if v_0.Op != OpLsh64x64 {
 25719  			break
 25720  		}
 25721  		_ = v_0.Args[1]
 25722  		x := v_0.Args[0]
 25723  		v_0_1 := v_0.Args[1]
 25724  		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 32 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 32 {
 25725  			break
 25726  		}
 25727  		v.reset(OpSignExt32to64)
 25728  		v0 := b.NewValue0(v.Pos, OpTrunc64to32, typ.Int32)
 25729  		v0.AddArg(x)
 25730  		v.AddArg(v0)
 25731  		return true
 25732  	}
 25733  	return false
 25734  }
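// exampleShiftPairAsSignExt64 is an illustrative sketch, not generated code:
// the Rsh64x64 rules above turn a left shift followed by an arithmetic right
// shift by 56, 48 or 32 bits into a truncation plus sign extension.
func exampleShiftPairAsSignExt64(x int64) int64 {
	viaShifts := (x << 32) >> 32  // arithmetic right shift on a signed type
	viaConvert := int64(int32(x)) // SignExt32to64(Trunc64to32 x)
	if viaShifts != viaConvert {
		panic("unreachable: both forms sign-extend the low 32 bits")
	}
	return viaConvert
}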
 25735  func rewriteValuegeneric_OpRsh64x8(v *Value) bool {
 25736  	v_1 := v.Args[1]
 25737  	v_0 := v.Args[0]
 25738  	b := v.Block
 25739  	// match: (Rsh64x8 <t> x (Const8 [c]))
 25740  	// result: (Rsh64x64 x (Const64 <t> [int64(uint8(c))]))
 25741  	for {
 25742  		t := v.Type
 25743  		x := v_0
 25744  		if v_1.Op != OpConst8 {
 25745  			break
 25746  		}
 25747  		c := auxIntToInt8(v_1.AuxInt)
 25748  		v.reset(OpRsh64x64)
 25749  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 25750  		v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
 25751  		v.AddArg2(x, v0)
 25752  		return true
 25753  	}
 25754  	// match: (Rsh64x8 (Const64 [0]) _)
 25755  	// result: (Const64 [0])
 25756  	for {
 25757  		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 25758  			break
 25759  		}
 25760  		v.reset(OpConst64)
 25761  		v.AuxInt = int64ToAuxInt(0)
 25762  		return true
 25763  	}
 25764  	return false
 25765  }
 25766  func rewriteValuegeneric_OpRsh8Ux16(v *Value) bool {
 25767  	v_1 := v.Args[1]
 25768  	v_0 := v.Args[0]
 25769  	b := v.Block
 25770  	// match: (Rsh8Ux16 <t> x (Const16 [c]))
 25771  	// result: (Rsh8Ux64 x (Const64 <t> [int64(uint16(c))]))
 25772  	for {
 25773  		t := v.Type
 25774  		x := v_0
 25775  		if v_1.Op != OpConst16 {
 25776  			break
 25777  		}
 25778  		c := auxIntToInt16(v_1.AuxInt)
 25779  		v.reset(OpRsh8Ux64)
 25780  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 25781  		v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
 25782  		v.AddArg2(x, v0)
 25783  		return true
 25784  	}
 25785  	// match: (Rsh8Ux16 (Const8 [0]) _)
 25786  	// result: (Const8 [0])
 25787  	for {
 25788  		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
 25789  			break
 25790  		}
 25791  		v.reset(OpConst8)
 25792  		v.AuxInt = int8ToAuxInt(0)
 25793  		return true
 25794  	}
 25795  	return false
 25796  }
 25797  func rewriteValuegeneric_OpRsh8Ux32(v *Value) bool {
 25798  	v_1 := v.Args[1]
 25799  	v_0 := v.Args[0]
 25800  	b := v.Block
 25801  	// match: (Rsh8Ux32 <t> x (Const32 [c]))
 25802  	// result: (Rsh8Ux64 x (Const64 <t> [int64(uint32(c))]))
 25803  	for {
 25804  		t := v.Type
 25805  		x := v_0
 25806  		if v_1.Op != OpConst32 {
 25807  			break
 25808  		}
 25809  		c := auxIntToInt32(v_1.AuxInt)
 25810  		v.reset(OpRsh8Ux64)
 25811  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 25812  		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
 25813  		v.AddArg2(x, v0)
 25814  		return true
 25815  	}
 25816  	// match: (Rsh8Ux32 (Const8 [0]) _)
 25817  	// result: (Const8 [0])
 25818  	for {
 25819  		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
 25820  			break
 25821  		}
 25822  		v.reset(OpConst8)
 25823  		v.AuxInt = int8ToAuxInt(0)
 25824  		return true
 25825  	}
 25826  	return false
 25827  }
 25828  func rewriteValuegeneric_OpRsh8Ux64(v *Value) bool {
 25829  	v_1 := v.Args[1]
 25830  	v_0 := v.Args[0]
 25831  	b := v.Block
 25832  	typ := &b.Func.Config.Types
 25833  	// match: (Rsh8Ux64 (Const8 [c]) (Const64 [d]))
 25834  	// result: (Const8 [int8(uint8(c) >> uint64(d))])
 25835  	for {
 25836  		if v_0.Op != OpConst8 {
 25837  			break
 25838  		}
 25839  		c := auxIntToInt8(v_0.AuxInt)
 25840  		if v_1.Op != OpConst64 {
 25841  			break
 25842  		}
 25843  		d := auxIntToInt64(v_1.AuxInt)
 25844  		v.reset(OpConst8)
 25845  		v.AuxInt = int8ToAuxInt(int8(uint8(c) >> uint64(d)))
 25846  		return true
 25847  	}
 25848  	// match: (Rsh8Ux64 x (Const64 [0]))
 25849  	// result: x
 25850  	for {
 25851  		x := v_0
 25852  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
 25853  			break
 25854  		}
 25855  		v.copyOf(x)
 25856  		return true
 25857  	}
 25858  	// match: (Rsh8Ux64 (Const8 [0]) _)
 25859  	// result: (Const8 [0])
 25860  	for {
 25861  		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
 25862  			break
 25863  		}
 25864  		v.reset(OpConst8)
 25865  		v.AuxInt = int8ToAuxInt(0)
 25866  		return true
 25867  	}
 25868  	// match: (Rsh8Ux64 _ (Const64 [c]))
 25869  	// cond: uint64(c) >= 8
 25870  	// result: (Const8 [0])
 25871  	for {
 25872  		if v_1.Op != OpConst64 {
 25873  			break
 25874  		}
 25875  		c := auxIntToInt64(v_1.AuxInt)
 25876  		if !(uint64(c) >= 8) {
 25877  			break
 25878  		}
 25879  		v.reset(OpConst8)
 25880  		v.AuxInt = int8ToAuxInt(0)
 25881  		return true
 25882  	}
 25883  	// match: (Rsh8Ux64 <t> (Rsh8Ux64 x (Const64 [c])) (Const64 [d]))
 25884  	// cond: !uaddOvf(c,d)
 25885  	// result: (Rsh8Ux64 x (Const64 <t> [c+d]))
 25886  	for {
 25887  		t := v.Type
 25888  		if v_0.Op != OpRsh8Ux64 {
 25889  			break
 25890  		}
 25891  		_ = v_0.Args[1]
 25892  		x := v_0.Args[0]
 25893  		v_0_1 := v_0.Args[1]
 25894  		if v_0_1.Op != OpConst64 {
 25895  			break
 25896  		}
 25897  		c := auxIntToInt64(v_0_1.AuxInt)
 25898  		if v_1.Op != OpConst64 {
 25899  			break
 25900  		}
 25901  		d := auxIntToInt64(v_1.AuxInt)
 25902  		if !(!uaddOvf(c, d)) {
 25903  			break
 25904  		}
 25905  		v.reset(OpRsh8Ux64)
 25906  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 25907  		v0.AuxInt = int64ToAuxInt(c + d)
 25908  		v.AddArg2(x, v0)
 25909  		return true
 25910  	}
 25911  	// match: (Rsh8Ux64 (Rsh8x64 x _) (Const64 <t> [7]))
 25912  	// result: (Rsh8Ux64 x (Const64 <t> [7]))
 25913  	for {
 25914  		if v_0.Op != OpRsh8x64 {
 25915  			break
 25916  		}
 25917  		x := v_0.Args[0]
 25918  		if v_1.Op != OpConst64 {
 25919  			break
 25920  		}
 25921  		t := v_1.Type
 25922  		if auxIntToInt64(v_1.AuxInt) != 7 {
 25923  			break
 25924  		}
 25925  		v.reset(OpRsh8Ux64)
 25926  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 25927  		v0.AuxInt = int64ToAuxInt(7)
 25928  		v.AddArg2(x, v0)
 25929  		return true
 25930  	}
 25931  	// match: (Rsh8Ux64 i:(Lsh8x64 x (Const64 [c])) (Const64 [c]))
 25932  	// cond: c >= 0 && c < 8 && i.Uses == 1
 25933  	// result: (And8 x (Const8 <v.Type> [int8(^uint8(0)>>c)]))
 25934  	for {
 25935  		i := v_0
 25936  		if i.Op != OpLsh8x64 {
 25937  			break
 25938  		}
 25939  		_ = i.Args[1]
 25940  		x := i.Args[0]
 25941  		i_1 := i.Args[1]
 25942  		if i_1.Op != OpConst64 {
 25943  			break
 25944  		}
 25945  		c := auxIntToInt64(i_1.AuxInt)
 25946  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 8 && i.Uses == 1) {
 25947  			break
 25948  		}
 25949  		v.reset(OpAnd8)
 25950  		v0 := b.NewValue0(v.Pos, OpConst8, v.Type)
 25951  		v0.AuxInt = int8ToAuxInt(int8(^uint8(0) >> c))
 25952  		v.AddArg2(x, v0)
 25953  		return true
 25954  	}
 25955  	// match: (Rsh8Ux64 (Lsh8x64 (Rsh8Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
 25956  	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
 25957  	// result: (Rsh8Ux64 x (Const64 <typ.UInt64> [c1-c2+c3]))
 25958  	for {
 25959  		if v_0.Op != OpLsh8x64 {
 25960  			break
 25961  		}
 25962  		_ = v_0.Args[1]
 25963  		v_0_0 := v_0.Args[0]
 25964  		if v_0_0.Op != OpRsh8Ux64 {
 25965  			break
 25966  		}
 25967  		_ = v_0_0.Args[1]
 25968  		x := v_0_0.Args[0]
 25969  		v_0_0_1 := v_0_0.Args[1]
 25970  		if v_0_0_1.Op != OpConst64 {
 25971  			break
 25972  		}
 25973  		c1 := auxIntToInt64(v_0_0_1.AuxInt)
 25974  		v_0_1 := v_0.Args[1]
 25975  		if v_0_1.Op != OpConst64 {
 25976  			break
 25977  		}
 25978  		c2 := auxIntToInt64(v_0_1.AuxInt)
 25979  		if v_1.Op != OpConst64 {
 25980  			break
 25981  		}
 25982  		c3 := auxIntToInt64(v_1.AuxInt)
 25983  		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
 25984  			break
 25985  		}
 25986  		v.reset(OpRsh8Ux64)
 25987  		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
 25988  		v0.AuxInt = int64ToAuxInt(c1 - c2 + c3)
 25989  		v.AddArg2(x, v0)
 25990  		return true
 25991  	}
 25992  	return false
 25993  }
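// exampleCombineShiftTriple8 is a hypothetical sketch, not generated code: it
// mirrors the (Rsh8Ux64 (Lsh8x64 (Rsh8Ux64 x c1) c2) c3) rule above. When
// c1 >= c2 and c3 >= c2, the intermediate left shift cannot push set bits out
// of the 8-bit value, so the whole sequence is one right shift by c1-c2+c3.
// Shift counts are assumed small enough that c1-c2+c3 does not overflow.
func exampleCombineShiftTriple8(x uint8, c1, c2, c3 uint) uint8 {
	if !(c1 >= c2 && c3 >= c2) {
		return ((x >> c1) << c2) >> c3 // outside the rule's condition
	}
	return x >> (c1 - c2 + c3) // same value as ((x >> c1) << c2) >> c3
}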
 25994  func rewriteValuegeneric_OpRsh8Ux8(v *Value) bool {
 25995  	v_1 := v.Args[1]
 25996  	v_0 := v.Args[0]
 25997  	b := v.Block
 25998  	// match: (Rsh8Ux8 <t> x (Const8 [c]))
 25999  	// result: (Rsh8Ux64 x (Const64 <t> [int64(uint8(c))]))
 26000  	for {
 26001  		t := v.Type
 26002  		x := v_0
 26003  		if v_1.Op != OpConst8 {
 26004  			break
 26005  		}
 26006  		c := auxIntToInt8(v_1.AuxInt)
 26007  		v.reset(OpRsh8Ux64)
 26008  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 26009  		v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
 26010  		v.AddArg2(x, v0)
 26011  		return true
 26012  	}
 26013  	// match: (Rsh8Ux8 (Const8 [0]) _)
 26014  	// result: (Const8 [0])
 26015  	for {
 26016  		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
 26017  			break
 26018  		}
 26019  		v.reset(OpConst8)
 26020  		v.AuxInt = int8ToAuxInt(0)
 26021  		return true
 26022  	}
 26023  	return false
 26024  }
 26025  func rewriteValuegeneric_OpRsh8x16(v *Value) bool {
 26026  	v_1 := v.Args[1]
 26027  	v_0 := v.Args[0]
 26028  	b := v.Block
 26029  	// match: (Rsh8x16 <t> x (Const16 [c]))
 26030  	// result: (Rsh8x64 x (Const64 <t> [int64(uint16(c))]))
 26031  	for {
 26032  		t := v.Type
 26033  		x := v_0
 26034  		if v_1.Op != OpConst16 {
 26035  			break
 26036  		}
 26037  		c := auxIntToInt16(v_1.AuxInt)
 26038  		v.reset(OpRsh8x64)
 26039  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 26040  		v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
 26041  		v.AddArg2(x, v0)
 26042  		return true
 26043  	}
 26044  	// match: (Rsh8x16 (Const8 [0]) _)
 26045  	// result: (Const8 [0])
 26046  	for {
 26047  		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
 26048  			break
 26049  		}
 26050  		v.reset(OpConst8)
 26051  		v.AuxInt = int8ToAuxInt(0)
 26052  		return true
 26053  	}
 26054  	return false
 26055  }
 26056  func rewriteValuegeneric_OpRsh8x32(v *Value) bool {
 26057  	v_1 := v.Args[1]
 26058  	v_0 := v.Args[0]
 26059  	b := v.Block
 26060  	// match: (Rsh8x32 <t> x (Const32 [c]))
 26061  	// result: (Rsh8x64 x (Const64 <t> [int64(uint32(c))]))
 26062  	for {
 26063  		t := v.Type
 26064  		x := v_0
 26065  		if v_1.Op != OpConst32 {
 26066  			break
 26067  		}
 26068  		c := auxIntToInt32(v_1.AuxInt)
 26069  		v.reset(OpRsh8x64)
 26070  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 26071  		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
 26072  		v.AddArg2(x, v0)
 26073  		return true
 26074  	}
 26075  	// match: (Rsh8x32 (Const8 [0]) _)
 26076  	// result: (Const8 [0])
 26077  	for {
 26078  		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
 26079  			break
 26080  		}
 26081  		v.reset(OpConst8)
 26082  		v.AuxInt = int8ToAuxInt(0)
 26083  		return true
 26084  	}
 26085  	return false
 26086  }
 26087  func rewriteValuegeneric_OpRsh8x64(v *Value) bool {
 26088  	v_1 := v.Args[1]
 26089  	v_0 := v.Args[0]
 26090  	b := v.Block
 26091  	// match: (Rsh8x64 (Const8 [c]) (Const64 [d]))
 26092  	// result: (Const8 [c >> uint64(d)])
 26093  	for {
 26094  		if v_0.Op != OpConst8 {
 26095  			break
 26096  		}
 26097  		c := auxIntToInt8(v_0.AuxInt)
 26098  		if v_1.Op != OpConst64 {
 26099  			break
 26100  		}
 26101  		d := auxIntToInt64(v_1.AuxInt)
 26102  		v.reset(OpConst8)
 26103  		v.AuxInt = int8ToAuxInt(c >> uint64(d))
 26104  		return true
 26105  	}
 26106  	// match: (Rsh8x64 x (Const64 [0]))
 26107  	// result: x
 26108  	for {
 26109  		x := v_0
 26110  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
 26111  			break
 26112  		}
 26113  		v.copyOf(x)
 26114  		return true
 26115  	}
 26116  	// match: (Rsh8x64 (Const8 [0]) _)
 26117  	// result: (Const8 [0])
 26118  	for {
 26119  		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
 26120  			break
 26121  		}
 26122  		v.reset(OpConst8)
 26123  		v.AuxInt = int8ToAuxInt(0)
 26124  		return true
 26125  	}
 26126  	// match: (Rsh8x64 <t> (Rsh8x64 x (Const64 [c])) (Const64 [d]))
 26127  	// cond: !uaddOvf(c,d)
 26128  	// result: (Rsh8x64 x (Const64 <t> [c+d]))
 26129  	for {
 26130  		t := v.Type
 26131  		if v_0.Op != OpRsh8x64 {
 26132  			break
 26133  		}
 26134  		_ = v_0.Args[1]
 26135  		x := v_0.Args[0]
 26136  		v_0_1 := v_0.Args[1]
 26137  		if v_0_1.Op != OpConst64 {
 26138  			break
 26139  		}
 26140  		c := auxIntToInt64(v_0_1.AuxInt)
 26141  		if v_1.Op != OpConst64 {
 26142  			break
 26143  		}
 26144  		d := auxIntToInt64(v_1.AuxInt)
 26145  		if !(!uaddOvf(c, d)) {
 26146  			break
 26147  		}
 26148  		v.reset(OpRsh8x64)
 26149  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 26150  		v0.AuxInt = int64ToAuxInt(c + d)
 26151  		v.AddArg2(x, v0)
 26152  		return true
 26153  	}
 26154  	return false
 26155  }
 26156  func rewriteValuegeneric_OpRsh8x8(v *Value) bool {
 26157  	v_1 := v.Args[1]
 26158  	v_0 := v.Args[0]
 26159  	b := v.Block
 26160  	// match: (Rsh8x8 <t> x (Const8 [c]))
 26161  	// result: (Rsh8x64 x (Const64 <t> [int64(uint8(c))]))
 26162  	for {
 26163  		t := v.Type
 26164  		x := v_0
 26165  		if v_1.Op != OpConst8 {
 26166  			break
 26167  		}
 26168  		c := auxIntToInt8(v_1.AuxInt)
 26169  		v.reset(OpRsh8x64)
 26170  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 26171  		v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
 26172  		v.AddArg2(x, v0)
 26173  		return true
 26174  	}
 26175  	// match: (Rsh8x8 (Const8 [0]) _)
 26176  	// result: (Const8 [0])
 26177  	for {
 26178  		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
 26179  			break
 26180  		}
 26181  		v.reset(OpConst8)
 26182  		v.AuxInt = int8ToAuxInt(0)
 26183  		return true
 26184  	}
 26185  	return false
 26186  }
 26187  func rewriteValuegeneric_OpSelect0(v *Value) bool {
 26188  	v_0 := v.Args[0]
 26189  	// match: (Select0 (Div128u (Const64 [0]) lo y))
 26190  	// result: (Div64u lo y)
 26191  	for {
 26192  		if v_0.Op != OpDiv128u {
 26193  			break
 26194  		}
 26195  		y := v_0.Args[2]
 26196  		v_0_0 := v_0.Args[0]
 26197  		if v_0_0.Op != OpConst64 || auxIntToInt64(v_0_0.AuxInt) != 0 {
 26198  			break
 26199  		}
 26200  		lo := v_0.Args[1]
 26201  		v.reset(OpDiv64u)
 26202  		v.AddArg2(lo, y)
 26203  		return true
 26204  	}
 26205  	// match: (Select0 (Mul32uover (Const32 [1]) x))
 26206  	// result: x
 26207  	for {
 26208  		if v_0.Op != OpMul32uover {
 26209  			break
 26210  		}
 26211  		_ = v_0.Args[1]
 26212  		v_0_0 := v_0.Args[0]
 26213  		v_0_1 := v_0.Args[1]
 26214  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 26215  			if v_0_0.Op != OpConst32 || auxIntToInt32(v_0_0.AuxInt) != 1 {
 26216  				continue
 26217  			}
 26218  			x := v_0_1
 26219  			v.copyOf(x)
 26220  			return true
 26221  		}
 26222  		break
 26223  	}
 26224  	// match: (Select0 (Mul64uover (Const64 [1]) x))
 26225  	// result: x
 26226  	for {
 26227  		if v_0.Op != OpMul64uover {
 26228  			break
 26229  		}
 26230  		_ = v_0.Args[1]
 26231  		v_0_0 := v_0.Args[0]
 26232  		v_0_1 := v_0.Args[1]
 26233  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 26234  			if v_0_0.Op != OpConst64 || auxIntToInt64(v_0_0.AuxInt) != 1 {
 26235  				continue
 26236  			}
 26237  			x := v_0_1
 26238  			v.copyOf(x)
 26239  			return true
 26240  		}
 26241  		break
 26242  	}
 26243  	// match: (Select0 (Mul64uover (Const64 [0]) x))
 26244  	// result: (Const64 [0])
 26245  	for {
 26246  		if v_0.Op != OpMul64uover {
 26247  			break
 26248  		}
 26249  		_ = v_0.Args[1]
 26250  		v_0_0 := v_0.Args[0]
 26251  		v_0_1 := v_0.Args[1]
 26252  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 26253  			if v_0_0.Op != OpConst64 || auxIntToInt64(v_0_0.AuxInt) != 0 {
 26254  				continue
 26255  			}
 26256  			v.reset(OpConst64)
 26257  			v.AuxInt = int64ToAuxInt(0)
 26258  			return true
 26259  		}
 26260  		break
 26261  	}
 26262  	// match: (Select0 (Mul32uover (Const32 [0]) x))
 26263  	// result: (Const32 [0])
 26264  	for {
 26265  		if v_0.Op != OpMul32uover {
 26266  			break
 26267  		}
 26268  		_ = v_0.Args[1]
 26269  		v_0_0 := v_0.Args[0]
 26270  		v_0_1 := v_0.Args[1]
 26271  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 26272  			if v_0_0.Op != OpConst32 || auxIntToInt32(v_0_0.AuxInt) != 0 {
 26273  				continue
 26274  			}
 26275  			v.reset(OpConst32)
 26276  			v.AuxInt = int32ToAuxInt(0)
 26277  			return true
 26278  		}
 26279  		break
 26280  	}
 26281  	return false
 26282  }
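// exampleDiv128ZeroHi is an illustrative sketch, not generated code: the
// Select0/Select1 rules for Div128u above rely on the fact that when the high
// 64 bits of the dividend are zero, the 128-bit quotient and remainder equal
// the ordinary 64-bit ones. As with the SSA op, y must be nonzero.
func exampleDiv128ZeroHi(lo, y uint64) (quo, rem uint64) {
	return lo / y, lo % y
}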
 26283  func rewriteValuegeneric_OpSelect1(v *Value) bool {
 26284  	v_0 := v.Args[0]
 26285  	// match: (Select1 (Div128u (Const64 [0]) lo y))
 26286  	// result: (Mod64u lo y)
 26287  	for {
 26288  		if v_0.Op != OpDiv128u {
 26289  			break
 26290  		}
 26291  		y := v_0.Args[2]
 26292  		v_0_0 := v_0.Args[0]
 26293  		if v_0_0.Op != OpConst64 || auxIntToInt64(v_0_0.AuxInt) != 0 {
 26294  			break
 26295  		}
 26296  		lo := v_0.Args[1]
 26297  		v.reset(OpMod64u)
 26298  		v.AddArg2(lo, y)
 26299  		return true
 26300  	}
 26301  	// match: (Select1 (Mul32uover (Const32 [1]) x))
 26302  	// result: (ConstBool [false])
 26303  	for {
 26304  		if v_0.Op != OpMul32uover {
 26305  			break
 26306  		}
 26307  		_ = v_0.Args[1]
 26308  		v_0_0 := v_0.Args[0]
 26309  		v_0_1 := v_0.Args[1]
 26310  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 26311  			if v_0_0.Op != OpConst32 || auxIntToInt32(v_0_0.AuxInt) != 1 {
 26312  				continue
 26313  			}
 26314  			v.reset(OpConstBool)
 26315  			v.AuxInt = boolToAuxInt(false)
 26316  			return true
 26317  		}
 26318  		break
 26319  	}
 26320  	// match: (Select1 (Mul64uover (Const64 [1]) x))
 26321  	// result: (ConstBool [false])
 26322  	for {
 26323  		if v_0.Op != OpMul64uover {
 26324  			break
 26325  		}
 26326  		_ = v_0.Args[1]
 26327  		v_0_0 := v_0.Args[0]
 26328  		v_0_1 := v_0.Args[1]
 26329  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 26330  			if v_0_0.Op != OpConst64 || auxIntToInt64(v_0_0.AuxInt) != 1 {
 26331  				continue
 26332  			}
 26333  			v.reset(OpConstBool)
 26334  			v.AuxInt = boolToAuxInt(false)
 26335  			return true
 26336  		}
 26337  		break
 26338  	}
 26339  	// match: (Select1 (Mul64uover (Const64 [0]) x))
 26340  	// result: (ConstBool [false])
 26341  	for {
 26342  		if v_0.Op != OpMul64uover {
 26343  			break
 26344  		}
 26345  		_ = v_0.Args[1]
 26346  		v_0_0 := v_0.Args[0]
 26347  		v_0_1 := v_0.Args[1]
 26348  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 26349  			if v_0_0.Op != OpConst64 || auxIntToInt64(v_0_0.AuxInt) != 0 {
 26350  				continue
 26351  			}
 26352  			v.reset(OpConstBool)
 26353  			v.AuxInt = boolToAuxInt(false)
 26354  			return true
 26355  		}
 26356  		break
 26357  	}
 26358  	// match: (Select1 (Mul32uover (Const32 [0]) x))
 26359  	// result: (ConstBool [false])
 26360  	for {
 26361  		if v_0.Op != OpMul32uover {
 26362  			break
 26363  		}
 26364  		_ = v_0.Args[1]
 26365  		v_0_0 := v_0.Args[0]
 26366  		v_0_1 := v_0.Args[1]
 26367  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 26368  			if v_0_0.Op != OpConst32 || auxIntToInt32(v_0_0.AuxInt) != 0 {
 26369  				continue
 26370  			}
 26371  			v.reset(OpConstBool)
 26372  			v.AuxInt = boolToAuxInt(false)
 26373  			return true
 26374  		}
 26375  		break
 26376  	}
 26377  	return false
 26378  }
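// Informal summary of the Select1 rules above (not generated from
// _gen/generic.rules): Select1 is the second result, i.e. the remainder of
// Div128u and the overflow flag of Mul32uover/Mul64uover. With a zero high
// dividend word the remainder comes from Mod64u, and multiplying by a
// constant 0 or 1 can never overflow, so the flag folds to false.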
 26379  func rewriteValuegeneric_OpSelectN(v *Value) bool {
 26380  	v_0 := v.Args[0]
 26381  	b := v.Block
 26382  	config := b.Func.Config
 26383  	// match: (SelectN [0] (MakeResult x ___))
 26384  	// result: x
 26385  	for {
 26386  		if auxIntToInt64(v.AuxInt) != 0 || v_0.Op != OpMakeResult || len(v_0.Args) < 1 {
 26387  			break
 26388  		}
 26389  		x := v_0.Args[0]
 26390  		v.copyOf(x)
 26391  		return true
 26392  	}
 26393  	// match: (SelectN [1] (MakeResult x y ___))
 26394  	// result: y
 26395  	for {
 26396  		if auxIntToInt64(v.AuxInt) != 1 || v_0.Op != OpMakeResult || len(v_0.Args) < 2 {
 26397  			break
 26398  		}
 26399  		y := v_0.Args[1]
 26400  		v.copyOf(y)
 26401  		return true
 26402  	}
 26403  	// match: (SelectN [2] (MakeResult x y z ___))
 26404  	// result: z
 26405  	for {
 26406  		if auxIntToInt64(v.AuxInt) != 2 || v_0.Op != OpMakeResult || len(v_0.Args) < 3 {
 26407  			break
 26408  		}
 26409  		z := v_0.Args[2]
 26410  		v.copyOf(z)
 26411  		return true
 26412  	}
 26413  	// match: (SelectN [0] call:(StaticCall {sym} s1:(Store _ (Const64 [sz]) s2:(Store _ src s3:(Store {t} _ dst mem)))))
 26414  	// cond: sz >= 0 && isSameCall(sym, "runtime.memmove") && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, int64(sz), config) && clobber(s1, s2, s3, call)
 26415  	// result: (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
 26416  	for {
 26417  		if auxIntToInt64(v.AuxInt) != 0 {
 26418  			break
 26419  		}
 26420  		call := v_0
 26421  		if call.Op != OpStaticCall || len(call.Args) != 1 {
 26422  			break
 26423  		}
 26424  		sym := auxToCall(call.Aux)
 26425  		s1 := call.Args[0]
 26426  		if s1.Op != OpStore {
 26427  			break
 26428  		}
 26429  		_ = s1.Args[2]
 26430  		s1_1 := s1.Args[1]
 26431  		if s1_1.Op != OpConst64 {
 26432  			break
 26433  		}
 26434  		sz := auxIntToInt64(s1_1.AuxInt)
 26435  		s2 := s1.Args[2]
 26436  		if s2.Op != OpStore {
 26437  			break
 26438  		}
 26439  		_ = s2.Args[2]
 26440  		src := s2.Args[1]
 26441  		s3 := s2.Args[2]
 26442  		if s3.Op != OpStore {
 26443  			break
 26444  		}
 26445  		mem := s3.Args[2]
 26446  		dst := s3.Args[1]
 26447  		if !(sz >= 0 && isSameCall(sym, "runtime.memmove") && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, int64(sz), config) && clobber(s1, s2, s3, call)) {
 26448  			break
 26449  		}
 26450  		v.reset(OpMove)
 26451  		v.AuxInt = int64ToAuxInt(int64(sz))
 26452  		v.Aux = typeToAux(types.Types[types.TUINT8])
 26453  		v.AddArg3(dst, src, mem)
 26454  		return true
 26455  	}
 26456  	// match: (SelectN [0] call:(StaticCall {sym} s1:(Store _ (Const32 [sz]) s2:(Store _ src s3:(Store {t} _ dst mem)))))
 26457  	// cond: sz >= 0 && isSameCall(sym, "runtime.memmove") && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, int64(sz), config) && clobber(s1, s2, s3, call)
 26458  	// result: (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
 26459  	for {
 26460  		if auxIntToInt64(v.AuxInt) != 0 {
 26461  			break
 26462  		}
 26463  		call := v_0
 26464  		if call.Op != OpStaticCall || len(call.Args) != 1 {
 26465  			break
 26466  		}
 26467  		sym := auxToCall(call.Aux)
 26468  		s1 := call.Args[0]
 26469  		if s1.Op != OpStore {
 26470  			break
 26471  		}
 26472  		_ = s1.Args[2]
 26473  		s1_1 := s1.Args[1]
 26474  		if s1_1.Op != OpConst32 {
 26475  			break
 26476  		}
 26477  		sz := auxIntToInt32(s1_1.AuxInt)
 26478  		s2 := s1.Args[2]
 26479  		if s2.Op != OpStore {
 26480  			break
 26481  		}
 26482  		_ = s2.Args[2]
 26483  		src := s2.Args[1]
 26484  		s3 := s2.Args[2]
 26485  		if s3.Op != OpStore {
 26486  			break
 26487  		}
 26488  		mem := s3.Args[2]
 26489  		dst := s3.Args[1]
 26490  		if !(sz >= 0 && isSameCall(sym, "runtime.memmove") && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, int64(sz), config) && clobber(s1, s2, s3, call)) {
 26491  			break
 26492  		}
 26493  		v.reset(OpMove)
 26494  		v.AuxInt = int64ToAuxInt(int64(sz))
 26495  		v.Aux = typeToAux(types.Types[types.TUINT8])
 26496  		v.AddArg3(dst, src, mem)
 26497  		return true
 26498  	}
 26499  	// match: (SelectN [0] call:(StaticCall {sym} dst src (Const64 [sz]) mem))
 26500  	// cond: sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)
 26501  	// result: (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
 26502  	for {
 26503  		if auxIntToInt64(v.AuxInt) != 0 {
 26504  			break
 26505  		}
 26506  		call := v_0
 26507  		if call.Op != OpStaticCall || len(call.Args) != 4 {
 26508  			break
 26509  		}
 26510  		sym := auxToCall(call.Aux)
 26511  		mem := call.Args[3]
 26512  		dst := call.Args[0]
 26513  		src := call.Args[1]
 26514  		call_2 := call.Args[2]
 26515  		if call_2.Op != OpConst64 {
 26516  			break
 26517  		}
 26518  		sz := auxIntToInt64(call_2.AuxInt)
 26519  		if !(sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)) {
 26520  			break
 26521  		}
 26522  		v.reset(OpMove)
 26523  		v.AuxInt = int64ToAuxInt(int64(sz))
 26524  		v.Aux = typeToAux(types.Types[types.TUINT8])
 26525  		v.AddArg3(dst, src, mem)
 26526  		return true
 26527  	}
 26528  	// match: (SelectN [0] call:(StaticCall {sym} dst src (Const32 [sz]) mem))
 26529  	// cond: sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)
 26530  	// result: (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
 26531  	for {
 26532  		if auxIntToInt64(v.AuxInt) != 0 {
 26533  			break
 26534  		}
 26535  		call := v_0
 26536  		if call.Op != OpStaticCall || len(call.Args) != 4 {
 26537  			break
 26538  		}
 26539  		sym := auxToCall(call.Aux)
 26540  		mem := call.Args[3]
 26541  		dst := call.Args[0]
 26542  		src := call.Args[1]
 26543  		call_2 := call.Args[2]
 26544  		if call_2.Op != OpConst32 {
 26545  			break
 26546  		}
 26547  		sz := auxIntToInt32(call_2.AuxInt)
 26548  		if !(sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)) {
 26549  			break
 26550  		}
 26551  		v.reset(OpMove)
 26552  		v.AuxInt = int64ToAuxInt(int64(sz))
 26553  		v.Aux = typeToAux(types.Types[types.TUINT8])
 26554  		v.AddArg3(dst, src, mem)
 26555  		return true
 26556  	}
 26557  	// match: (SelectN [0] call:(StaticLECall {sym} dst src (Const64 [sz]) mem))
 26558  	// cond: sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)
 26559  	// result: (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
 26560  	for {
 26561  		if auxIntToInt64(v.AuxInt) != 0 {
 26562  			break
 26563  		}
 26564  		call := v_0
 26565  		if call.Op != OpStaticLECall || len(call.Args) != 4 {
 26566  			break
 26567  		}
 26568  		sym := auxToCall(call.Aux)
 26569  		mem := call.Args[3]
 26570  		dst := call.Args[0]
 26571  		src := call.Args[1]
 26572  		call_2 := call.Args[2]
 26573  		if call_2.Op != OpConst64 {
 26574  			break
 26575  		}
 26576  		sz := auxIntToInt64(call_2.AuxInt)
 26577  		if !(sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)) {
 26578  			break
 26579  		}
 26580  		v.reset(OpMove)
 26581  		v.AuxInt = int64ToAuxInt(int64(sz))
 26582  		v.Aux = typeToAux(types.Types[types.TUINT8])
 26583  		v.AddArg3(dst, src, mem)
 26584  		return true
 26585  	}
 26586  	// match: (SelectN [0] call:(StaticLECall {sym} dst src (Const32 [sz]) mem))
 26587  	// cond: sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)
 26588  	// result: (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
 26589  	for {
 26590  		if auxIntToInt64(v.AuxInt) != 0 {
 26591  			break
 26592  		}
 26593  		call := v_0
 26594  		if call.Op != OpStaticLECall || len(call.Args) != 4 {
 26595  			break
 26596  		}
 26597  		sym := auxToCall(call.Aux)
 26598  		mem := call.Args[3]
 26599  		dst := call.Args[0]
 26600  		src := call.Args[1]
 26601  		call_2 := call.Args[2]
 26602  		if call_2.Op != OpConst32 {
 26603  			break
 26604  		}
 26605  		sz := auxIntToInt32(call_2.AuxInt)
 26606  		if !(sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)) {
 26607  			break
 26608  		}
 26609  		v.reset(OpMove)
 26610  		v.AuxInt = int64ToAuxInt(int64(sz))
 26611  		v.Aux = typeToAux(types.Types[types.TUINT8])
 26612  		v.AddArg3(dst, src, mem)
 26613  		return true
 26614  	}
 26615  	// match: (SelectN [0] call:(StaticLECall {sym} a x))
 26616  	// cond: needRaceCleanup(sym, call) && clobber(call)
 26617  	// result: x
 26618  	for {
 26619  		if auxIntToInt64(v.AuxInt) != 0 {
 26620  			break
 26621  		}
 26622  		call := v_0
 26623  		if call.Op != OpStaticLECall || len(call.Args) != 2 {
 26624  			break
 26625  		}
 26626  		sym := auxToCall(call.Aux)
 26627  		x := call.Args[1]
 26628  		if !(needRaceCleanup(sym, call) && clobber(call)) {
 26629  			break
 26630  		}
 26631  		v.copyOf(x)
 26632  		return true
 26633  	}
 26634  	// match: (SelectN [0] call:(StaticLECall {sym} x))
 26635  	// cond: needRaceCleanup(sym, call) && clobber(call)
 26636  	// result: x
 26637  	for {
 26638  		if auxIntToInt64(v.AuxInt) != 0 {
 26639  			break
 26640  		}
 26641  		call := v_0
 26642  		if call.Op != OpStaticLECall || len(call.Args) != 1 {
 26643  			break
 26644  		}
 26645  		sym := auxToCall(call.Aux)
 26646  		x := call.Args[0]
 26647  		if !(needRaceCleanup(sym, call) && clobber(call)) {
 26648  			break
 26649  		}
 26650  		v.copyOf(x)
 26651  		return true
 26652  	}
 26653  	// match: (SelectN [1] (StaticCall {sym} _ newLen:(Const64) _ _ _ _))
 26654  	// cond: v.Type.IsInteger() && isSameCall(sym, "runtime.growslice")
 26655  	// result: newLen
 26656  	for {
 26657  		if auxIntToInt64(v.AuxInt) != 1 || v_0.Op != OpStaticCall || len(v_0.Args) != 6 {
 26658  			break
 26659  		}
 26660  		sym := auxToCall(v_0.Aux)
 26661  		_ = v_0.Args[1]
 26662  		newLen := v_0.Args[1]
 26663  		if newLen.Op != OpConst64 || !(v.Type.IsInteger() && isSameCall(sym, "runtime.growslice")) {
 26664  			break
 26665  		}
 26666  		v.copyOf(newLen)
 26667  		return true
 26668  	}
 26669  	// match: (SelectN [1] (StaticCall {sym} _ newLen:(Const32) _ _ _ _))
 26670  	// cond: v.Type.IsInteger() && isSameCall(sym, "runtime.growslice")
 26671  	// result: newLen
 26672  	for {
 26673  		if auxIntToInt64(v.AuxInt) != 1 || v_0.Op != OpStaticCall || len(v_0.Args) != 6 {
 26674  			break
 26675  		}
 26676  		sym := auxToCall(v_0.Aux)
 26677  		_ = v_0.Args[1]
 26678  		newLen := v_0.Args[1]
 26679  		if newLen.Op != OpConst32 || !(v.Type.IsInteger() && isSameCall(sym, "runtime.growslice")) {
 26680  			break
 26681  		}
 26682  		v.copyOf(newLen)
 26683  		return true
 26684  	}
 26685  	return false
 26686  }
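// Informal summary of the SelectN rules above (not generated from
// _gen/generic.rules): SelectN [i] projects the i-th result, so projections of
// a MakeResult collapse to the corresponding argument. The remaining rules
// rewrite runtime.memmove calls with a constant, non-negative, inlinable size
// into a Move (matching both the form where the arguments reach the call
// through a chain of Stores and the form where they appear directly as call
// arguments), drop calls that needRaceCleanup reports as dead race-detector
// cleanup, and note that when the new length passed to runtime.growslice is a
// constant, the length result of the call is that same constant.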
 26687  func rewriteValuegeneric_OpSignExt16to32(v *Value) bool {
 26688  	v_0 := v.Args[0]
 26689  	// match: (SignExt16to32 (Const16 [c]))
 26690  	// result: (Const32 [int32(c)])
 26691  	for {
 26692  		if v_0.Op != OpConst16 {
 26693  			break
 26694  		}
 26695  		c := auxIntToInt16(v_0.AuxInt)
 26696  		v.reset(OpConst32)
 26697  		v.AuxInt = int32ToAuxInt(int32(c))
 26698  		return true
 26699  	}
 26700  	// match: (SignExt16to32 (Trunc32to16 x:(Rsh32x64 _ (Const64 [s]))))
 26701  	// cond: s >= 16
 26702  	// result: x
 26703  	for {
 26704  		if v_0.Op != OpTrunc32to16 {
 26705  			break
 26706  		}
 26707  		x := v_0.Args[0]
 26708  		if x.Op != OpRsh32x64 {
 26709  			break
 26710  		}
 26711  		_ = x.Args[1]
 26712  		x_1 := x.Args[1]
 26713  		if x_1.Op != OpConst64 {
 26714  			break
 26715  		}
 26716  		s := auxIntToInt64(x_1.AuxInt)
 26717  		if !(s >= 16) {
 26718  			break
 26719  		}
 26720  		v.copyOf(x)
 26721  		return true
 26722  	}
 26723  	return false
 26724  }
 26725  func rewriteValuegeneric_OpSignExt16to64(v *Value) bool {
 26726  	v_0 := v.Args[0]
 26727  	// match: (SignExt16to64 (Const16 [c]))
 26728  	// result: (Const64 [int64(c)])
 26729  	for {
 26730  		if v_0.Op != OpConst16 {
 26731  			break
 26732  		}
 26733  		c := auxIntToInt16(v_0.AuxInt)
 26734  		v.reset(OpConst64)
 26735  		v.AuxInt = int64ToAuxInt(int64(c))
 26736  		return true
 26737  	}
 26738  	// match: (SignExt16to64 (Trunc64to16 x:(Rsh64x64 _ (Const64 [s]))))
 26739  	// cond: s >= 48
 26740  	// result: x
 26741  	for {
 26742  		if v_0.Op != OpTrunc64to16 {
 26743  			break
 26744  		}
 26745  		x := v_0.Args[0]
 26746  		if x.Op != OpRsh64x64 {
 26747  			break
 26748  		}
 26749  		_ = x.Args[1]
 26750  		x_1 := x.Args[1]
 26751  		if x_1.Op != OpConst64 {
 26752  			break
 26753  		}
 26754  		s := auxIntToInt64(x_1.AuxInt)
 26755  		if !(s >= 48) {
 26756  			break
 26757  		}
 26758  		v.copyOf(x)
 26759  		return true
 26760  	}
 26761  	return false
 26762  }
 26763  func rewriteValuegeneric_OpSignExt32to64(v *Value) bool {
 26764  	v_0 := v.Args[0]
 26765  	// match: (SignExt32to64 (Const32 [c]))
 26766  	// result: (Const64 [int64(c)])
 26767  	for {
 26768  		if v_0.Op != OpConst32 {
 26769  			break
 26770  		}
 26771  		c := auxIntToInt32(v_0.AuxInt)
 26772  		v.reset(OpConst64)
 26773  		v.AuxInt = int64ToAuxInt(int64(c))
 26774  		return true
 26775  	}
 26776  	// match: (SignExt32to64 (Trunc64to32 x:(Rsh64x64 _ (Const64 [s]))))
 26777  	// cond: s >= 32
 26778  	// result: x
 26779  	for {
 26780  		if v_0.Op != OpTrunc64to32 {
 26781  			break
 26782  		}
 26783  		x := v_0.Args[0]
 26784  		if x.Op != OpRsh64x64 {
 26785  			break
 26786  		}
 26787  		_ = x.Args[1]
 26788  		x_1 := x.Args[1]
 26789  		if x_1.Op != OpConst64 {
 26790  			break
 26791  		}
 26792  		s := auxIntToInt64(x_1.AuxInt)
 26793  		if !(s >= 32) {
 26794  			break
 26795  		}
 26796  		v.copyOf(x)
 26797  		return true
 26798  	}
 26799  	return false
 26800  }
 26801  func rewriteValuegeneric_OpSignExt8to16(v *Value) bool {
 26802  	v_0 := v.Args[0]
 26803  	// match: (SignExt8to16 (Const8 [c]))
 26804  	// result: (Const16 [int16(c)])
 26805  	for {
 26806  		if v_0.Op != OpConst8 {
 26807  			break
 26808  		}
 26809  		c := auxIntToInt8(v_0.AuxInt)
 26810  		v.reset(OpConst16)
 26811  		v.AuxInt = int16ToAuxInt(int16(c))
 26812  		return true
 26813  	}
 26814  	// match: (SignExt8to16 (Trunc16to8 x:(Rsh16x64 _ (Const64 [s]))))
 26815  	// cond: s >= 8
 26816  	// result: x
 26817  	for {
 26818  		if v_0.Op != OpTrunc16to8 {
 26819  			break
 26820  		}
 26821  		x := v_0.Args[0]
 26822  		if x.Op != OpRsh16x64 {
 26823  			break
 26824  		}
 26825  		_ = x.Args[1]
 26826  		x_1 := x.Args[1]
 26827  		if x_1.Op != OpConst64 {
 26828  			break
 26829  		}
 26830  		s := auxIntToInt64(x_1.AuxInt)
 26831  		if !(s >= 8) {
 26832  			break
 26833  		}
 26834  		v.copyOf(x)
 26835  		return true
 26836  	}
 26837  	return false
 26838  }
 26839  func rewriteValuegeneric_OpSignExt8to32(v *Value) bool {
 26840  	v_0 := v.Args[0]
 26841  	// match: (SignExt8to32 (Const8 [c]))
 26842  	// result: (Const32 [int32(c)])
 26843  	for {
 26844  		if v_0.Op != OpConst8 {
 26845  			break
 26846  		}
 26847  		c := auxIntToInt8(v_0.AuxInt)
 26848  		v.reset(OpConst32)
 26849  		v.AuxInt = int32ToAuxInt(int32(c))
 26850  		return true
 26851  	}
 26852  	// match: (SignExt8to32 (Trunc32to8 x:(Rsh32x64 _ (Const64 [s]))))
 26853  	// cond: s >= 24
 26854  	// result: x
 26855  	for {
 26856  		if v_0.Op != OpTrunc32to8 {
 26857  			break
 26858  		}
 26859  		x := v_0.Args[0]
 26860  		if x.Op != OpRsh32x64 {
 26861  			break
 26862  		}
 26863  		_ = x.Args[1]
 26864  		x_1 := x.Args[1]
 26865  		if x_1.Op != OpConst64 {
 26866  			break
 26867  		}
 26868  		s := auxIntToInt64(x_1.AuxInt)
 26869  		if !(s >= 24) {
 26870  			break
 26871  		}
 26872  		v.copyOf(x)
 26873  		return true
 26874  	}
 26875  	return false
 26876  }
 26877  func rewriteValuegeneric_OpSignExt8to64(v *Value) bool {
 26878  	v_0 := v.Args[0]
 26879  	// match: (SignExt8to64 (Const8 [c]))
 26880  	// result: (Const64 [int64(c)])
 26881  	for {
 26882  		if v_0.Op != OpConst8 {
 26883  			break
 26884  		}
 26885  		c := auxIntToInt8(v_0.AuxInt)
 26886  		v.reset(OpConst64)
 26887  		v.AuxInt = int64ToAuxInt(int64(c))
 26888  		return true
 26889  	}
 26890  	// match: (SignExt8to64 (Trunc64to8 x:(Rsh64x64 _ (Const64 [s]))))
 26891  	// cond: s >= 56
 26892  	// result: x
 26893  	for {
 26894  		if v_0.Op != OpTrunc64to8 {
 26895  			break
 26896  		}
 26897  		x := v_0.Args[0]
 26898  		if x.Op != OpRsh64x64 {
 26899  			break
 26900  		}
 26901  		_ = x.Args[1]
 26902  		x_1 := x.Args[1]
 26903  		if x_1.Op != OpConst64 {
 26904  			break
 26905  		}
 26906  		s := auxIntToInt64(x_1.AuxInt)
 26907  		if !(s >= 56) {
 26908  			break
 26909  		}
 26910  		v.copyOf(x)
 26911  		return true
 26912  	}
 26913  	return false
 26914  }
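// Informal summary of the SignExt* rules above (not generated from
// _gen/generic.rules): each width variant folds a constant operand into the
// wider constant, and removes a truncate-then-sign-extend pair when the value
// came from an arithmetic right shift large enough that the upper bits are
// already copies of the kept top bit (e.g. SignExt16to32 of Trunc32to16 of an
// Rsh32x64 by at least 16), so re-extending reproduces the original value.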
 26915  func rewriteValuegeneric_OpSliceCap(v *Value) bool {
 26916  	v_0 := v.Args[0]
 26917  	// match: (SliceCap (SliceMake _ _ (Const64 <t> [c])))
 26918  	// result: (Const64 <t> [c])
 26919  	for {
 26920  		if v_0.Op != OpSliceMake {
 26921  			break
 26922  		}
 26923  		_ = v_0.Args[2]
 26924  		v_0_2 := v_0.Args[2]
 26925  		if v_0_2.Op != OpConst64 {
 26926  			break
 26927  		}
 26928  		t := v_0_2.Type
 26929  		c := auxIntToInt64(v_0_2.AuxInt)
 26930  		v.reset(OpConst64)
 26931  		v.Type = t
 26932  		v.AuxInt = int64ToAuxInt(c)
 26933  		return true
 26934  	}
 26935  	// match: (SliceCap (SliceMake _ _ (Const32 <t> [c])))
 26936  	// result: (Const32 <t> [c])
 26937  	for {
 26938  		if v_0.Op != OpSliceMake {
 26939  			break
 26940  		}
 26941  		_ = v_0.Args[2]
 26942  		v_0_2 := v_0.Args[2]
 26943  		if v_0_2.Op != OpConst32 {
 26944  			break
 26945  		}
 26946  		t := v_0_2.Type
 26947  		c := auxIntToInt32(v_0_2.AuxInt)
 26948  		v.reset(OpConst32)
 26949  		v.Type = t
 26950  		v.AuxInt = int32ToAuxInt(c)
 26951  		return true
 26952  	}
 26953  	// match: (SliceCap (SliceMake _ _ (SliceCap x)))
 26954  	// result: (SliceCap x)
 26955  	for {
 26956  		if v_0.Op != OpSliceMake {
 26957  			break
 26958  		}
 26959  		_ = v_0.Args[2]
 26960  		v_0_2 := v_0.Args[2]
 26961  		if v_0_2.Op != OpSliceCap {
 26962  			break
 26963  		}
 26964  		x := v_0_2.Args[0]
 26965  		v.reset(OpSliceCap)
 26966  		v.AddArg(x)
 26967  		return true
 26968  	}
 26969  	// match: (SliceCap (SliceMake _ _ (SliceLen x)))
 26970  	// result: (SliceLen x)
 26971  	for {
 26972  		if v_0.Op != OpSliceMake {
 26973  			break
 26974  		}
 26975  		_ = v_0.Args[2]
 26976  		v_0_2 := v_0.Args[2]
 26977  		if v_0_2.Op != OpSliceLen {
 26978  			break
 26979  		}
 26980  		x := v_0_2.Args[0]
 26981  		v.reset(OpSliceLen)
 26982  		v.AddArg(x)
 26983  		return true
 26984  	}
 26985  	return false
 26986  }
 26987  func rewriteValuegeneric_OpSliceLen(v *Value) bool {
 26988  	v_0 := v.Args[0]
 26989  	// match: (SliceLen (SliceMake _ (Const64 <t> [c]) _))
 26990  	// result: (Const64 <t> [c])
 26991  	for {
 26992  		if v_0.Op != OpSliceMake {
 26993  			break
 26994  		}
 26995  		_ = v_0.Args[1]
 26996  		v_0_1 := v_0.Args[1]
 26997  		if v_0_1.Op != OpConst64 {
 26998  			break
 26999  		}
 27000  		t := v_0_1.Type
 27001  		c := auxIntToInt64(v_0_1.AuxInt)
 27002  		v.reset(OpConst64)
 27003  		v.Type = t
 27004  		v.AuxInt = int64ToAuxInt(c)
 27005  		return true
 27006  	}
 27007  	// match: (SliceLen (SliceMake _ (Const32 <t> [c]) _))
 27008  	// result: (Const32 <t> [c])
 27009  	for {
 27010  		if v_0.Op != OpSliceMake {
 27011  			break
 27012  		}
 27013  		_ = v_0.Args[1]
 27014  		v_0_1 := v_0.Args[1]
 27015  		if v_0_1.Op != OpConst32 {
 27016  			break
 27017  		}
 27018  		t := v_0_1.Type
 27019  		c := auxIntToInt32(v_0_1.AuxInt)
 27020  		v.reset(OpConst32)
 27021  		v.Type = t
 27022  		v.AuxInt = int32ToAuxInt(c)
 27023  		return true
 27024  	}
 27025  	// match: (SliceLen (SliceMake _ (SliceLen x) _))
 27026  	// result: (SliceLen x)
 27027  	for {
 27028  		if v_0.Op != OpSliceMake {
 27029  			break
 27030  		}
 27031  		_ = v_0.Args[1]
 27032  		v_0_1 := v_0.Args[1]
 27033  		if v_0_1.Op != OpSliceLen {
 27034  			break
 27035  		}
 27036  		x := v_0_1.Args[0]
 27037  		v.reset(OpSliceLen)
 27038  		v.AddArg(x)
 27039  		return true
 27040  	}
 27041  	// match: (SliceLen (SelectN [0] (StaticLECall {sym} _ newLen:(Const64) _ _ _ _)))
 27042  	// cond: isSameCall(sym, "runtime.growslice")
 27043  	// result: newLen
 27044  	for {
 27045  		if v_0.Op != OpSelectN || auxIntToInt64(v_0.AuxInt) != 0 {
 27046  			break
 27047  		}
 27048  		v_0_0 := v_0.Args[0]
 27049  		if v_0_0.Op != OpStaticLECall || len(v_0_0.Args) != 6 {
 27050  			break
 27051  		}
 27052  		sym := auxToCall(v_0_0.Aux)
 27053  		_ = v_0_0.Args[1]
 27054  		newLen := v_0_0.Args[1]
 27055  		if newLen.Op != OpConst64 || !(isSameCall(sym, "runtime.growslice")) {
 27056  			break
 27057  		}
 27058  		v.copyOf(newLen)
 27059  		return true
 27060  	}
 27061  	// match: (SliceLen (SelectN [0] (StaticLECall {sym} _ newLen:(Const32) _ _ _ _)))
 27062  	// cond: isSameCall(sym, "runtime.growslice")
 27063  	// result: newLen
 27064  	for {
 27065  		if v_0.Op != OpSelectN || auxIntToInt64(v_0.AuxInt) != 0 {
 27066  			break
 27067  		}
 27068  		v_0_0 := v_0.Args[0]
 27069  		if v_0_0.Op != OpStaticLECall || len(v_0_0.Args) != 6 {
 27070  			break
 27071  		}
 27072  		sym := auxToCall(v_0_0.Aux)
 27073  		_ = v_0_0.Args[1]
 27074  		newLen := v_0_0.Args[1]
 27075  		if newLen.Op != OpConst32 || !(isSameCall(sym, "runtime.growslice")) {
 27076  			break
 27077  		}
 27078  		v.copyOf(newLen)
 27079  		return true
 27080  	}
 27081  	return false
 27082  }
 27083  func rewriteValuegeneric_OpSlicePtr(v *Value) bool {
 27084  	v_0 := v.Args[0]
 27085  	// match: (SlicePtr (SliceMake (SlicePtr x) _ _))
 27086  	// result: (SlicePtr x)
 27087  	for {
 27088  		if v_0.Op != OpSliceMake {
 27089  			break
 27090  		}
 27091  		v_0_0 := v_0.Args[0]
 27092  		if v_0_0.Op != OpSlicePtr {
 27093  			break
 27094  		}
 27095  		x := v_0_0.Args[0]
 27096  		v.reset(OpSlicePtr)
 27097  		v.AddArg(x)
 27098  		return true
 27099  	}
 27100  	return false
 27101  }
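// Informal summary of the SliceCap/SliceLen/SlicePtr rules above (not
// generated from _gen/generic.rules): each projection of a SliceMake reduces
// to the matching component when that component is a constant or already the
// same kind of projection of another slice. SliceLen additionally recognizes a
// slice freshly returned by runtime.growslice: when the requested new length
// is a constant, the length of the result is that constant.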
 27102  func rewriteValuegeneric_OpSlicemask(v *Value) bool {
 27103  	v_0 := v.Args[0]
 27104  	// match: (Slicemask (Const32 [x]))
 27105  	// cond: x > 0
 27106  	// result: (Const32 [-1])
 27107  	for {
 27108  		if v_0.Op != OpConst32 {
 27109  			break
 27110  		}
 27111  		x := auxIntToInt32(v_0.AuxInt)
 27112  		if !(x > 0) {
 27113  			break
 27114  		}
 27115  		v.reset(OpConst32)
 27116  		v.AuxInt = int32ToAuxInt(-1)
 27117  		return true
 27118  	}
 27119  	// match: (Slicemask (Const32 [0]))
 27120  	// result: (Const32 [0])
 27121  	for {
 27122  		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 27123  			break
 27124  		}
 27125  		v.reset(OpConst32)
 27126  		v.AuxInt = int32ToAuxInt(0)
 27127  		return true
 27128  	}
 27129  	// match: (Slicemask (Const64 [x]))
 27130  	// cond: x > 0
 27131  	// result: (Const64 [-1])
 27132  	for {
 27133  		if v_0.Op != OpConst64 {
 27134  			break
 27135  		}
 27136  		x := auxIntToInt64(v_0.AuxInt)
 27137  		if !(x > 0) {
 27138  			break
 27139  		}
 27140  		v.reset(OpConst64)
 27141  		v.AuxInt = int64ToAuxInt(-1)
 27142  		return true
 27143  	}
 27144  	// match: (Slicemask (Const64 [0]))
 27145  	// result: (Const64 [0])
 27146  	for {
 27147  		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 27148  			break
 27149  		}
 27150  		v.reset(OpConst64)
 27151  		v.AuxInt = int64ToAuxInt(0)
 27152  		return true
 27153  	}
 27154  	return false
 27155  }
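// Informal summary of the Slicemask rules above (not generated from
// _gen/generic.rules): Slicemask yields all ones for a positive argument and
// zero for a zero argument; it is typically used when re-slicing so that the
// base pointer is not advanced when the resulting slice is empty. The rules
// fold the two constant cases.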
 27156  func rewriteValuegeneric_OpSqrt(v *Value) bool {
 27157  	v_0 := v.Args[0]
 27158  	// match: (Sqrt (Const64F [c]))
 27159  	// cond: !math.IsNaN(math.Sqrt(c))
 27160  	// result: (Const64F [math.Sqrt(c)])
 27161  	for {
 27162  		if v_0.Op != OpConst64F {
 27163  			break
 27164  		}
 27165  		c := auxIntToFloat64(v_0.AuxInt)
 27166  		if !(!math.IsNaN(math.Sqrt(c))) {
 27167  			break
 27168  		}
 27169  		v.reset(OpConst64F)
 27170  		v.AuxInt = float64ToAuxInt(math.Sqrt(c))
 27171  		return true
 27172  	}
 27173  	return false
 27174  }
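// Informal summary of the Sqrt rule above (not generated from
// _gen/generic.rules): the square root of a floating-point constant is folded
// at compile time, but only when the result is not NaN, so square roots of
// negative (or NaN) constants are left for the runtime to compute.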
 27175  func rewriteValuegeneric_OpStaticLECall(v *Value) bool {
 27176  	b := v.Block
 27177  	config := b.Func.Config
 27178  	typ := &b.Func.Config.Types
 27179  	// match: (StaticLECall {callAux} sptr (Addr {scon} (SB)) (Const64 [1]) mem)
 27180  	// cond: isSameCall(callAux, "runtime.memequal") && symIsRO(scon)
 27181  	// result: (MakeResult (Eq8 (Load <typ.Int8> sptr mem) (Const8 <typ.Int8> [int8(read8(scon,0))])) mem)
 27182  	for {
 27183  		if len(v.Args) != 4 {
 27184  			break
 27185  		}
 27186  		callAux := auxToCall(v.Aux)
 27187  		mem := v.Args[3]
 27188  		sptr := v.Args[0]
 27189  		v_1 := v.Args[1]
 27190  		if v_1.Op != OpAddr {
 27191  			break
 27192  		}
 27193  		scon := auxToSym(v_1.Aux)
 27194  		v_1_0 := v_1.Args[0]
 27195  		if v_1_0.Op != OpSB {
 27196  			break
 27197  		}
 27198  		v_2 := v.Args[2]
 27199  		if v_2.Op != OpConst64 || auxIntToInt64(v_2.AuxInt) != 1 || !(isSameCall(callAux, "runtime.memequal") && symIsRO(scon)) {
 27200  			break
 27201  		}
 27202  		v.reset(OpMakeResult)
 27203  		v0 := b.NewValue0(v.Pos, OpEq8, typ.Bool)
 27204  		v1 := b.NewValue0(v.Pos, OpLoad, typ.Int8)
 27205  		v1.AddArg2(sptr, mem)
 27206  		v2 := b.NewValue0(v.Pos, OpConst8, typ.Int8)
 27207  		v2.AuxInt = int8ToAuxInt(int8(read8(scon, 0)))
 27208  		v0.AddArg2(v1, v2)
 27209  		v.AddArg2(v0, mem)
 27210  		return true
 27211  	}
 27212  	// match: (StaticLECall {callAux} sptr (Addr {scon} (SB)) (Const64 [2]) mem)
 27213  	// cond: isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config)
 27214  	// result: (MakeResult (Eq16 (Load <typ.Int16> sptr mem) (Const16 <typ.Int16> [int16(read16(scon,0,config.ctxt.Arch.ByteOrder))])) mem)
 27215  	for {
 27216  		if len(v.Args) != 4 {
 27217  			break
 27218  		}
 27219  		callAux := auxToCall(v.Aux)
 27220  		mem := v.Args[3]
 27221  		sptr := v.Args[0]
 27222  		v_1 := v.Args[1]
 27223  		if v_1.Op != OpAddr {
 27224  			break
 27225  		}
 27226  		scon := auxToSym(v_1.Aux)
 27227  		v_1_0 := v_1.Args[0]
 27228  		if v_1_0.Op != OpSB {
 27229  			break
 27230  		}
 27231  		v_2 := v.Args[2]
 27232  		if v_2.Op != OpConst64 || auxIntToInt64(v_2.AuxInt) != 2 || !(isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config)) {
 27233  			break
 27234  		}
 27235  		v.reset(OpMakeResult)
 27236  		v0 := b.NewValue0(v.Pos, OpEq16, typ.Bool)
 27237  		v1 := b.NewValue0(v.Pos, OpLoad, typ.Int16)
 27238  		v1.AddArg2(sptr, mem)
 27239  		v2 := b.NewValue0(v.Pos, OpConst16, typ.Int16)
 27240  		v2.AuxInt = int16ToAuxInt(int16(read16(scon, 0, config.ctxt.Arch.ByteOrder)))
 27241  		v0.AddArg2(v1, v2)
 27242  		v.AddArg2(v0, mem)
 27243  		return true
 27244  	}
 27245  	// match: (StaticLECall {callAux} sptr (Addr {scon} (SB)) (Const64 [4]) mem)
 27246  	// cond: isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config)
 27247  	// result: (MakeResult (Eq32 (Load <typ.Int32> sptr mem) (Const32 <typ.Int32> [int32(read32(scon,0,config.ctxt.Arch.ByteOrder))])) mem)
 27248  	for {
 27249  		if len(v.Args) != 4 {
 27250  			break
 27251  		}
 27252  		callAux := auxToCall(v.Aux)
 27253  		mem := v.Args[3]
 27254  		sptr := v.Args[0]
 27255  		v_1 := v.Args[1]
 27256  		if v_1.Op != OpAddr {
 27257  			break
 27258  		}
 27259  		scon := auxToSym(v_1.Aux)
 27260  		v_1_0 := v_1.Args[0]
 27261  		if v_1_0.Op != OpSB {
 27262  			break
 27263  		}
 27264  		v_2 := v.Args[2]
 27265  		if v_2.Op != OpConst64 || auxIntToInt64(v_2.AuxInt) != 4 || !(isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config)) {
 27266  			break
 27267  		}
 27268  		v.reset(OpMakeResult)
 27269  		v0 := b.NewValue0(v.Pos, OpEq32, typ.Bool)
 27270  		v1 := b.NewValue0(v.Pos, OpLoad, typ.Int32)
 27271  		v1.AddArg2(sptr, mem)
 27272  		v2 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
 27273  		v2.AuxInt = int32ToAuxInt(int32(read32(scon, 0, config.ctxt.Arch.ByteOrder)))
 27274  		v0.AddArg2(v1, v2)
 27275  		v.AddArg2(v0, mem)
 27276  		return true
 27277  	}
 27278  	// match: (StaticLECall {callAux} sptr (Addr {scon} (SB)) (Const64 [8]) mem)
 27279  	// cond: isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config) && config.PtrSize == 8
 27280  	// result: (MakeResult (Eq64 (Load <typ.Int64> sptr mem) (Const64 <typ.Int64> [int64(read64(scon,0,config.ctxt.Arch.ByteOrder))])) mem)
 27281  	for {
 27282  		if len(v.Args) != 4 {
 27283  			break
 27284  		}
 27285  		callAux := auxToCall(v.Aux)
 27286  		mem := v.Args[3]
 27287  		sptr := v.Args[0]
 27288  		v_1 := v.Args[1]
 27289  		if v_1.Op != OpAddr {
 27290  			break
 27291  		}
 27292  		scon := auxToSym(v_1.Aux)
 27293  		v_1_0 := v_1.Args[0]
 27294  		if v_1_0.Op != OpSB {
 27295  			break
 27296  		}
 27297  		v_2 := v.Args[2]
 27298  		if v_2.Op != OpConst64 || auxIntToInt64(v_2.AuxInt) != 8 || !(isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config) && config.PtrSize == 8) {
 27299  			break
 27300  		}
 27301  		v.reset(OpMakeResult)
 27302  		v0 := b.NewValue0(v.Pos, OpEq64, typ.Bool)
 27303  		v1 := b.NewValue0(v.Pos, OpLoad, typ.Int64)
 27304  		v1.AddArg2(sptr, mem)
 27305  		v2 := b.NewValue0(v.Pos, OpConst64, typ.Int64)
 27306  		v2.AuxInt = int64ToAuxInt(int64(read64(scon, 0, config.ctxt.Arch.ByteOrder)))
 27307  		v0.AddArg2(v1, v2)
 27308  		v.AddArg2(v0, mem)
 27309  		return true
 27310  	}
 27311  	return false
 27312  }
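// Informal summary of the StaticLECall rules above (not generated from
// _gen/generic.rules): a runtime.memequal call, as emitted for some string and
// byte-slice equality checks, that compares against a read-only symbol of
// length 1, 2, 4, or 8 is replaced by a single load of the caller's bytes and
// a compare against the constant read out of that symbol. The wider cases also
// require a target that can load unaligned, and the 8-byte case a 64-bit
// pointer size.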
 27313  func rewriteValuegeneric_OpStore(v *Value) bool {
 27314  	v_2 := v.Args[2]
 27315  	v_1 := v.Args[1]
 27316  	v_0 := v.Args[0]
 27317  	b := v.Block
 27318  	fe := b.Func.fe
 27319  	// match: (Store {t1} p1 (Load <t2> p2 mem) mem)
 27320  	// cond: isSamePtr(p1, p2) && t2.Size() == t1.Size()
 27321  	// result: mem
 27322  	for {
 27323  		t1 := auxToType(v.Aux)
 27324  		p1 := v_0
 27325  		if v_1.Op != OpLoad {
 27326  			break
 27327  		}
 27328  		t2 := v_1.Type
 27329  		mem := v_1.Args[1]
 27330  		p2 := v_1.Args[0]
 27331  		if mem != v_2 || !(isSamePtr(p1, p2) && t2.Size() == t1.Size()) {
 27332  			break
 27333  		}
 27334  		v.copyOf(mem)
 27335  		return true
 27336  	}
 27337  	// match: (Store {t1} p1 (Load <t2> p2 oldmem) mem:(Store {t3} p3 _ oldmem))
 27338  	// cond: isSamePtr(p1, p2) && t2.Size() == t1.Size() && disjoint(p1, t1.Size(), p3, t3.Size())
 27339  	// result: mem
 27340  	for {
 27341  		t1 := auxToType(v.Aux)
 27342  		p1 := v_0
 27343  		if v_1.Op != OpLoad {
 27344  			break
 27345  		}
 27346  		t2 := v_1.Type
 27347  		oldmem := v_1.Args[1]
 27348  		p2 := v_1.Args[0]
 27349  		mem := v_2
 27350  		if mem.Op != OpStore {
 27351  			break
 27352  		}
 27353  		t3 := auxToType(mem.Aux)
 27354  		_ = mem.Args[2]
 27355  		p3 := mem.Args[0]
 27356  		if oldmem != mem.Args[2] || !(isSamePtr(p1, p2) && t2.Size() == t1.Size() && disjoint(p1, t1.Size(), p3, t3.Size())) {
 27357  			break
 27358  		}
 27359  		v.copyOf(mem)
 27360  		return true
 27361  	}
 27362  	// match: (Store {t1} p1 (Load <t2> p2 oldmem) mem:(Store {t3} p3 _ (Store {t4} p4 _ oldmem)))
 27363  	// cond: isSamePtr(p1, p2) && t2.Size() == t1.Size() && disjoint(p1, t1.Size(), p3, t3.Size()) && disjoint(p1, t1.Size(), p4, t4.Size())
 27364  	// result: mem
 27365  	for {
 27366  		t1 := auxToType(v.Aux)
 27367  		p1 := v_0
 27368  		if v_1.Op != OpLoad {
 27369  			break
 27370  		}
 27371  		t2 := v_1.Type
 27372  		oldmem := v_1.Args[1]
 27373  		p2 := v_1.Args[0]
 27374  		mem := v_2
 27375  		if mem.Op != OpStore {
 27376  			break
 27377  		}
 27378  		t3 := auxToType(mem.Aux)
 27379  		_ = mem.Args[2]
 27380  		p3 := mem.Args[0]
 27381  		mem_2 := mem.Args[2]
 27382  		if mem_2.Op != OpStore {
 27383  			break
 27384  		}
 27385  		t4 := auxToType(mem_2.Aux)
 27386  		_ = mem_2.Args[2]
 27387  		p4 := mem_2.Args[0]
 27388  		if oldmem != mem_2.Args[2] || !(isSamePtr(p1, p2) && t2.Size() == t1.Size() && disjoint(p1, t1.Size(), p3, t3.Size()) && disjoint(p1, t1.Size(), p4, t4.Size())) {
 27389  			break
 27390  		}
 27391  		v.copyOf(mem)
 27392  		return true
 27393  	}
 27394  	// match: (Store {t1} p1 (Load <t2> p2 oldmem) mem:(Store {t3} p3 _ (Store {t4} p4 _ (Store {t5} p5 _ oldmem))))
 27395  	// cond: isSamePtr(p1, p2) && t2.Size() == t1.Size() && disjoint(p1, t1.Size(), p3, t3.Size()) && disjoint(p1, t1.Size(), p4, t4.Size()) && disjoint(p1, t1.Size(), p5, t5.Size())
 27396  	// result: mem
 27397  	for {
 27398  		t1 := auxToType(v.Aux)
 27399  		p1 := v_0
 27400  		if v_1.Op != OpLoad {
 27401  			break
 27402  		}
 27403  		t2 := v_1.Type
 27404  		oldmem := v_1.Args[1]
 27405  		p2 := v_1.Args[0]
 27406  		mem := v_2
 27407  		if mem.Op != OpStore {
 27408  			break
 27409  		}
 27410  		t3 := auxToType(mem.Aux)
 27411  		_ = mem.Args[2]
 27412  		p3 := mem.Args[0]
 27413  		mem_2 := mem.Args[2]
 27414  		if mem_2.Op != OpStore {
 27415  			break
 27416  		}
 27417  		t4 := auxToType(mem_2.Aux)
 27418  		_ = mem_2.Args[2]
 27419  		p4 := mem_2.Args[0]
 27420  		mem_2_2 := mem_2.Args[2]
 27421  		if mem_2_2.Op != OpStore {
 27422  			break
 27423  		}
 27424  		t5 := auxToType(mem_2_2.Aux)
 27425  		_ = mem_2_2.Args[2]
 27426  		p5 := mem_2_2.Args[0]
 27427  		if oldmem != mem_2_2.Args[2] || !(isSamePtr(p1, p2) && t2.Size() == t1.Size() && disjoint(p1, t1.Size(), p3, t3.Size()) && disjoint(p1, t1.Size(), p4, t4.Size()) && disjoint(p1, t1.Size(), p5, t5.Size())) {
 27428  			break
 27429  		}
 27430  		v.copyOf(mem)
 27431  		return true
 27432  	}
 27433  	// match: (Store {t} (OffPtr [o] p1) x mem:(Zero [n] p2 _))
 27434  	// cond: isConstZero(x) && o >= 0 && t.Size() + o <= n && isSamePtr(p1, p2)
 27435  	// result: mem
 27436  	for {
 27437  		t := auxToType(v.Aux)
 27438  		if v_0.Op != OpOffPtr {
 27439  			break
 27440  		}
 27441  		o := auxIntToInt64(v_0.AuxInt)
 27442  		p1 := v_0.Args[0]
 27443  		x := v_1
 27444  		mem := v_2
 27445  		if mem.Op != OpZero {
 27446  			break
 27447  		}
 27448  		n := auxIntToInt64(mem.AuxInt)
 27449  		p2 := mem.Args[0]
 27450  		if !(isConstZero(x) && o >= 0 && t.Size()+o <= n && isSamePtr(p1, p2)) {
 27451  			break
 27452  		}
 27453  		v.copyOf(mem)
 27454  		return true
 27455  	}
 27456  	// match: (Store {t1} op:(OffPtr [o1] p1) x mem:(Store {t2} p2 _ (Zero [n] p3 _)))
 27457  	// cond: isConstZero(x) && o1 >= 0 && t1.Size() + o1 <= n && isSamePtr(p1, p3) && disjoint(op, t1.Size(), p2, t2.Size())
 27458  	// result: mem
 27459  	for {
 27460  		t1 := auxToType(v.Aux)
 27461  		op := v_0
 27462  		if op.Op != OpOffPtr {
 27463  			break
 27464  		}
 27465  		o1 := auxIntToInt64(op.AuxInt)
 27466  		p1 := op.Args[0]
 27467  		x := v_1
 27468  		mem := v_2
 27469  		if mem.Op != OpStore {
 27470  			break
 27471  		}
 27472  		t2 := auxToType(mem.Aux)
 27473  		_ = mem.Args[2]
 27474  		p2 := mem.Args[0]
 27475  		mem_2 := mem.Args[2]
 27476  		if mem_2.Op != OpZero {
 27477  			break
 27478  		}
 27479  		n := auxIntToInt64(mem_2.AuxInt)
 27480  		p3 := mem_2.Args[0]
 27481  		if !(isConstZero(x) && o1 >= 0 && t1.Size()+o1 <= n && isSamePtr(p1, p3) && disjoint(op, t1.Size(), p2, t2.Size())) {
 27482  			break
 27483  		}
 27484  		v.copyOf(mem)
 27485  		return true
 27486  	}
 27487  	// match: (Store {t1} op:(OffPtr [o1] p1) x mem:(Store {t2} p2 _ (Store {t3} p3 _ (Zero [n] p4 _))))
 27488  	// cond: isConstZero(x) && o1 >= 0 && t1.Size() + o1 <= n && isSamePtr(p1, p4) && disjoint(op, t1.Size(), p2, t2.Size()) && disjoint(op, t1.Size(), p3, t3.Size())
 27489  	// result: mem
 27490  	for {
 27491  		t1 := auxToType(v.Aux)
 27492  		op := v_0
 27493  		if op.Op != OpOffPtr {
 27494  			break
 27495  		}
 27496  		o1 := auxIntToInt64(op.AuxInt)
 27497  		p1 := op.Args[0]
 27498  		x := v_1
 27499  		mem := v_2
 27500  		if mem.Op != OpStore {
 27501  			break
 27502  		}
 27503  		t2 := auxToType(mem.Aux)
 27504  		_ = mem.Args[2]
 27505  		p2 := mem.Args[0]
 27506  		mem_2 := mem.Args[2]
 27507  		if mem_2.Op != OpStore {
 27508  			break
 27509  		}
 27510  		t3 := auxToType(mem_2.Aux)
 27511  		_ = mem_2.Args[2]
 27512  		p3 := mem_2.Args[0]
 27513  		mem_2_2 := mem_2.Args[2]
 27514  		if mem_2_2.Op != OpZero {
 27515  			break
 27516  		}
 27517  		n := auxIntToInt64(mem_2_2.AuxInt)
 27518  		p4 := mem_2_2.Args[0]
 27519  		if !(isConstZero(x) && o1 >= 0 && t1.Size()+o1 <= n && isSamePtr(p1, p4) && disjoint(op, t1.Size(), p2, t2.Size()) && disjoint(op, t1.Size(), p3, t3.Size())) {
 27520  			break
 27521  		}
 27522  		v.copyOf(mem)
 27523  		return true
 27524  	}
 27525  	// match: (Store {t1} op:(OffPtr [o1] p1) x mem:(Store {t2} p2 _ (Store {t3} p3 _ (Store {t4} p4 _ (Zero [n] p5 _)))))
 27526  	// cond: isConstZero(x) && o1 >= 0 && t1.Size() + o1 <= n && isSamePtr(p1, p5) && disjoint(op, t1.Size(), p2, t2.Size()) && disjoint(op, t1.Size(), p3, t3.Size()) && disjoint(op, t1.Size(), p4, t4.Size())
 27527  	// result: mem
 27528  	for {
 27529  		t1 := auxToType(v.Aux)
 27530  		op := v_0
 27531  		if op.Op != OpOffPtr {
 27532  			break
 27533  		}
 27534  		o1 := auxIntToInt64(op.AuxInt)
 27535  		p1 := op.Args[0]
 27536  		x := v_1
 27537  		mem := v_2
 27538  		if mem.Op != OpStore {
 27539  			break
 27540  		}
 27541  		t2 := auxToType(mem.Aux)
 27542  		_ = mem.Args[2]
 27543  		p2 := mem.Args[0]
 27544  		mem_2 := mem.Args[2]
 27545  		if mem_2.Op != OpStore {
 27546  			break
 27547  		}
 27548  		t3 := auxToType(mem_2.Aux)
 27549  		_ = mem_2.Args[2]
 27550  		p3 := mem_2.Args[0]
 27551  		mem_2_2 := mem_2.Args[2]
 27552  		if mem_2_2.Op != OpStore {
 27553  			break
 27554  		}
 27555  		t4 := auxToType(mem_2_2.Aux)
 27556  		_ = mem_2_2.Args[2]
 27557  		p4 := mem_2_2.Args[0]
 27558  		mem_2_2_2 := mem_2_2.Args[2]
 27559  		if mem_2_2_2.Op != OpZero {
 27560  			break
 27561  		}
 27562  		n := auxIntToInt64(mem_2_2_2.AuxInt)
 27563  		p5 := mem_2_2_2.Args[0]
 27564  		if !(isConstZero(x) && o1 >= 0 && t1.Size()+o1 <= n && isSamePtr(p1, p5) && disjoint(op, t1.Size(), p2, t2.Size()) && disjoint(op, t1.Size(), p3, t3.Size()) && disjoint(op, t1.Size(), p4, t4.Size())) {
 27565  			break
 27566  		}
 27567  		v.copyOf(mem)
 27568  		return true
 27569  	}
 27570  	// match: (Store _ (StructMake0) mem)
 27571  	// result: mem
 27572  	for {
 27573  		if v_1.Op != OpStructMake0 {
 27574  			break
 27575  		}
 27576  		mem := v_2
 27577  		v.copyOf(mem)
 27578  		return true
 27579  	}
 27580  	// match: (Store dst (StructMake1 <t> f0) mem)
 27581  	// result: (Store {t.FieldType(0)} (OffPtr <t.FieldType(0).PtrTo()> [0] dst) f0 mem)
 27582  	for {
 27583  		dst := v_0
 27584  		if v_1.Op != OpStructMake1 {
 27585  			break
 27586  		}
 27587  		t := v_1.Type
 27588  		f0 := v_1.Args[0]
 27589  		mem := v_2
 27590  		v.reset(OpStore)
 27591  		v.Aux = typeToAux(t.FieldType(0))
 27592  		v0 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo())
 27593  		v0.AuxInt = int64ToAuxInt(0)
 27594  		v0.AddArg(dst)
 27595  		v.AddArg3(v0, f0, mem)
 27596  		return true
 27597  	}
 27598  	// match: (Store dst (StructMake2 <t> f0 f1) mem)
 27599  	// result: (Store {t.FieldType(1)} (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst) f1 (Store {t.FieldType(0)} (OffPtr <t.FieldType(0).PtrTo()> [0] dst) f0 mem))
 27600  	for {
 27601  		dst := v_0
 27602  		if v_1.Op != OpStructMake2 {
 27603  			break
 27604  		}
 27605  		t := v_1.Type
 27606  		f1 := v_1.Args[1]
 27607  		f0 := v_1.Args[0]
 27608  		mem := v_2
 27609  		v.reset(OpStore)
 27610  		v.Aux = typeToAux(t.FieldType(1))
 27611  		v0 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo())
 27612  		v0.AuxInt = int64ToAuxInt(t.FieldOff(1))
 27613  		v0.AddArg(dst)
 27614  		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 27615  		v1.Aux = typeToAux(t.FieldType(0))
 27616  		v2 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo())
 27617  		v2.AuxInt = int64ToAuxInt(0)
 27618  		v2.AddArg(dst)
 27619  		v1.AddArg3(v2, f0, mem)
 27620  		v.AddArg3(v0, f1, v1)
 27621  		return true
 27622  	}
 27623  	// match: (Store dst (StructMake3 <t> f0 f1 f2) mem)
 27624  	// result: (Store {t.FieldType(2)} (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] dst) f2 (Store {t.FieldType(1)} (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst) f1 (Store {t.FieldType(0)} (OffPtr <t.FieldType(0).PtrTo()> [0] dst) f0 mem)))
 27625  	for {
 27626  		dst := v_0
 27627  		if v_1.Op != OpStructMake3 {
 27628  			break
 27629  		}
 27630  		t := v_1.Type
 27631  		f2 := v_1.Args[2]
 27632  		f0 := v_1.Args[0]
 27633  		f1 := v_1.Args[1]
 27634  		mem := v_2
 27635  		v.reset(OpStore)
 27636  		v.Aux = typeToAux(t.FieldType(2))
 27637  		v0 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(2).PtrTo())
 27638  		v0.AuxInt = int64ToAuxInt(t.FieldOff(2))
 27639  		v0.AddArg(dst)
 27640  		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 27641  		v1.Aux = typeToAux(t.FieldType(1))
 27642  		v2 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo())
 27643  		v2.AuxInt = int64ToAuxInt(t.FieldOff(1))
 27644  		v2.AddArg(dst)
 27645  		v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 27646  		v3.Aux = typeToAux(t.FieldType(0))
 27647  		v4 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo())
 27648  		v4.AuxInt = int64ToAuxInt(0)
 27649  		v4.AddArg(dst)
 27650  		v3.AddArg3(v4, f0, mem)
 27651  		v1.AddArg3(v2, f1, v3)
 27652  		v.AddArg3(v0, f2, v1)
 27653  		return true
 27654  	}
 27655  	// match: (Store dst (StructMake4 <t> f0 f1 f2 f3) mem)
 27656  	// result: (Store {t.FieldType(3)} (OffPtr <t.FieldType(3).PtrTo()> [t.FieldOff(3)] dst) f3 (Store {t.FieldType(2)} (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] dst) f2 (Store {t.FieldType(1)} (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst) f1 (Store {t.FieldType(0)} (OffPtr <t.FieldType(0).PtrTo()> [0] dst) f0 mem))))
 27657  	for {
 27658  		dst := v_0
 27659  		if v_1.Op != OpStructMake4 {
 27660  			break
 27661  		}
 27662  		t := v_1.Type
 27663  		f3 := v_1.Args[3]
 27664  		f0 := v_1.Args[0]
 27665  		f1 := v_1.Args[1]
 27666  		f2 := v_1.Args[2]
 27667  		mem := v_2
 27668  		v.reset(OpStore)
 27669  		v.Aux = typeToAux(t.FieldType(3))
 27670  		v0 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(3).PtrTo())
 27671  		v0.AuxInt = int64ToAuxInt(t.FieldOff(3))
 27672  		v0.AddArg(dst)
 27673  		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 27674  		v1.Aux = typeToAux(t.FieldType(2))
 27675  		v2 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(2).PtrTo())
 27676  		v2.AuxInt = int64ToAuxInt(t.FieldOff(2))
 27677  		v2.AddArg(dst)
 27678  		v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 27679  		v3.Aux = typeToAux(t.FieldType(1))
 27680  		v4 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo())
 27681  		v4.AuxInt = int64ToAuxInt(t.FieldOff(1))
 27682  		v4.AddArg(dst)
 27683  		v5 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 27684  		v5.Aux = typeToAux(t.FieldType(0))
 27685  		v6 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(0).PtrTo())
 27686  		v6.AuxInt = int64ToAuxInt(0)
 27687  		v6.AddArg(dst)
 27688  		v5.AddArg3(v6, f0, mem)
 27689  		v3.AddArg3(v4, f1, v5)
 27690  		v1.AddArg3(v2, f2, v3)
 27691  		v.AddArg3(v0, f3, v1)
 27692  		return true
 27693  	}
 27694  	// match: (Store {t} dst (Load src mem) mem)
 27695  	// cond: !fe.CanSSA(t)
 27696  	// result: (Move {t} [t.Size()] dst src mem)
 27697  	for {
 27698  		t := auxToType(v.Aux)
 27699  		dst := v_0
 27700  		if v_1.Op != OpLoad {
 27701  			break
 27702  		}
 27703  		mem := v_1.Args[1]
 27704  		src := v_1.Args[0]
 27705  		if mem != v_2 || !(!fe.CanSSA(t)) {
 27706  			break
 27707  		}
 27708  		v.reset(OpMove)
 27709  		v.AuxInt = int64ToAuxInt(t.Size())
 27710  		v.Aux = typeToAux(t)
 27711  		v.AddArg3(dst, src, mem)
 27712  		return true
 27713  	}
 27714  	// match: (Store {t} dst (Load src mem) (VarDef {x} mem))
 27715  	// cond: !fe.CanSSA(t)
 27716  	// result: (Move {t} [t.Size()] dst src (VarDef {x} mem))
 27717  	for {
 27718  		t := auxToType(v.Aux)
 27719  		dst := v_0
 27720  		if v_1.Op != OpLoad {
 27721  			break
 27722  		}
 27723  		mem := v_1.Args[1]
 27724  		src := v_1.Args[0]
 27725  		if v_2.Op != OpVarDef {
 27726  			break
 27727  		}
 27728  		x := auxToSym(v_2.Aux)
 27729  		if mem != v_2.Args[0] || !(!fe.CanSSA(t)) {
 27730  			break
 27731  		}
 27732  		v.reset(OpMove)
 27733  		v.AuxInt = int64ToAuxInt(t.Size())
 27734  		v.Aux = typeToAux(t)
 27735  		v0 := b.NewValue0(v.Pos, OpVarDef, types.TypeMem)
 27736  		v0.Aux = symToAux(x)
 27737  		v0.AddArg(mem)
 27738  		v.AddArg3(dst, src, v0)
 27739  		return true
 27740  	}
 27741  	// match: (Store _ (ArrayMake0) mem)
 27742  	// result: mem
 27743  	for {
 27744  		if v_1.Op != OpArrayMake0 {
 27745  			break
 27746  		}
 27747  		mem := v_2
 27748  		v.copyOf(mem)
 27749  		return true
 27750  	}
 27751  	// match: (Store dst (ArrayMake1 e) mem)
 27752  	// result: (Store {e.Type} dst e mem)
 27753  	for {
 27754  		dst := v_0
 27755  		if v_1.Op != OpArrayMake1 {
 27756  			break
 27757  		}
 27758  		e := v_1.Args[0]
 27759  		mem := v_2
 27760  		v.reset(OpStore)
 27761  		v.Aux = typeToAux(e.Type)
 27762  		v.AddArg3(dst, e, mem)
 27763  		return true
 27764  	}
 27765  	// match: (Store (SelectN [0] call:(StaticLECall _ _)) x mem:(SelectN [1] call))
 27766  	// cond: isConstZero(x) && isSameCall(call.Aux, "runtime.newobject")
 27767  	// result: mem
 27768  	for {
 27769  		if v_0.Op != OpSelectN || auxIntToInt64(v_0.AuxInt) != 0 {
 27770  			break
 27771  		}
 27772  		call := v_0.Args[0]
 27773  		if call.Op != OpStaticLECall || len(call.Args) != 2 {
 27774  			break
 27775  		}
 27776  		x := v_1
 27777  		mem := v_2
 27778  		if mem.Op != OpSelectN || auxIntToInt64(mem.AuxInt) != 1 || call != mem.Args[0] || !(isConstZero(x) && isSameCall(call.Aux, "runtime.newobject")) {
 27779  			break
 27780  		}
 27781  		v.copyOf(mem)
 27782  		return true
 27783  	}
 27784  	// match: (Store (OffPtr (SelectN [0] call:(StaticLECall _ _))) x mem:(SelectN [1] call))
 27785  	// cond: isConstZero(x) && isSameCall(call.Aux, "runtime.newobject")
 27786  	// result: mem
 27787  	for {
 27788  		if v_0.Op != OpOffPtr {
 27789  			break
 27790  		}
 27791  		v_0_0 := v_0.Args[0]
 27792  		if v_0_0.Op != OpSelectN || auxIntToInt64(v_0_0.AuxInt) != 0 {
 27793  			break
 27794  		}
 27795  		call := v_0_0.Args[0]
 27796  		if call.Op != OpStaticLECall || len(call.Args) != 2 {
 27797  			break
 27798  		}
 27799  		x := v_1
 27800  		mem := v_2
 27801  		if mem.Op != OpSelectN || auxIntToInt64(mem.AuxInt) != 1 || call != mem.Args[0] || !(isConstZero(x) && isSameCall(call.Aux, "runtime.newobject")) {
 27802  			break
 27803  		}
 27804  		v.copyOf(mem)
 27805  		return true
 27806  	}
 27807  	// match: (Store {t1} op1:(OffPtr [o1] p1) d1 m2:(Store {t2} op2:(OffPtr [0] p2) d2 m3:(Move [n] p3 _ mem)))
 27808  	// cond: m2.Uses == 1 && m3.Uses == 1 && o1 == t2.Size() && n == t2.Size() + t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && clobber(m2, m3)
 27809  	// result: (Store {t1} op1 d1 (Store {t2} op2 d2 mem))
 27810  	for {
 27811  		t1 := auxToType(v.Aux)
 27812  		op1 := v_0
 27813  		if op1.Op != OpOffPtr {
 27814  			break
 27815  		}
 27816  		o1 := auxIntToInt64(op1.AuxInt)
 27817  		p1 := op1.Args[0]
 27818  		d1 := v_1
 27819  		m2 := v_2
 27820  		if m2.Op != OpStore {
 27821  			break
 27822  		}
 27823  		t2 := auxToType(m2.Aux)
 27824  		_ = m2.Args[2]
 27825  		op2 := m2.Args[0]
 27826  		if op2.Op != OpOffPtr || auxIntToInt64(op2.AuxInt) != 0 {
 27827  			break
 27828  		}
 27829  		p2 := op2.Args[0]
 27830  		d2 := m2.Args[1]
 27831  		m3 := m2.Args[2]
 27832  		if m3.Op != OpMove {
 27833  			break
 27834  		}
 27835  		n := auxIntToInt64(m3.AuxInt)
 27836  		mem := m3.Args[2]
 27837  		p3 := m3.Args[0]
 27838  		if !(m2.Uses == 1 && m3.Uses == 1 && o1 == t2.Size() && n == t2.Size()+t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && clobber(m2, m3)) {
 27839  			break
 27840  		}
 27841  		v.reset(OpStore)
 27842  		v.Aux = typeToAux(t1)
 27843  		v0 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 27844  		v0.Aux = typeToAux(t2)
 27845  		v0.AddArg3(op2, d2, mem)
 27846  		v.AddArg3(op1, d1, v0)
 27847  		return true
 27848  	}
 27849  	// match: (Store {t1} op1:(OffPtr [o1] p1) d1 m2:(Store {t2} op2:(OffPtr [o2] p2) d2 m3:(Store {t3} op3:(OffPtr [0] p3) d3 m4:(Move [n] p4 _ mem))))
 27850  	// cond: m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && o2 == t3.Size() && o1-o2 == t2.Size() && n == t3.Size() + t2.Size() + t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && clobber(m2, m3, m4)
 27851  	// result: (Store {t1} op1 d1 (Store {t2} op2 d2 (Store {t3} op3 d3 mem)))
 27852  	for {
 27853  		t1 := auxToType(v.Aux)
 27854  		op1 := v_0
 27855  		if op1.Op != OpOffPtr {
 27856  			break
 27857  		}
 27858  		o1 := auxIntToInt64(op1.AuxInt)
 27859  		p1 := op1.Args[0]
 27860  		d1 := v_1
 27861  		m2 := v_2
 27862  		if m2.Op != OpStore {
 27863  			break
 27864  		}
 27865  		t2 := auxToType(m2.Aux)
 27866  		_ = m2.Args[2]
 27867  		op2 := m2.Args[0]
 27868  		if op2.Op != OpOffPtr {
 27869  			break
 27870  		}
 27871  		o2 := auxIntToInt64(op2.AuxInt)
 27872  		p2 := op2.Args[0]
 27873  		d2 := m2.Args[1]
 27874  		m3 := m2.Args[2]
 27875  		if m3.Op != OpStore {
 27876  			break
 27877  		}
 27878  		t3 := auxToType(m3.Aux)
 27879  		_ = m3.Args[2]
 27880  		op3 := m3.Args[0]
 27881  		if op3.Op != OpOffPtr || auxIntToInt64(op3.AuxInt) != 0 {
 27882  			break
 27883  		}
 27884  		p3 := op3.Args[0]
 27885  		d3 := m3.Args[1]
 27886  		m4 := m3.Args[2]
 27887  		if m4.Op != OpMove {
 27888  			break
 27889  		}
 27890  		n := auxIntToInt64(m4.AuxInt)
 27891  		mem := m4.Args[2]
 27892  		p4 := m4.Args[0]
 27893  		if !(m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && o2 == t3.Size() && o1-o2 == t2.Size() && n == t3.Size()+t2.Size()+t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && clobber(m2, m3, m4)) {
 27894  			break
 27895  		}
 27896  		v.reset(OpStore)
 27897  		v.Aux = typeToAux(t1)
 27898  		v0 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 27899  		v0.Aux = typeToAux(t2)
 27900  		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 27901  		v1.Aux = typeToAux(t3)
 27902  		v1.AddArg3(op3, d3, mem)
 27903  		v0.AddArg3(op2, d2, v1)
 27904  		v.AddArg3(op1, d1, v0)
 27905  		return true
 27906  	}
 27907  	// match: (Store {t1} op1:(OffPtr [o1] p1) d1 m2:(Store {t2} op2:(OffPtr [o2] p2) d2 m3:(Store {t3} op3:(OffPtr [o3] p3) d3 m4:(Store {t4} op4:(OffPtr [0] p4) d4 m5:(Move [n] p5 _ mem)))))
 27908  	// cond: m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && m5.Uses == 1 && o3 == t4.Size() && o2-o3 == t3.Size() && o1-o2 == t2.Size() && n == t4.Size() + t3.Size() + t2.Size() + t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && clobber(m2, m3, m4, m5)
 27909  	// result: (Store {t1} op1 d1 (Store {t2} op2 d2 (Store {t3} op3 d3 (Store {t4} op4 d4 mem))))
 27910  	for {
 27911  		t1 := auxToType(v.Aux)
 27912  		op1 := v_0
 27913  		if op1.Op != OpOffPtr {
 27914  			break
 27915  		}
 27916  		o1 := auxIntToInt64(op1.AuxInt)
 27917  		p1 := op1.Args[0]
 27918  		d1 := v_1
 27919  		m2 := v_2
 27920  		if m2.Op != OpStore {
 27921  			break
 27922  		}
 27923  		t2 := auxToType(m2.Aux)
 27924  		_ = m2.Args[2]
 27925  		op2 := m2.Args[0]
 27926  		if op2.Op != OpOffPtr {
 27927  			break
 27928  		}
 27929  		o2 := auxIntToInt64(op2.AuxInt)
 27930  		p2 := op2.Args[0]
 27931  		d2 := m2.Args[1]
 27932  		m3 := m2.Args[2]
 27933  		if m3.Op != OpStore {
 27934  			break
 27935  		}
 27936  		t3 := auxToType(m3.Aux)
 27937  		_ = m3.Args[2]
 27938  		op3 := m3.Args[0]
 27939  		if op3.Op != OpOffPtr {
 27940  			break
 27941  		}
 27942  		o3 := auxIntToInt64(op3.AuxInt)
 27943  		p3 := op3.Args[0]
 27944  		d3 := m3.Args[1]
 27945  		m4 := m3.Args[2]
 27946  		if m4.Op != OpStore {
 27947  			break
 27948  		}
 27949  		t4 := auxToType(m4.Aux)
 27950  		_ = m4.Args[2]
 27951  		op4 := m4.Args[0]
 27952  		if op4.Op != OpOffPtr || auxIntToInt64(op4.AuxInt) != 0 {
 27953  			break
 27954  		}
 27955  		p4 := op4.Args[0]
 27956  		d4 := m4.Args[1]
 27957  		m5 := m4.Args[2]
 27958  		if m5.Op != OpMove {
 27959  			break
 27960  		}
 27961  		n := auxIntToInt64(m5.AuxInt)
 27962  		mem := m5.Args[2]
 27963  		p5 := m5.Args[0]
 27964  		if !(m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && m5.Uses == 1 && o3 == t4.Size() && o2-o3 == t3.Size() && o1-o2 == t2.Size() && n == t4.Size()+t3.Size()+t2.Size()+t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && clobber(m2, m3, m4, m5)) {
 27965  			break
 27966  		}
 27967  		v.reset(OpStore)
 27968  		v.Aux = typeToAux(t1)
 27969  		v0 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 27970  		v0.Aux = typeToAux(t2)
 27971  		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 27972  		v1.Aux = typeToAux(t3)
 27973  		v2 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 27974  		v2.Aux = typeToAux(t4)
 27975  		v2.AddArg3(op4, d4, mem)
 27976  		v1.AddArg3(op3, d3, v2)
 27977  		v0.AddArg3(op2, d2, v1)
 27978  		v.AddArg3(op1, d1, v0)
 27979  		return true
 27980  	}
 27981  	// match: (Store {t1} op1:(OffPtr [o1] p1) d1 m2:(Store {t2} op2:(OffPtr [0] p2) d2 m3:(Zero [n] p3 mem)))
 27982  	// cond: m2.Uses == 1 && m3.Uses == 1 && o1 == t2.Size() && n == t2.Size() + t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && clobber(m2, m3)
 27983  	// result: (Store {t1} op1 d1 (Store {t2} op2 d2 mem))
 27984  	for {
 27985  		t1 := auxToType(v.Aux)
 27986  		op1 := v_0
 27987  		if op1.Op != OpOffPtr {
 27988  			break
 27989  		}
 27990  		o1 := auxIntToInt64(op1.AuxInt)
 27991  		p1 := op1.Args[0]
 27992  		d1 := v_1
 27993  		m2 := v_2
 27994  		if m2.Op != OpStore {
 27995  			break
 27996  		}
 27997  		t2 := auxToType(m2.Aux)
 27998  		_ = m2.Args[2]
 27999  		op2 := m2.Args[0]
 28000  		if op2.Op != OpOffPtr || auxIntToInt64(op2.AuxInt) != 0 {
 28001  			break
 28002  		}
 28003  		p2 := op2.Args[0]
 28004  		d2 := m2.Args[1]
 28005  		m3 := m2.Args[2]
 28006  		if m3.Op != OpZero {
 28007  			break
 28008  		}
 28009  		n := auxIntToInt64(m3.AuxInt)
 28010  		mem := m3.Args[1]
 28011  		p3 := m3.Args[0]
 28012  		if !(m2.Uses == 1 && m3.Uses == 1 && o1 == t2.Size() && n == t2.Size()+t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && clobber(m2, m3)) {
 28013  			break
 28014  		}
 28015  		v.reset(OpStore)
 28016  		v.Aux = typeToAux(t1)
 28017  		v0 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 28018  		v0.Aux = typeToAux(t2)
 28019  		v0.AddArg3(op2, d2, mem)
 28020  		v.AddArg3(op1, d1, v0)
 28021  		return true
 28022  	}
 28023  	// match: (Store {t1} op1:(OffPtr [o1] p1) d1 m2:(Store {t2} op2:(OffPtr [o2] p2) d2 m3:(Store {t3} op3:(OffPtr [0] p3) d3 m4:(Zero [n] p4 mem))))
 28024  	// cond: m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && o2 == t3.Size() && o1-o2 == t2.Size() && n == t3.Size() + t2.Size() + t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && clobber(m2, m3, m4)
 28025  	// result: (Store {t1} op1 d1 (Store {t2} op2 d2 (Store {t3} op3 d3 mem)))
 28026  	for {
 28027  		t1 := auxToType(v.Aux)
 28028  		op1 := v_0
 28029  		if op1.Op != OpOffPtr {
 28030  			break
 28031  		}
 28032  		o1 := auxIntToInt64(op1.AuxInt)
 28033  		p1 := op1.Args[0]
 28034  		d1 := v_1
 28035  		m2 := v_2
 28036  		if m2.Op != OpStore {
 28037  			break
 28038  		}
 28039  		t2 := auxToType(m2.Aux)
 28040  		_ = m2.Args[2]
 28041  		op2 := m2.Args[0]
 28042  		if op2.Op != OpOffPtr {
 28043  			break
 28044  		}
 28045  		o2 := auxIntToInt64(op2.AuxInt)
 28046  		p2 := op2.Args[0]
 28047  		d2 := m2.Args[1]
 28048  		m3 := m2.Args[2]
 28049  		if m3.Op != OpStore {
 28050  			break
 28051  		}
 28052  		t3 := auxToType(m3.Aux)
 28053  		_ = m3.Args[2]
 28054  		op3 := m3.Args[0]
 28055  		if op3.Op != OpOffPtr || auxIntToInt64(op3.AuxInt) != 0 {
 28056  			break
 28057  		}
 28058  		p3 := op3.Args[0]
 28059  		d3 := m3.Args[1]
 28060  		m4 := m3.Args[2]
 28061  		if m4.Op != OpZero {
 28062  			break
 28063  		}
 28064  		n := auxIntToInt64(m4.AuxInt)
 28065  		mem := m4.Args[1]
 28066  		p4 := m4.Args[0]
 28067  		if !(m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && o2 == t3.Size() && o1-o2 == t2.Size() && n == t3.Size()+t2.Size()+t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && clobber(m2, m3, m4)) {
 28068  			break
 28069  		}
 28070  		v.reset(OpStore)
 28071  		v.Aux = typeToAux(t1)
 28072  		v0 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 28073  		v0.Aux = typeToAux(t2)
 28074  		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 28075  		v1.Aux = typeToAux(t3)
 28076  		v1.AddArg3(op3, d3, mem)
 28077  		v0.AddArg3(op2, d2, v1)
 28078  		v.AddArg3(op1, d1, v0)
 28079  		return true
 28080  	}
 28081  	// match: (Store {t1} op1:(OffPtr [o1] p1) d1 m2:(Store {t2} op2:(OffPtr [o2] p2) d2 m3:(Store {t3} op3:(OffPtr [o3] p3) d3 m4:(Store {t4} op4:(OffPtr [0] p4) d4 m5:(Zero [n] p5 mem)))))
 28082  	// cond: m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && m5.Uses == 1 && o3 == t4.Size() && o2-o3 == t3.Size() && o1-o2 == t2.Size() && n == t4.Size() + t3.Size() + t2.Size() + t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && clobber(m2, m3, m4, m5)
 28083  	// result: (Store {t1} op1 d1 (Store {t2} op2 d2 (Store {t3} op3 d3 (Store {t4} op4 d4 mem))))
 28084  	for {
 28085  		t1 := auxToType(v.Aux)
 28086  		op1 := v_0
 28087  		if op1.Op != OpOffPtr {
 28088  			break
 28089  		}
 28090  		o1 := auxIntToInt64(op1.AuxInt)
 28091  		p1 := op1.Args[0]
 28092  		d1 := v_1
 28093  		m2 := v_2
 28094  		if m2.Op != OpStore {
 28095  			break
 28096  		}
 28097  		t2 := auxToType(m2.Aux)
 28098  		_ = m2.Args[2]
 28099  		op2 := m2.Args[0]
 28100  		if op2.Op != OpOffPtr {
 28101  			break
 28102  		}
 28103  		o2 := auxIntToInt64(op2.AuxInt)
 28104  		p2 := op2.Args[0]
 28105  		d2 := m2.Args[1]
 28106  		m3 := m2.Args[2]
 28107  		if m3.Op != OpStore {
 28108  			break
 28109  		}
 28110  		t3 := auxToType(m3.Aux)
 28111  		_ = m3.Args[2]
 28112  		op3 := m3.Args[0]
 28113  		if op3.Op != OpOffPtr {
 28114  			break
 28115  		}
 28116  		o3 := auxIntToInt64(op3.AuxInt)
 28117  		p3 := op3.Args[0]
 28118  		d3 := m3.Args[1]
 28119  		m4 := m3.Args[2]
 28120  		if m4.Op != OpStore {
 28121  			break
 28122  		}
 28123  		t4 := auxToType(m4.Aux)
 28124  		_ = m4.Args[2]
 28125  		op4 := m4.Args[0]
 28126  		if op4.Op != OpOffPtr || auxIntToInt64(op4.AuxInt) != 0 {
 28127  			break
 28128  		}
 28129  		p4 := op4.Args[0]
 28130  		d4 := m4.Args[1]
 28131  		m5 := m4.Args[2]
 28132  		if m5.Op != OpZero {
 28133  			break
 28134  		}
 28135  		n := auxIntToInt64(m5.AuxInt)
 28136  		mem := m5.Args[1]
 28137  		p5 := m5.Args[0]
 28138  		if !(m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && m5.Uses == 1 && o3 == t4.Size() && o2-o3 == t3.Size() && o1-o2 == t2.Size() && n == t4.Size()+t3.Size()+t2.Size()+t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && clobber(m2, m3, m4, m5)) {
 28139  			break
 28140  		}
 28141  		v.reset(OpStore)
 28142  		v.Aux = typeToAux(t1)
 28143  		v0 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 28144  		v0.Aux = typeToAux(t2)
 28145  		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 28146  		v1.Aux = typeToAux(t3)
 28147  		v2 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 28148  		v2.Aux = typeToAux(t4)
 28149  		v2.AddArg3(op4, d4, mem)
 28150  		v1.AddArg3(op3, d3, v2)
 28151  		v0.AddArg3(op2, d2, v1)
 28152  		v.AddArg3(op1, d1, v0)
 28153  		return true
 28154  	}
 28155  	return false
 28156  }
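// rewriteValuegeneric_OpStringLen folds the length of a constructed string:
// StringLen of a StringMake whose length operand is a Const64 becomes that constant.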
 28157  func rewriteValuegeneric_OpStringLen(v *Value) bool {
 28158  	v_0 := v.Args[0]
 28159  	// match: (StringLen (StringMake _ (Const64 <t> [c])))
 28160  	// result: (Const64 <t> [c])
 28161  	for {
 28162  		if v_0.Op != OpStringMake {
 28163  			break
 28164  		}
 28165  		_ = v_0.Args[1]
 28166  		v_0_1 := v_0.Args[1]
 28167  		if v_0_1.Op != OpConst64 {
 28168  			break
 28169  		}
 28170  		t := v_0_1.Type
 28171  		c := auxIntToInt64(v_0_1.AuxInt)
 28172  		v.reset(OpConst64)
 28173  		v.Type = t
 28174  		v.AuxInt = int64ToAuxInt(c)
 28175  		return true
 28176  	}
 28177  	return false
 28178  }
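// rewriteValuegeneric_OpStringPtr folds the data pointer of a constructed string:
// StringPtr of a StringMake built from a static Addr becomes that Addr directly.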
 28179  func rewriteValuegeneric_OpStringPtr(v *Value) bool {
 28180  	v_0 := v.Args[0]
 28181  	// match: (StringPtr (StringMake (Addr <t> {s} base) _))
 28182  	// result: (Addr <t> {s} base)
 28183  	for {
 28184  		if v_0.Op != OpStringMake {
 28185  			break
 28186  		}
 28187  		v_0_0 := v_0.Args[0]
 28188  		if v_0_0.Op != OpAddr {
 28189  			break
 28190  		}
 28191  		t := v_0_0.Type
 28192  		s := auxToSym(v_0_0.Aux)
 28193  		base := v_0_0.Args[0]
 28194  		v.reset(OpAddr)
 28195  		v.Type = t
 28196  		v.Aux = symToAux(s)
 28197  		v.AddArg(base)
 28198  		return true
 28199  	}
 28200  	return false
 28201  }
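// rewriteValuegeneric_OpStructSelect simplifies struct field selection: selecting from a
// StructMake1..StructMake4 yields the corresponding operand, selecting from a Load of a
// struct that cannot be SSA'd becomes a narrower Load through an OffPtr at the field's
// offset, and selecting field 0 of an IData is the IData value itself.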
 28202  func rewriteValuegeneric_OpStructSelect(v *Value) bool {
 28203  	v_0 := v.Args[0]
 28204  	b := v.Block
 28205  	fe := b.Func.fe
 28206  	// match: (StructSelect (StructMake1 x))
 28207  	// result: x
 28208  	for {
 28209  		if v_0.Op != OpStructMake1 {
 28210  			break
 28211  		}
 28212  		x := v_0.Args[0]
 28213  		v.copyOf(x)
 28214  		return true
 28215  	}
 28216  	// match: (StructSelect [0] (StructMake2 x _))
 28217  	// result: x
 28218  	for {
 28219  		if auxIntToInt64(v.AuxInt) != 0 || v_0.Op != OpStructMake2 {
 28220  			break
 28221  		}
 28222  		x := v_0.Args[0]
 28223  		v.copyOf(x)
 28224  		return true
 28225  	}
 28226  	// match: (StructSelect [1] (StructMake2 _ x))
 28227  	// result: x
 28228  	for {
 28229  		if auxIntToInt64(v.AuxInt) != 1 || v_0.Op != OpStructMake2 {
 28230  			break
 28231  		}
 28232  		x := v_0.Args[1]
 28233  		v.copyOf(x)
 28234  		return true
 28235  	}
 28236  	// match: (StructSelect [0] (StructMake3 x _ _))
 28237  	// result: x
 28238  	for {
 28239  		if auxIntToInt64(v.AuxInt) != 0 || v_0.Op != OpStructMake3 {
 28240  			break
 28241  		}
 28242  		x := v_0.Args[0]
 28243  		v.copyOf(x)
 28244  		return true
 28245  	}
 28246  	// match: (StructSelect [1] (StructMake3 _ x _))
 28247  	// result: x
 28248  	for {
 28249  		if auxIntToInt64(v.AuxInt) != 1 || v_0.Op != OpStructMake3 {
 28250  			break
 28251  		}
 28252  		x := v_0.Args[1]
 28253  		v.copyOf(x)
 28254  		return true
 28255  	}
 28256  	// match: (StructSelect [2] (StructMake3 _ _ x))
 28257  	// result: x
 28258  	for {
 28259  		if auxIntToInt64(v.AuxInt) != 2 || v_0.Op != OpStructMake3 {
 28260  			break
 28261  		}
 28262  		x := v_0.Args[2]
 28263  		v.copyOf(x)
 28264  		return true
 28265  	}
 28266  	// match: (StructSelect [0] (StructMake4 x _ _ _))
 28267  	// result: x
 28268  	for {
 28269  		if auxIntToInt64(v.AuxInt) != 0 || v_0.Op != OpStructMake4 {
 28270  			break
 28271  		}
 28272  		x := v_0.Args[0]
 28273  		v.copyOf(x)
 28274  		return true
 28275  	}
 28276  	// match: (StructSelect [1] (StructMake4 _ x _ _))
 28277  	// result: x
 28278  	for {
 28279  		if auxIntToInt64(v.AuxInt) != 1 || v_0.Op != OpStructMake4 {
 28280  			break
 28281  		}
 28282  		x := v_0.Args[1]
 28283  		v.copyOf(x)
 28284  		return true
 28285  	}
 28286  	// match: (StructSelect [2] (StructMake4 _ _ x _))
 28287  	// result: x
 28288  	for {
 28289  		if auxIntToInt64(v.AuxInt) != 2 || v_0.Op != OpStructMake4 {
 28290  			break
 28291  		}
 28292  		x := v_0.Args[2]
 28293  		v.copyOf(x)
 28294  		return true
 28295  	}
 28296  	// match: (StructSelect [3] (StructMake4 _ _ _ x))
 28297  	// result: x
 28298  	for {
 28299  		if auxIntToInt64(v.AuxInt) != 3 || v_0.Op != OpStructMake4 {
 28300  			break
 28301  		}
 28302  		x := v_0.Args[3]
 28303  		v.copyOf(x)
 28304  		return true
 28305  	}
 28306  	// match: (StructSelect [i] x:(Load <t> ptr mem))
 28307  	// cond: !fe.CanSSA(t)
 28308  	// result: @x.Block (Load <v.Type> (OffPtr <v.Type.PtrTo()> [t.FieldOff(int(i))] ptr) mem)
 28309  	for {
 28310  		i := auxIntToInt64(v.AuxInt)
 28311  		x := v_0
 28312  		if x.Op != OpLoad {
 28313  			break
 28314  		}
 28315  		t := x.Type
 28316  		mem := x.Args[1]
 28317  		ptr := x.Args[0]
 28318  		if !(!fe.CanSSA(t)) {
 28319  			break
 28320  		}
 28321  		b = x.Block
 28322  		v0 := b.NewValue0(v.Pos, OpLoad, v.Type)
 28323  		v.copyOf(v0)
 28324  		v1 := b.NewValue0(v.Pos, OpOffPtr, v.Type.PtrTo())
 28325  		v1.AuxInt = int64ToAuxInt(t.FieldOff(int(i)))
 28326  		v1.AddArg(ptr)
 28327  		v0.AddArg2(v1, mem)
 28328  		return true
 28329  	}
 28330  	// match: (StructSelect [0] (IData x))
 28331  	// result: (IData x)
 28332  	for {
 28333  		if auxIntToInt64(v.AuxInt) != 0 || v_0.Op != OpIData {
 28334  			break
 28335  		}
 28336  		x := v_0.Args[0]
 28337  		v.reset(OpIData)
 28338  		v.AddArg(x)
 28339  		return true
 28340  	}
 28341  	return false
 28342  }
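// rewriteValuegeneric_OpSub16 applies the generic simplifications for 16-bit subtraction:
// constant folding, rewriting x - c as (-c) + x so the constant becomes an addend,
// factoring x*y - x*z into x*(y-z), identities such as x-x = 0 and (x+y)-x = y, and
// reassociation that pulls constants out of nested Add16/Sub16 expressions.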
 28343  func rewriteValuegeneric_OpSub16(v *Value) bool {
 28344  	v_1 := v.Args[1]
 28345  	v_0 := v.Args[0]
 28346  	b := v.Block
 28347  	// match: (Sub16 (Const16 [c]) (Const16 [d]))
 28348  	// result: (Const16 [c-d])
 28349  	for {
 28350  		if v_0.Op != OpConst16 {
 28351  			break
 28352  		}
 28353  		c := auxIntToInt16(v_0.AuxInt)
 28354  		if v_1.Op != OpConst16 {
 28355  			break
 28356  		}
 28357  		d := auxIntToInt16(v_1.AuxInt)
 28358  		v.reset(OpConst16)
 28359  		v.AuxInt = int16ToAuxInt(c - d)
 28360  		return true
 28361  	}
 28362  	// match: (Sub16 x (Const16 <t> [c]))
 28363  	// cond: x.Op != OpConst16
 28364  	// result: (Add16 (Const16 <t> [-c]) x)
 28365  	for {
 28366  		x := v_0
 28367  		if v_1.Op != OpConst16 {
 28368  			break
 28369  		}
 28370  		t := v_1.Type
 28371  		c := auxIntToInt16(v_1.AuxInt)
 28372  		if !(x.Op != OpConst16) {
 28373  			break
 28374  		}
 28375  		v.reset(OpAdd16)
 28376  		v0 := b.NewValue0(v.Pos, OpConst16, t)
 28377  		v0.AuxInt = int16ToAuxInt(-c)
 28378  		v.AddArg2(v0, x)
 28379  		return true
 28380  	}
 28381  	// match: (Sub16 <t> (Mul16 x y) (Mul16 x z))
 28382  	// result: (Mul16 x (Sub16 <t> y z))
 28383  	for {
 28384  		t := v.Type
 28385  		if v_0.Op != OpMul16 {
 28386  			break
 28387  		}
 28388  		_ = v_0.Args[1]
 28389  		v_0_0 := v_0.Args[0]
 28390  		v_0_1 := v_0.Args[1]
 28391  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 28392  			x := v_0_0
 28393  			y := v_0_1
 28394  			if v_1.Op != OpMul16 {
 28395  				continue
 28396  			}
 28397  			_ = v_1.Args[1]
 28398  			v_1_0 := v_1.Args[0]
 28399  			v_1_1 := v_1.Args[1]
 28400  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 28401  				if x != v_1_0 {
 28402  					continue
 28403  				}
 28404  				z := v_1_1
 28405  				v.reset(OpMul16)
 28406  				v0 := b.NewValue0(v.Pos, OpSub16, t)
 28407  				v0.AddArg2(y, z)
 28408  				v.AddArg2(x, v0)
 28409  				return true
 28410  			}
 28411  		}
 28412  		break
 28413  	}
 28414  	// match: (Sub16 x x)
 28415  	// result: (Const16 [0])
 28416  	for {
 28417  		x := v_0
 28418  		if x != v_1 {
 28419  			break
 28420  		}
 28421  		v.reset(OpConst16)
 28422  		v.AuxInt = int16ToAuxInt(0)
 28423  		return true
 28424  	}
 28425  	// match: (Sub16 (Neg16 x) (Com16 x))
 28426  	// result: (Const16 [1])
 28427  	for {
 28428  		if v_0.Op != OpNeg16 {
 28429  			break
 28430  		}
 28431  		x := v_0.Args[0]
 28432  		if v_1.Op != OpCom16 || x != v_1.Args[0] {
 28433  			break
 28434  		}
 28435  		v.reset(OpConst16)
 28436  		v.AuxInt = int16ToAuxInt(1)
 28437  		return true
 28438  	}
 28439  	// match: (Sub16 (Com16 x) (Neg16 x))
 28440  	// result: (Const16 [-1])
 28441  	for {
 28442  		if v_0.Op != OpCom16 {
 28443  			break
 28444  		}
 28445  		x := v_0.Args[0]
 28446  		if v_1.Op != OpNeg16 || x != v_1.Args[0] {
 28447  			break
 28448  		}
 28449  		v.reset(OpConst16)
 28450  		v.AuxInt = int16ToAuxInt(-1)
 28451  		return true
 28452  	}
 28453  	// match: (Sub16 (Add16 x y) x)
 28454  	// result: y
 28455  	for {
 28456  		if v_0.Op != OpAdd16 {
 28457  			break
 28458  		}
 28459  		_ = v_0.Args[1]
 28460  		v_0_0 := v_0.Args[0]
 28461  		v_0_1 := v_0.Args[1]
 28462  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 28463  			x := v_0_0
 28464  			y := v_0_1
 28465  			if x != v_1 {
 28466  				continue
 28467  			}
 28468  			v.copyOf(y)
 28469  			return true
 28470  		}
 28471  		break
 28472  	}
 28473  	// match: (Sub16 (Add16 x y) y)
 28474  	// result: x
 28475  	for {
 28476  		if v_0.Op != OpAdd16 {
 28477  			break
 28478  		}
 28479  		_ = v_0.Args[1]
 28480  		v_0_0 := v_0.Args[0]
 28481  		v_0_1 := v_0.Args[1]
 28482  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 28483  			x := v_0_0
 28484  			y := v_0_1
 28485  			if y != v_1 {
 28486  				continue
 28487  			}
 28488  			v.copyOf(x)
 28489  			return true
 28490  		}
 28491  		break
 28492  	}
 28493  	// match: (Sub16 (Sub16 x y) x)
 28494  	// result: (Neg16 y)
 28495  	for {
 28496  		if v_0.Op != OpSub16 {
 28497  			break
 28498  		}
 28499  		y := v_0.Args[1]
 28500  		x := v_0.Args[0]
 28501  		if x != v_1 {
 28502  			break
 28503  		}
 28504  		v.reset(OpNeg16)
 28505  		v.AddArg(y)
 28506  		return true
 28507  	}
 28508  	// match: (Sub16 x (Add16 x y))
 28509  	// result: (Neg16 y)
 28510  	for {
 28511  		x := v_0
 28512  		if v_1.Op != OpAdd16 {
 28513  			break
 28514  		}
 28515  		_ = v_1.Args[1]
 28516  		v_1_0 := v_1.Args[0]
 28517  		v_1_1 := v_1.Args[1]
 28518  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 28519  			if x != v_1_0 {
 28520  				continue
 28521  			}
 28522  			y := v_1_1
 28523  			v.reset(OpNeg16)
 28524  			v.AddArg(y)
 28525  			return true
 28526  		}
 28527  		break
 28528  	}
 28529  	// match: (Sub16 x (Sub16 i:(Const16 <t>) z))
 28530  	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
 28531  	// result: (Sub16 (Add16 <t> x z) i)
 28532  	for {
 28533  		x := v_0
 28534  		if v_1.Op != OpSub16 {
 28535  			break
 28536  		}
 28537  		z := v_1.Args[1]
 28538  		i := v_1.Args[0]
 28539  		if i.Op != OpConst16 {
 28540  			break
 28541  		}
 28542  		t := i.Type
 28543  		if !(z.Op != OpConst16 && x.Op != OpConst16) {
 28544  			break
 28545  		}
 28546  		v.reset(OpSub16)
 28547  		v0 := b.NewValue0(v.Pos, OpAdd16, t)
 28548  		v0.AddArg2(x, z)
 28549  		v.AddArg2(v0, i)
 28550  		return true
 28551  	}
 28552  	// match: (Sub16 x (Add16 z i:(Const16 <t>)))
 28553  	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
 28554  	// result: (Sub16 (Sub16 <t> x z) i)
 28555  	for {
 28556  		x := v_0
 28557  		if v_1.Op != OpAdd16 {
 28558  			break
 28559  		}
 28560  		_ = v_1.Args[1]
 28561  		v_1_0 := v_1.Args[0]
 28562  		v_1_1 := v_1.Args[1]
 28563  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 28564  			z := v_1_0
 28565  			i := v_1_1
 28566  			if i.Op != OpConst16 {
 28567  				continue
 28568  			}
 28569  			t := i.Type
 28570  			if !(z.Op != OpConst16 && x.Op != OpConst16) {
 28571  				continue
 28572  			}
 28573  			v.reset(OpSub16)
 28574  			v0 := b.NewValue0(v.Pos, OpSub16, t)
 28575  			v0.AddArg2(x, z)
 28576  			v.AddArg2(v0, i)
 28577  			return true
 28578  		}
 28579  		break
 28580  	}
 28581  	// match: (Sub16 (Sub16 i:(Const16 <t>) z) x)
 28582  	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
 28583  	// result: (Sub16 i (Add16 <t> z x))
 28584  	for {
 28585  		if v_0.Op != OpSub16 {
 28586  			break
 28587  		}
 28588  		z := v_0.Args[1]
 28589  		i := v_0.Args[0]
 28590  		if i.Op != OpConst16 {
 28591  			break
 28592  		}
 28593  		t := i.Type
 28594  		x := v_1
 28595  		if !(z.Op != OpConst16 && x.Op != OpConst16) {
 28596  			break
 28597  		}
 28598  		v.reset(OpSub16)
 28599  		v0 := b.NewValue0(v.Pos, OpAdd16, t)
 28600  		v0.AddArg2(z, x)
 28601  		v.AddArg2(i, v0)
 28602  		return true
 28603  	}
 28604  	// match: (Sub16 (Add16 z i:(Const16 <t>)) x)
 28605  	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
 28606  	// result: (Add16 i (Sub16 <t> z x))
 28607  	for {
 28608  		if v_0.Op != OpAdd16 {
 28609  			break
 28610  		}
 28611  		_ = v_0.Args[1]
 28612  		v_0_0 := v_0.Args[0]
 28613  		v_0_1 := v_0.Args[1]
 28614  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 28615  			z := v_0_0
 28616  			i := v_0_1
 28617  			if i.Op != OpConst16 {
 28618  				continue
 28619  			}
 28620  			t := i.Type
 28621  			x := v_1
 28622  			if !(z.Op != OpConst16 && x.Op != OpConst16) {
 28623  				continue
 28624  			}
 28625  			v.reset(OpAdd16)
 28626  			v0 := b.NewValue0(v.Pos, OpSub16, t)
 28627  			v0.AddArg2(z, x)
 28628  			v.AddArg2(i, v0)
 28629  			return true
 28630  		}
 28631  		break
 28632  	}
 28633  	// match: (Sub16 (Const16 <t> [c]) (Sub16 (Const16 <t> [d]) x))
 28634  	// result: (Add16 (Const16 <t> [c-d]) x)
 28635  	for {
 28636  		if v_0.Op != OpConst16 {
 28637  			break
 28638  		}
 28639  		t := v_0.Type
 28640  		c := auxIntToInt16(v_0.AuxInt)
 28641  		if v_1.Op != OpSub16 {
 28642  			break
 28643  		}
 28644  		x := v_1.Args[1]
 28645  		v_1_0 := v_1.Args[0]
 28646  		if v_1_0.Op != OpConst16 || v_1_0.Type != t {
 28647  			break
 28648  		}
 28649  		d := auxIntToInt16(v_1_0.AuxInt)
 28650  		v.reset(OpAdd16)
 28651  		v0 := b.NewValue0(v.Pos, OpConst16, t)
 28652  		v0.AuxInt = int16ToAuxInt(c - d)
 28653  		v.AddArg2(v0, x)
 28654  		return true
 28655  	}
 28656  	// match: (Sub16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x))
 28657  	// result: (Sub16 (Const16 <t> [c-d]) x)
 28658  	for {
 28659  		if v_0.Op != OpConst16 {
 28660  			break
 28661  		}
 28662  		t := v_0.Type
 28663  		c := auxIntToInt16(v_0.AuxInt)
 28664  		if v_1.Op != OpAdd16 {
 28665  			break
 28666  		}
 28667  		_ = v_1.Args[1]
 28668  		v_1_0 := v_1.Args[0]
 28669  		v_1_1 := v_1.Args[1]
 28670  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 28671  			if v_1_0.Op != OpConst16 || v_1_0.Type != t {
 28672  				continue
 28673  			}
 28674  			d := auxIntToInt16(v_1_0.AuxInt)
 28675  			x := v_1_1
 28676  			v.reset(OpSub16)
 28677  			v0 := b.NewValue0(v.Pos, OpConst16, t)
 28678  			v0.AuxInt = int16ToAuxInt(c - d)
 28679  			v.AddArg2(v0, x)
 28680  			return true
 28681  		}
 28682  		break
 28683  	}
 28684  	return false
 28685  }
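// rewriteValuegeneric_OpSub32 applies the same subtraction simplifications as
// rewriteValuegeneric_OpSub16, specialized to 32-bit values.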
 28686  func rewriteValuegeneric_OpSub32(v *Value) bool {
 28687  	v_1 := v.Args[1]
 28688  	v_0 := v.Args[0]
 28689  	b := v.Block
 28690  	// match: (Sub32 (Const32 [c]) (Const32 [d]))
 28691  	// result: (Const32 [c-d])
 28692  	for {
 28693  		if v_0.Op != OpConst32 {
 28694  			break
 28695  		}
 28696  		c := auxIntToInt32(v_0.AuxInt)
 28697  		if v_1.Op != OpConst32 {
 28698  			break
 28699  		}
 28700  		d := auxIntToInt32(v_1.AuxInt)
 28701  		v.reset(OpConst32)
 28702  		v.AuxInt = int32ToAuxInt(c - d)
 28703  		return true
 28704  	}
 28705  	// match: (Sub32 x (Const32 <t> [c]))
 28706  	// cond: x.Op != OpConst32
 28707  	// result: (Add32 (Const32 <t> [-c]) x)
 28708  	for {
 28709  		x := v_0
 28710  		if v_1.Op != OpConst32 {
 28711  			break
 28712  		}
 28713  		t := v_1.Type
 28714  		c := auxIntToInt32(v_1.AuxInt)
 28715  		if !(x.Op != OpConst32) {
 28716  			break
 28717  		}
 28718  		v.reset(OpAdd32)
 28719  		v0 := b.NewValue0(v.Pos, OpConst32, t)
 28720  		v0.AuxInt = int32ToAuxInt(-c)
 28721  		v.AddArg2(v0, x)
 28722  		return true
 28723  	}
 28724  	// match: (Sub32 <t> (Mul32 x y) (Mul32 x z))
 28725  	// result: (Mul32 x (Sub32 <t> y z))
 28726  	for {
 28727  		t := v.Type
 28728  		if v_0.Op != OpMul32 {
 28729  			break
 28730  		}
 28731  		_ = v_0.Args[1]
 28732  		v_0_0 := v_0.Args[0]
 28733  		v_0_1 := v_0.Args[1]
 28734  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 28735  			x := v_0_0
 28736  			y := v_0_1
 28737  			if v_1.Op != OpMul32 {
 28738  				continue
 28739  			}
 28740  			_ = v_1.Args[1]
 28741  			v_1_0 := v_1.Args[0]
 28742  			v_1_1 := v_1.Args[1]
 28743  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 28744  				if x != v_1_0 {
 28745  					continue
 28746  				}
 28747  				z := v_1_1
 28748  				v.reset(OpMul32)
 28749  				v0 := b.NewValue0(v.Pos, OpSub32, t)
 28750  				v0.AddArg2(y, z)
 28751  				v.AddArg2(x, v0)
 28752  				return true
 28753  			}
 28754  		}
 28755  		break
 28756  	}
 28757  	// match: (Sub32 x x)
 28758  	// result: (Const32 [0])
 28759  	for {
 28760  		x := v_0
 28761  		if x != v_1 {
 28762  			break
 28763  		}
 28764  		v.reset(OpConst32)
 28765  		v.AuxInt = int32ToAuxInt(0)
 28766  		return true
 28767  	}
 28768  	// match: (Sub32 (Neg32 x) (Com32 x))
 28769  	// result: (Const32 [1])
 28770  	for {
 28771  		if v_0.Op != OpNeg32 {
 28772  			break
 28773  		}
 28774  		x := v_0.Args[0]
 28775  		if v_1.Op != OpCom32 || x != v_1.Args[0] {
 28776  			break
 28777  		}
 28778  		v.reset(OpConst32)
 28779  		v.AuxInt = int32ToAuxInt(1)
 28780  		return true
 28781  	}
 28782  	// match: (Sub32 (Com32 x) (Neg32 x))
 28783  	// result: (Const32 [-1])
 28784  	for {
 28785  		if v_0.Op != OpCom32 {
 28786  			break
 28787  		}
 28788  		x := v_0.Args[0]
 28789  		if v_1.Op != OpNeg32 || x != v_1.Args[0] {
 28790  			break
 28791  		}
 28792  		v.reset(OpConst32)
 28793  		v.AuxInt = int32ToAuxInt(-1)
 28794  		return true
 28795  	}
 28796  	// match: (Sub32 (Add32 x y) x)
 28797  	// result: y
 28798  	for {
 28799  		if v_0.Op != OpAdd32 {
 28800  			break
 28801  		}
 28802  		_ = v_0.Args[1]
 28803  		v_0_0 := v_0.Args[0]
 28804  		v_0_1 := v_0.Args[1]
 28805  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 28806  			x := v_0_0
 28807  			y := v_0_1
 28808  			if x != v_1 {
 28809  				continue
 28810  			}
 28811  			v.copyOf(y)
 28812  			return true
 28813  		}
 28814  		break
 28815  	}
 28816  	// match: (Sub32 (Add32 x y) y)
 28817  	// result: x
 28818  	for {
 28819  		if v_0.Op != OpAdd32 {
 28820  			break
 28821  		}
 28822  		_ = v_0.Args[1]
 28823  		v_0_0 := v_0.Args[0]
 28824  		v_0_1 := v_0.Args[1]
 28825  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 28826  			x := v_0_0
 28827  			y := v_0_1
 28828  			if y != v_1 {
 28829  				continue
 28830  			}
 28831  			v.copyOf(x)
 28832  			return true
 28833  		}
 28834  		break
 28835  	}
 28836  	// match: (Sub32 (Sub32 x y) x)
 28837  	// result: (Neg32 y)
 28838  	for {
 28839  		if v_0.Op != OpSub32 {
 28840  			break
 28841  		}
 28842  		y := v_0.Args[1]
 28843  		x := v_0.Args[0]
 28844  		if x != v_1 {
 28845  			break
 28846  		}
 28847  		v.reset(OpNeg32)
 28848  		v.AddArg(y)
 28849  		return true
 28850  	}
 28851  	// match: (Sub32 x (Add32 x y))
 28852  	// result: (Neg32 y)
 28853  	for {
 28854  		x := v_0
 28855  		if v_1.Op != OpAdd32 {
 28856  			break
 28857  		}
 28858  		_ = v_1.Args[1]
 28859  		v_1_0 := v_1.Args[0]
 28860  		v_1_1 := v_1.Args[1]
 28861  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 28862  			if x != v_1_0 {
 28863  				continue
 28864  			}
 28865  			y := v_1_1
 28866  			v.reset(OpNeg32)
 28867  			v.AddArg(y)
 28868  			return true
 28869  		}
 28870  		break
 28871  	}
 28872  	// match: (Sub32 x (Sub32 i:(Const32 <t>) z))
 28873  	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
 28874  	// result: (Sub32 (Add32 <t> x z) i)
 28875  	for {
 28876  		x := v_0
 28877  		if v_1.Op != OpSub32 {
 28878  			break
 28879  		}
 28880  		z := v_1.Args[1]
 28881  		i := v_1.Args[0]
 28882  		if i.Op != OpConst32 {
 28883  			break
 28884  		}
 28885  		t := i.Type
 28886  		if !(z.Op != OpConst32 && x.Op != OpConst32) {
 28887  			break
 28888  		}
 28889  		v.reset(OpSub32)
 28890  		v0 := b.NewValue0(v.Pos, OpAdd32, t)
 28891  		v0.AddArg2(x, z)
 28892  		v.AddArg2(v0, i)
 28893  		return true
 28894  	}
 28895  	// match: (Sub32 x (Add32 z i:(Const32 <t>)))
 28896  	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
 28897  	// result: (Sub32 (Sub32 <t> x z) i)
 28898  	for {
 28899  		x := v_0
 28900  		if v_1.Op != OpAdd32 {
 28901  			break
 28902  		}
 28903  		_ = v_1.Args[1]
 28904  		v_1_0 := v_1.Args[0]
 28905  		v_1_1 := v_1.Args[1]
 28906  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 28907  			z := v_1_0
 28908  			i := v_1_1
 28909  			if i.Op != OpConst32 {
 28910  				continue
 28911  			}
 28912  			t := i.Type
 28913  			if !(z.Op != OpConst32 && x.Op != OpConst32) {
 28914  				continue
 28915  			}
 28916  			v.reset(OpSub32)
 28917  			v0 := b.NewValue0(v.Pos, OpSub32, t)
 28918  			v0.AddArg2(x, z)
 28919  			v.AddArg2(v0, i)
 28920  			return true
 28921  		}
 28922  		break
 28923  	}
 28924  	// match: (Sub32 (Sub32 i:(Const32 <t>) z) x)
 28925  	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
 28926  	// result: (Sub32 i (Add32 <t> z x))
 28927  	for {
 28928  		if v_0.Op != OpSub32 {
 28929  			break
 28930  		}
 28931  		z := v_0.Args[1]
 28932  		i := v_0.Args[0]
 28933  		if i.Op != OpConst32 {
 28934  			break
 28935  		}
 28936  		t := i.Type
 28937  		x := v_1
 28938  		if !(z.Op != OpConst32 && x.Op != OpConst32) {
 28939  			break
 28940  		}
 28941  		v.reset(OpSub32)
 28942  		v0 := b.NewValue0(v.Pos, OpAdd32, t)
 28943  		v0.AddArg2(z, x)
 28944  		v.AddArg2(i, v0)
 28945  		return true
 28946  	}
 28947  	// match: (Sub32 (Add32 z i:(Const32 <t>)) x)
 28948  	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
 28949  	// result: (Add32 i (Sub32 <t> z x))
 28950  	for {
 28951  		if v_0.Op != OpAdd32 {
 28952  			break
 28953  		}
 28954  		_ = v_0.Args[1]
 28955  		v_0_0 := v_0.Args[0]
 28956  		v_0_1 := v_0.Args[1]
 28957  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 28958  			z := v_0_0
 28959  			i := v_0_1
 28960  			if i.Op != OpConst32 {
 28961  				continue
 28962  			}
 28963  			t := i.Type
 28964  			x := v_1
 28965  			if !(z.Op != OpConst32 && x.Op != OpConst32) {
 28966  				continue
 28967  			}
 28968  			v.reset(OpAdd32)
 28969  			v0 := b.NewValue0(v.Pos, OpSub32, t)
 28970  			v0.AddArg2(z, x)
 28971  			v.AddArg2(i, v0)
 28972  			return true
 28973  		}
 28974  		break
 28975  	}
 28976  	// match: (Sub32 (Const32 <t> [c]) (Sub32 (Const32 <t> [d]) x))
 28977  	// result: (Add32 (Const32 <t> [c-d]) x)
 28978  	for {
 28979  		if v_0.Op != OpConst32 {
 28980  			break
 28981  		}
 28982  		t := v_0.Type
 28983  		c := auxIntToInt32(v_0.AuxInt)
 28984  		if v_1.Op != OpSub32 {
 28985  			break
 28986  		}
 28987  		x := v_1.Args[1]
 28988  		v_1_0 := v_1.Args[0]
 28989  		if v_1_0.Op != OpConst32 || v_1_0.Type != t {
 28990  			break
 28991  		}
 28992  		d := auxIntToInt32(v_1_0.AuxInt)
 28993  		v.reset(OpAdd32)
 28994  		v0 := b.NewValue0(v.Pos, OpConst32, t)
 28995  		v0.AuxInt = int32ToAuxInt(c - d)
 28996  		v.AddArg2(v0, x)
 28997  		return true
 28998  	}
 28999  	// match: (Sub32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x))
 29000  	// result: (Sub32 (Const32 <t> [c-d]) x)
 29001  	for {
 29002  		if v_0.Op != OpConst32 {
 29003  			break
 29004  		}
 29005  		t := v_0.Type
 29006  		c := auxIntToInt32(v_0.AuxInt)
 29007  		if v_1.Op != OpAdd32 {
 29008  			break
 29009  		}
 29010  		_ = v_1.Args[1]
 29011  		v_1_0 := v_1.Args[0]
 29012  		v_1_1 := v_1.Args[1]
 29013  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 29014  			if v_1_0.Op != OpConst32 || v_1_0.Type != t {
 29015  				continue
 29016  			}
 29017  			d := auxIntToInt32(v_1_0.AuxInt)
 29018  			x := v_1_1
 29019  			v.reset(OpSub32)
 29020  			v0 := b.NewValue0(v.Pos, OpConst32, t)
 29021  			v0.AuxInt = int32ToAuxInt(c - d)
 29022  			v.AddArg2(v0, x)
 29023  			return true
 29024  		}
 29025  		break
 29026  	}
 29027  	return false
 29028  }
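// rewriteValuegeneric_OpSub32F folds subtraction of float32 constants. The c-d == c-d
// condition rejects NaN results (a NaN never compares equal to itself), so the fold only
// applies when the difference is a well-defined, non-NaN constant.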
 29029  func rewriteValuegeneric_OpSub32F(v *Value) bool {
 29030  	v_1 := v.Args[1]
 29031  	v_0 := v.Args[0]
 29032  	// match: (Sub32F (Const32F [c]) (Const32F [d]))
 29033  	// cond: c-d == c-d
 29034  	// result: (Const32F [c-d])
 29035  	for {
 29036  		if v_0.Op != OpConst32F {
 29037  			break
 29038  		}
 29039  		c := auxIntToFloat32(v_0.AuxInt)
 29040  		if v_1.Op != OpConst32F {
 29041  			break
 29042  		}
 29043  		d := auxIntToFloat32(v_1.AuxInt)
 29044  		if !(c-d == c-d) {
 29045  			break
 29046  		}
 29047  		v.reset(OpConst32F)
 29048  		v.AuxInt = float32ToAuxInt(c - d)
 29049  		return true
 29050  	}
 29051  	return false
 29052  }
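// rewriteValuegeneric_OpSub64 applies the same subtraction simplifications as
// rewriteValuegeneric_OpSub16, specialized to 64-bit values.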
 29053  func rewriteValuegeneric_OpSub64(v *Value) bool {
 29054  	v_1 := v.Args[1]
 29055  	v_0 := v.Args[0]
 29056  	b := v.Block
 29057  	// match: (Sub64 (Const64 [c]) (Const64 [d]))
 29058  	// result: (Const64 [c-d])
 29059  	for {
 29060  		if v_0.Op != OpConst64 {
 29061  			break
 29062  		}
 29063  		c := auxIntToInt64(v_0.AuxInt)
 29064  		if v_1.Op != OpConst64 {
 29065  			break
 29066  		}
 29067  		d := auxIntToInt64(v_1.AuxInt)
 29068  		v.reset(OpConst64)
 29069  		v.AuxInt = int64ToAuxInt(c - d)
 29070  		return true
 29071  	}
 29072  	// match: (Sub64 x (Const64 <t> [c]))
 29073  	// cond: x.Op != OpConst64
 29074  	// result: (Add64 (Const64 <t> [-c]) x)
 29075  	for {
 29076  		x := v_0
 29077  		if v_1.Op != OpConst64 {
 29078  			break
 29079  		}
 29080  		t := v_1.Type
 29081  		c := auxIntToInt64(v_1.AuxInt)
 29082  		if !(x.Op != OpConst64) {
 29083  			break
 29084  		}
 29085  		v.reset(OpAdd64)
 29086  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 29087  		v0.AuxInt = int64ToAuxInt(-c)
 29088  		v.AddArg2(v0, x)
 29089  		return true
 29090  	}
 29091  	// match: (Sub64 <t> (Mul64 x y) (Mul64 x z))
 29092  	// result: (Mul64 x (Sub64 <t> y z))
 29093  	for {
 29094  		t := v.Type
 29095  		if v_0.Op != OpMul64 {
 29096  			break
 29097  		}
 29098  		_ = v_0.Args[1]
 29099  		v_0_0 := v_0.Args[0]
 29100  		v_0_1 := v_0.Args[1]
 29101  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 29102  			x := v_0_0
 29103  			y := v_0_1
 29104  			if v_1.Op != OpMul64 {
 29105  				continue
 29106  			}
 29107  			_ = v_1.Args[1]
 29108  			v_1_0 := v_1.Args[0]
 29109  			v_1_1 := v_1.Args[1]
 29110  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 29111  				if x != v_1_0 {
 29112  					continue
 29113  				}
 29114  				z := v_1_1
 29115  				v.reset(OpMul64)
 29116  				v0 := b.NewValue0(v.Pos, OpSub64, t)
 29117  				v0.AddArg2(y, z)
 29118  				v.AddArg2(x, v0)
 29119  				return true
 29120  			}
 29121  		}
 29122  		break
 29123  	}
 29124  	// match: (Sub64 x x)
 29125  	// result: (Const64 [0])
 29126  	for {
 29127  		x := v_0
 29128  		if x != v_1 {
 29129  			break
 29130  		}
 29131  		v.reset(OpConst64)
 29132  		v.AuxInt = int64ToAuxInt(0)
 29133  		return true
 29134  	}
 29135  	// match: (Sub64 (Neg64 x) (Com64 x))
 29136  	// result: (Const64 [1])
 29137  	for {
 29138  		if v_0.Op != OpNeg64 {
 29139  			break
 29140  		}
 29141  		x := v_0.Args[0]
 29142  		if v_1.Op != OpCom64 || x != v_1.Args[0] {
 29143  			break
 29144  		}
 29145  		v.reset(OpConst64)
 29146  		v.AuxInt = int64ToAuxInt(1)
 29147  		return true
 29148  	}
 29149  	// match: (Sub64 (Com64 x) (Neg64 x))
 29150  	// result: (Const64 [-1])
 29151  	for {
 29152  		if v_0.Op != OpCom64 {
 29153  			break
 29154  		}
 29155  		x := v_0.Args[0]
 29156  		if v_1.Op != OpNeg64 || x != v_1.Args[0] {
 29157  			break
 29158  		}
 29159  		v.reset(OpConst64)
 29160  		v.AuxInt = int64ToAuxInt(-1)
 29161  		return true
 29162  	}
 29163  	// match: (Sub64 (Add64 x y) x)
 29164  	// result: y
 29165  	for {
 29166  		if v_0.Op != OpAdd64 {
 29167  			break
 29168  		}
 29169  		_ = v_0.Args[1]
 29170  		v_0_0 := v_0.Args[0]
 29171  		v_0_1 := v_0.Args[1]
 29172  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 29173  			x := v_0_0
 29174  			y := v_0_1
 29175  			if x != v_1 {
 29176  				continue
 29177  			}
 29178  			v.copyOf(y)
 29179  			return true
 29180  		}
 29181  		break
 29182  	}
 29183  	// match: (Sub64 (Add64 x y) y)
 29184  	// result: x
 29185  	for {
 29186  		if v_0.Op != OpAdd64 {
 29187  			break
 29188  		}
 29189  		_ = v_0.Args[1]
 29190  		v_0_0 := v_0.Args[0]
 29191  		v_0_1 := v_0.Args[1]
 29192  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 29193  			x := v_0_0
 29194  			y := v_0_1
 29195  			if y != v_1 {
 29196  				continue
 29197  			}
 29198  			v.copyOf(x)
 29199  			return true
 29200  		}
 29201  		break
 29202  	}
 29203  	// match: (Sub64 (Sub64 x y) x)
 29204  	// result: (Neg64 y)
 29205  	for {
 29206  		if v_0.Op != OpSub64 {
 29207  			break
 29208  		}
 29209  		y := v_0.Args[1]
 29210  		x := v_0.Args[0]
 29211  		if x != v_1 {
 29212  			break
 29213  		}
 29214  		v.reset(OpNeg64)
 29215  		v.AddArg(y)
 29216  		return true
 29217  	}
 29218  	// match: (Sub64 x (Add64 x y))
 29219  	// result: (Neg64 y)
 29220  	for {
 29221  		x := v_0
 29222  		if v_1.Op != OpAdd64 {
 29223  			break
 29224  		}
 29225  		_ = v_1.Args[1]
 29226  		v_1_0 := v_1.Args[0]
 29227  		v_1_1 := v_1.Args[1]
 29228  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 29229  			if x != v_1_0 {
 29230  				continue
 29231  			}
 29232  			y := v_1_1
 29233  			v.reset(OpNeg64)
 29234  			v.AddArg(y)
 29235  			return true
 29236  		}
 29237  		break
 29238  	}
 29239  	// match: (Sub64 x (Sub64 i:(Const64 <t>) z))
 29240  	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
 29241  	// result: (Sub64 (Add64 <t> x z) i)
 29242  	for {
 29243  		x := v_0
 29244  		if v_1.Op != OpSub64 {
 29245  			break
 29246  		}
 29247  		z := v_1.Args[1]
 29248  		i := v_1.Args[0]
 29249  		if i.Op != OpConst64 {
 29250  			break
 29251  		}
 29252  		t := i.Type
 29253  		if !(z.Op != OpConst64 && x.Op != OpConst64) {
 29254  			break
 29255  		}
 29256  		v.reset(OpSub64)
 29257  		v0 := b.NewValue0(v.Pos, OpAdd64, t)
 29258  		v0.AddArg2(x, z)
 29259  		v.AddArg2(v0, i)
 29260  		return true
 29261  	}
 29262  	// match: (Sub64 x (Add64 z i:(Const64 <t>)))
 29263  	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
 29264  	// result: (Sub64 (Sub64 <t> x z) i)
 29265  	for {
 29266  		x := v_0
 29267  		if v_1.Op != OpAdd64 {
 29268  			break
 29269  		}
 29270  		_ = v_1.Args[1]
 29271  		v_1_0 := v_1.Args[0]
 29272  		v_1_1 := v_1.Args[1]
 29273  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 29274  			z := v_1_0
 29275  			i := v_1_1
 29276  			if i.Op != OpConst64 {
 29277  				continue
 29278  			}
 29279  			t := i.Type
 29280  			if !(z.Op != OpConst64 && x.Op != OpConst64) {
 29281  				continue
 29282  			}
 29283  			v.reset(OpSub64)
 29284  			v0 := b.NewValue0(v.Pos, OpSub64, t)
 29285  			v0.AddArg2(x, z)
 29286  			v.AddArg2(v0, i)
 29287  			return true
 29288  		}
 29289  		break
 29290  	}
 29291  	// match: (Sub64 (Sub64 i:(Const64 <t>) z) x)
 29292  	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
 29293  	// result: (Sub64 i (Add64 <t> z x))
 29294  	for {
 29295  		if v_0.Op != OpSub64 {
 29296  			break
 29297  		}
 29298  		z := v_0.Args[1]
 29299  		i := v_0.Args[0]
 29300  		if i.Op != OpConst64 {
 29301  			break
 29302  		}
 29303  		t := i.Type
 29304  		x := v_1
 29305  		if !(z.Op != OpConst64 && x.Op != OpConst64) {
 29306  			break
 29307  		}
 29308  		v.reset(OpSub64)
 29309  		v0 := b.NewValue0(v.Pos, OpAdd64, t)
 29310  		v0.AddArg2(z, x)
 29311  		v.AddArg2(i, v0)
 29312  		return true
 29313  	}
 29314  	// match: (Sub64 (Add64 z i:(Const64 <t>)) x)
 29315  	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
 29316  	// result: (Add64 i (Sub64 <t> z x))
 29317  	for {
 29318  		if v_0.Op != OpAdd64 {
 29319  			break
 29320  		}
 29321  		_ = v_0.Args[1]
 29322  		v_0_0 := v_0.Args[0]
 29323  		v_0_1 := v_0.Args[1]
 29324  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 29325  			z := v_0_0
 29326  			i := v_0_1
 29327  			if i.Op != OpConst64 {
 29328  				continue
 29329  			}
 29330  			t := i.Type
 29331  			x := v_1
 29332  			if !(z.Op != OpConst64 && x.Op != OpConst64) {
 29333  				continue
 29334  			}
 29335  			v.reset(OpAdd64)
 29336  			v0 := b.NewValue0(v.Pos, OpSub64, t)
 29337  			v0.AddArg2(z, x)
 29338  			v.AddArg2(i, v0)
 29339  			return true
 29340  		}
 29341  		break
 29342  	}
 29343  	// match: (Sub64 (Const64 <t> [c]) (Sub64 (Const64 <t> [d]) x))
 29344  	// result: (Add64 (Const64 <t> [c-d]) x)
 29345  	for {
 29346  		if v_0.Op != OpConst64 {
 29347  			break
 29348  		}
 29349  		t := v_0.Type
 29350  		c := auxIntToInt64(v_0.AuxInt)
 29351  		if v_1.Op != OpSub64 {
 29352  			break
 29353  		}
 29354  		x := v_1.Args[1]
 29355  		v_1_0 := v_1.Args[0]
 29356  		if v_1_0.Op != OpConst64 || v_1_0.Type != t {
 29357  			break
 29358  		}
 29359  		d := auxIntToInt64(v_1_0.AuxInt)
 29360  		v.reset(OpAdd64)
 29361  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 29362  		v0.AuxInt = int64ToAuxInt(c - d)
 29363  		v.AddArg2(v0, x)
 29364  		return true
 29365  	}
 29366  	// match: (Sub64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x))
 29367  	// result: (Sub64 (Const64 <t> [c-d]) x)
 29368  	for {
 29369  		if v_0.Op != OpConst64 {
 29370  			break
 29371  		}
 29372  		t := v_0.Type
 29373  		c := auxIntToInt64(v_0.AuxInt)
 29374  		if v_1.Op != OpAdd64 {
 29375  			break
 29376  		}
 29377  		_ = v_1.Args[1]
 29378  		v_1_0 := v_1.Args[0]
 29379  		v_1_1 := v_1.Args[1]
 29380  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 29381  			if v_1_0.Op != OpConst64 || v_1_0.Type != t {
 29382  				continue
 29383  			}
 29384  			d := auxIntToInt64(v_1_0.AuxInt)
 29385  			x := v_1_1
 29386  			v.reset(OpSub64)
 29387  			v0 := b.NewValue0(v.Pos, OpConst64, t)
 29388  			v0.AuxInt = int64ToAuxInt(c - d)
 29389  			v.AddArg2(v0, x)
 29390  			return true
 29391  		}
 29392  		break
 29393  	}
 29394  	return false
 29395  }
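// rewriteValuegeneric_OpSub64F folds subtraction of float64 constants, using the same
// c-d == c-d check as the float32 case to avoid folding when the result would be NaN.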
 29396  func rewriteValuegeneric_OpSub64F(v *Value) bool {
 29397  	v_1 := v.Args[1]
 29398  	v_0 := v.Args[0]
 29399  	// match: (Sub64F (Const64F [c]) (Const64F [d]))
 29400  	// cond: c-d == c-d
 29401  	// result: (Const64F [c-d])
 29402  	for {
 29403  		if v_0.Op != OpConst64F {
 29404  			break
 29405  		}
 29406  		c := auxIntToFloat64(v_0.AuxInt)
 29407  		if v_1.Op != OpConst64F {
 29408  			break
 29409  		}
 29410  		d := auxIntToFloat64(v_1.AuxInt)
 29411  		if !(c-d == c-d) {
 29412  			break
 29413  		}
 29414  		v.reset(OpConst64F)
 29415  		v.AuxInt = float64ToAuxInt(c - d)
 29416  		return true
 29417  	}
 29418  	return false
 29419  }
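// rewriteValuegeneric_OpSub8 applies the same subtraction simplifications as
// rewriteValuegeneric_OpSub16, specialized to 8-bit values.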
 29420  func rewriteValuegeneric_OpSub8(v *Value) bool {
 29421  	v_1 := v.Args[1]
 29422  	v_0 := v.Args[0]
 29423  	b := v.Block
 29424  	// match: (Sub8 (Const8 [c]) (Const8 [d]))
 29425  	// result: (Const8 [c-d])
 29426  	for {
 29427  		if v_0.Op != OpConst8 {
 29428  			break
 29429  		}
 29430  		c := auxIntToInt8(v_0.AuxInt)
 29431  		if v_1.Op != OpConst8 {
 29432  			break
 29433  		}
 29434  		d := auxIntToInt8(v_1.AuxInt)
 29435  		v.reset(OpConst8)
 29436  		v.AuxInt = int8ToAuxInt(c - d)
 29437  		return true
 29438  	}
 29439  	// match: (Sub8 x (Const8 <t> [c]))
 29440  	// cond: x.Op != OpConst8
 29441  	// result: (Add8 (Const8 <t> [-c]) x)
 29442  	for {
 29443  		x := v_0
 29444  		if v_1.Op != OpConst8 {
 29445  			break
 29446  		}
 29447  		t := v_1.Type
 29448  		c := auxIntToInt8(v_1.AuxInt)
 29449  		if !(x.Op != OpConst8) {
 29450  			break
 29451  		}
 29452  		v.reset(OpAdd8)
 29453  		v0 := b.NewValue0(v.Pos, OpConst8, t)
 29454  		v0.AuxInt = int8ToAuxInt(-c)
 29455  		v.AddArg2(v0, x)
 29456  		return true
 29457  	}
 29458  	// match: (Sub8 <t> (Mul8 x y) (Mul8 x z))
 29459  	// result: (Mul8 x (Sub8 <t> y z))
 29460  	for {
 29461  		t := v.Type
 29462  		if v_0.Op != OpMul8 {
 29463  			break
 29464  		}
 29465  		_ = v_0.Args[1]
 29466  		v_0_0 := v_0.Args[0]
 29467  		v_0_1 := v_0.Args[1]
 29468  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 29469  			x := v_0_0
 29470  			y := v_0_1
 29471  			if v_1.Op != OpMul8 {
 29472  				continue
 29473  			}
 29474  			_ = v_1.Args[1]
 29475  			v_1_0 := v_1.Args[0]
 29476  			v_1_1 := v_1.Args[1]
 29477  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 29478  				if x != v_1_0 {
 29479  					continue
 29480  				}
 29481  				z := v_1_1
 29482  				v.reset(OpMul8)
 29483  				v0 := b.NewValue0(v.Pos, OpSub8, t)
 29484  				v0.AddArg2(y, z)
 29485  				v.AddArg2(x, v0)
 29486  				return true
 29487  			}
 29488  		}
 29489  		break
 29490  	}
 29491  	// match: (Sub8 x x)
 29492  	// result: (Const8 [0])
 29493  	for {
 29494  		x := v_0
 29495  		if x != v_1 {
 29496  			break
 29497  		}
 29498  		v.reset(OpConst8)
 29499  		v.AuxInt = int8ToAuxInt(0)
 29500  		return true
 29501  	}
 29502  	// match: (Sub8 (Neg8 x) (Com8 x))
 29503  	// result: (Const8 [1])
 29504  	for {
 29505  		if v_0.Op != OpNeg8 {
 29506  			break
 29507  		}
 29508  		x := v_0.Args[0]
 29509  		if v_1.Op != OpCom8 || x != v_1.Args[0] {
 29510  			break
 29511  		}
 29512  		v.reset(OpConst8)
 29513  		v.AuxInt = int8ToAuxInt(1)
 29514  		return true
 29515  	}
 29516  	// match: (Sub8 (Com8 x) (Neg8 x))
 29517  	// result: (Const8 [-1])
 29518  	for {
 29519  		if v_0.Op != OpCom8 {
 29520  			break
 29521  		}
 29522  		x := v_0.Args[0]
 29523  		if v_1.Op != OpNeg8 || x != v_1.Args[0] {
 29524  			break
 29525  		}
 29526  		v.reset(OpConst8)
 29527  		v.AuxInt = int8ToAuxInt(-1)
 29528  		return true
 29529  	}
 29530  	// match: (Sub8 (Add8 x y) x)
 29531  	// result: y
 29532  	for {
 29533  		if v_0.Op != OpAdd8 {
 29534  			break
 29535  		}
 29536  		_ = v_0.Args[1]
 29537  		v_0_0 := v_0.Args[0]
 29538  		v_0_1 := v_0.Args[1]
 29539  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 29540  			x := v_0_0
 29541  			y := v_0_1
 29542  			if x != v_1 {
 29543  				continue
 29544  			}
 29545  			v.copyOf(y)
 29546  			return true
 29547  		}
 29548  		break
 29549  	}
 29550  	// match: (Sub8 (Add8 x y) y)
 29551  	// result: x
 29552  	for {
 29553  		if v_0.Op != OpAdd8 {
 29554  			break
 29555  		}
 29556  		_ = v_0.Args[1]
 29557  		v_0_0 := v_0.Args[0]
 29558  		v_0_1 := v_0.Args[1]
 29559  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 29560  			x := v_0_0
 29561  			y := v_0_1
 29562  			if y != v_1 {
 29563  				continue
 29564  			}
 29565  			v.copyOf(x)
 29566  			return true
 29567  		}
 29568  		break
 29569  	}
 29570  	// match: (Sub8 (Sub8 x y) x)
 29571  	// result: (Neg8 y)
 29572  	for {
 29573  		if v_0.Op != OpSub8 {
 29574  			break
 29575  		}
 29576  		y := v_0.Args[1]
 29577  		x := v_0.Args[0]
 29578  		if x != v_1 {
 29579  			break
 29580  		}
 29581  		v.reset(OpNeg8)
 29582  		v.AddArg(y)
 29583  		return true
 29584  	}
 29585  	// match: (Sub8 x (Add8 x y))
 29586  	// result: (Neg8 y)
 29587  	for {
 29588  		x := v_0
 29589  		if v_1.Op != OpAdd8 {
 29590  			break
 29591  		}
 29592  		_ = v_1.Args[1]
 29593  		v_1_0 := v_1.Args[0]
 29594  		v_1_1 := v_1.Args[1]
 29595  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 29596  			if x != v_1_0 {
 29597  				continue
 29598  			}
 29599  			y := v_1_1
 29600  			v.reset(OpNeg8)
 29601  			v.AddArg(y)
 29602  			return true
 29603  		}
 29604  		break
 29605  	}
 29606  	// match: (Sub8 x (Sub8 i:(Const8 <t>) z))
 29607  	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
 29608  	// result: (Sub8 (Add8 <t> x z) i)
 29609  	for {
 29610  		x := v_0
 29611  		if v_1.Op != OpSub8 {
 29612  			break
 29613  		}
 29614  		z := v_1.Args[1]
 29615  		i := v_1.Args[0]
 29616  		if i.Op != OpConst8 {
 29617  			break
 29618  		}
 29619  		t := i.Type
 29620  		if !(z.Op != OpConst8 && x.Op != OpConst8) {
 29621  			break
 29622  		}
 29623  		v.reset(OpSub8)
 29624  		v0 := b.NewValue0(v.Pos, OpAdd8, t)
 29625  		v0.AddArg2(x, z)
 29626  		v.AddArg2(v0, i)
 29627  		return true
 29628  	}
 29629  	// match: (Sub8 x (Add8 z i:(Const8 <t>)))
 29630  	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
 29631  	// result: (Sub8 (Sub8 <t> x z) i)
 29632  	for {
 29633  		x := v_0
 29634  		if v_1.Op != OpAdd8 {
 29635  			break
 29636  		}
 29637  		_ = v_1.Args[1]
 29638  		v_1_0 := v_1.Args[0]
 29639  		v_1_1 := v_1.Args[1]
 29640  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 29641  			z := v_1_0
 29642  			i := v_1_1
 29643  			if i.Op != OpConst8 {
 29644  				continue
 29645  			}
 29646  			t := i.Type
 29647  			if !(z.Op != OpConst8 && x.Op != OpConst8) {
 29648  				continue
 29649  			}
 29650  			v.reset(OpSub8)
 29651  			v0 := b.NewValue0(v.Pos, OpSub8, t)
 29652  			v0.AddArg2(x, z)
 29653  			v.AddArg2(v0, i)
 29654  			return true
 29655  		}
 29656  		break
 29657  	}
 29658  	// match: (Sub8 (Sub8 i:(Const8 <t>) z) x)
 29659  	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
 29660  	// result: (Sub8 i (Add8 <t> z x))
 29661  	for {
 29662  		if v_0.Op != OpSub8 {
 29663  			break
 29664  		}
 29665  		z := v_0.Args[1]
 29666  		i := v_0.Args[0]
 29667  		if i.Op != OpConst8 {
 29668  			break
 29669  		}
 29670  		t := i.Type
 29671  		x := v_1
 29672  		if !(z.Op != OpConst8 && x.Op != OpConst8) {
 29673  			break
 29674  		}
 29675  		v.reset(OpSub8)
 29676  		v0 := b.NewValue0(v.Pos, OpAdd8, t)
 29677  		v0.AddArg2(z, x)
 29678  		v.AddArg2(i, v0)
 29679  		return true
 29680  	}
 29681  	// match: (Sub8 (Add8 z i:(Const8 <t>)) x)
 29682  	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
 29683  	// result: (Add8 i (Sub8 <t> z x))
 29684  	for {
 29685  		if v_0.Op != OpAdd8 {
 29686  			break
 29687  		}
 29688  		_ = v_0.Args[1]
 29689  		v_0_0 := v_0.Args[0]
 29690  		v_0_1 := v_0.Args[1]
 29691  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 29692  			z := v_0_0
 29693  			i := v_0_1
 29694  			if i.Op != OpConst8 {
 29695  				continue
 29696  			}
 29697  			t := i.Type
 29698  			x := v_1
 29699  			if !(z.Op != OpConst8 && x.Op != OpConst8) {
 29700  				continue
 29701  			}
 29702  			v.reset(OpAdd8)
 29703  			v0 := b.NewValue0(v.Pos, OpSub8, t)
 29704  			v0.AddArg2(z, x)
 29705  			v.AddArg2(i, v0)
 29706  			return true
 29707  		}
 29708  		break
 29709  	}
 29710  	// match: (Sub8 (Const8 <t> [c]) (Sub8 (Const8 <t> [d]) x))
 29711  	// result: (Add8 (Const8 <t> [c-d]) x)
 29712  	for {
 29713  		if v_0.Op != OpConst8 {
 29714  			break
 29715  		}
 29716  		t := v_0.Type
 29717  		c := auxIntToInt8(v_0.AuxInt)
 29718  		if v_1.Op != OpSub8 {
 29719  			break
 29720  		}
 29721  		x := v_1.Args[1]
 29722  		v_1_0 := v_1.Args[0]
 29723  		if v_1_0.Op != OpConst8 || v_1_0.Type != t {
 29724  			break
 29725  		}
 29726  		d := auxIntToInt8(v_1_0.AuxInt)
 29727  		v.reset(OpAdd8)
 29728  		v0 := b.NewValue0(v.Pos, OpConst8, t)
 29729  		v0.AuxInt = int8ToAuxInt(c - d)
 29730  		v.AddArg2(v0, x)
 29731  		return true
 29732  	}
 29733  	// match: (Sub8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x))
 29734  	// result: (Sub8 (Const8 <t> [c-d]) x)
 29735  	for {
 29736  		if v_0.Op != OpConst8 {
 29737  			break
 29738  		}
 29739  		t := v_0.Type
 29740  		c := auxIntToInt8(v_0.AuxInt)
 29741  		if v_1.Op != OpAdd8 {
 29742  			break
 29743  		}
 29744  		_ = v_1.Args[1]
 29745  		v_1_0 := v_1.Args[0]
 29746  		v_1_1 := v_1.Args[1]
 29747  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 29748  			if v_1_0.Op != OpConst8 || v_1_0.Type != t {
 29749  				continue
 29750  			}
 29751  			d := auxIntToInt8(v_1_0.AuxInt)
 29752  			x := v_1_1
 29753  			v.reset(OpSub8)
 29754  			v0 := b.NewValue0(v.Pos, OpConst8, t)
 29755  			v0.AuxInt = int8ToAuxInt(c - d)
 29756  			v.AddArg2(v0, x)
 29757  			return true
 29758  		}
 29759  		break
 29760  	}
 29761  	return false
 29762  }
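// rewriteValuegeneric_OpTrunc folds Trunc of a float64 constant via math.Trunc.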
 29763  func rewriteValuegeneric_OpTrunc(v *Value) bool {
 29764  	v_0 := v.Args[0]
 29765  	// match: (Trunc (Const64F [c]))
 29766  	// result: (Const64F [math.Trunc(c)])
 29767  	for {
 29768  		if v_0.Op != OpConst64F {
 29769  			break
 29770  		}
 29771  		c := auxIntToFloat64(v_0.AuxInt)
 29772  		v.reset(OpConst64F)
 29773  		v.AuxInt = float64ToAuxInt(math.Trunc(c))
 29774  		return true
 29775  	}
 29776  	return false
 29777  }
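// rewriteValuegeneric_OpTrunc16to8 simplifies 16-to-8-bit truncation: it folds constants,
// cancels a preceding ZeroExt8to16 or SignExt8to16, and drops an And16 mask whose low
// eight bits are all ones, since such a mask cannot change the truncated byte.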
 29778  func rewriteValuegeneric_OpTrunc16to8(v *Value) bool {
 29779  	v_0 := v.Args[0]
 29780  	// match: (Trunc16to8 (Const16 [c]))
 29781  	// result: (Const8 [int8(c)])
 29782  	for {
 29783  		if v_0.Op != OpConst16 {
 29784  			break
 29785  		}
 29786  		c := auxIntToInt16(v_0.AuxInt)
 29787  		v.reset(OpConst8)
 29788  		v.AuxInt = int8ToAuxInt(int8(c))
 29789  		return true
 29790  	}
 29791  	// match: (Trunc16to8 (ZeroExt8to16 x))
 29792  	// result: x
 29793  	for {
 29794  		if v_0.Op != OpZeroExt8to16 {
 29795  			break
 29796  		}
 29797  		x := v_0.Args[0]
 29798  		v.copyOf(x)
 29799  		return true
 29800  	}
 29801  	// match: (Trunc16to8 (SignExt8to16 x))
 29802  	// result: x
 29803  	for {
 29804  		if v_0.Op != OpSignExt8to16 {
 29805  			break
 29806  		}
 29807  		x := v_0.Args[0]
 29808  		v.copyOf(x)
 29809  		return true
 29810  	}
 29811  	// match: (Trunc16to8 (And16 (Const16 [y]) x))
 29812  	// cond: y&0xFF == 0xFF
 29813  	// result: (Trunc16to8 x)
 29814  	for {
 29815  		if v_0.Op != OpAnd16 {
 29816  			break
 29817  		}
 29818  		_ = v_0.Args[1]
 29819  		v_0_0 := v_0.Args[0]
 29820  		v_0_1 := v_0.Args[1]
 29821  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 29822  			if v_0_0.Op != OpConst16 {
 29823  				continue
 29824  			}
 29825  			y := auxIntToInt16(v_0_0.AuxInt)
 29826  			x := v_0_1
 29827  			if !(y&0xFF == 0xFF) {
 29828  				continue
 29829  			}
 29830  			v.reset(OpTrunc16to8)
 29831  			v.AddArg(x)
 29832  			return true
 29833  		}
 29834  		break
 29835  	}
 29836  	return false
 29837  }
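// rewriteValuegeneric_OpTrunc32to16 simplifies 32-to-16-bit truncation: it folds constants,
// narrows truncation of an 8-bit extension to ZeroExt8to16/SignExt8to16, cancels an
// extension that started from 16 bits, and drops an And32 mask whose low sixteen bits
// are all ones.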
 29838  func rewriteValuegeneric_OpTrunc32to16(v *Value) bool {
 29839  	v_0 := v.Args[0]
 29840  	// match: (Trunc32to16 (Const32 [c]))
 29841  	// result: (Const16 [int16(c)])
 29842  	for {
 29843  		if v_0.Op != OpConst32 {
 29844  			break
 29845  		}
 29846  		c := auxIntToInt32(v_0.AuxInt)
 29847  		v.reset(OpConst16)
 29848  		v.AuxInt = int16ToAuxInt(int16(c))
 29849  		return true
 29850  	}
 29851  	// match: (Trunc32to16 (ZeroExt8to32 x))
 29852  	// result: (ZeroExt8to16 x)
 29853  	for {
 29854  		if v_0.Op != OpZeroExt8to32 {
 29855  			break
 29856  		}
 29857  		x := v_0.Args[0]
 29858  		v.reset(OpZeroExt8to16)
 29859  		v.AddArg(x)
 29860  		return true
 29861  	}
 29862  	// match: (Trunc32to16 (ZeroExt16to32 x))
 29863  	// result: x
 29864  	for {
 29865  		if v_0.Op != OpZeroExt16to32 {
 29866  			break
 29867  		}
 29868  		x := v_0.Args[0]
 29869  		v.copyOf(x)
 29870  		return true
 29871  	}
 29872  	// match: (Trunc32to16 (SignExt8to32 x))
 29873  	// result: (SignExt8to16 x)
 29874  	for {
 29875  		if v_0.Op != OpSignExt8to32 {
 29876  			break
 29877  		}
 29878  		x := v_0.Args[0]
 29879  		v.reset(OpSignExt8to16)
 29880  		v.AddArg(x)
 29881  		return true
 29882  	}
 29883  	// match: (Trunc32to16 (SignExt16to32 x))
 29884  	// result: x
 29885  	for {
 29886  		if v_0.Op != OpSignExt16to32 {
 29887  			break
 29888  		}
 29889  		x := v_0.Args[0]
 29890  		v.copyOf(x)
 29891  		return true
 29892  	}
 29893  	// match: (Trunc32to16 (And32 (Const32 [y]) x))
 29894  	// cond: y&0xFFFF == 0xFFFF
 29895  	// result: (Trunc32to16 x)
 29896  	for {
 29897  		if v_0.Op != OpAnd32 {
 29898  			break
 29899  		}
 29900  		_ = v_0.Args[1]
 29901  		v_0_0 := v_0.Args[0]
 29902  		v_0_1 := v_0.Args[1]
 29903  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 29904  			if v_0_0.Op != OpConst32 {
 29905  				continue
 29906  			}
 29907  			y := auxIntToInt32(v_0_0.AuxInt)
 29908  			x := v_0_1
 29909  			if !(y&0xFFFF == 0xFFFF) {
 29910  				continue
 29911  			}
 29912  			v.reset(OpTrunc32to16)
 29913  			v.AddArg(x)
 29914  			return true
 29915  		}
 29916  		break
 29917  	}
 29918  	return false
 29919  }
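// rewriteValuegeneric_OpTrunc32to8 simplifies 32-to-8-bit truncation: it folds constants,
// cancels a preceding ZeroExt8to32 or SignExt8to32, and drops an And32 mask whose low
// eight bits are all ones.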
 29920  func rewriteValuegeneric_OpTrunc32to8(v *Value) bool {
 29921  	v_0 := v.Args[0]
 29922  	// match: (Trunc32to8 (Const32 [c]))
 29923  	// result: (Const8 [int8(c)])
 29924  	for {
 29925  		if v_0.Op != OpConst32 {
 29926  			break
 29927  		}
 29928  		c := auxIntToInt32(v_0.AuxInt)
 29929  		v.reset(OpConst8)
 29930  		v.AuxInt = int8ToAuxInt(int8(c))
 29931  		return true
 29932  	}
 29933  	// match: (Trunc32to8 (ZeroExt8to32 x))
 29934  	// result: x
 29935  	for {
 29936  		if v_0.Op != OpZeroExt8to32 {
 29937  			break
 29938  		}
 29939  		x := v_0.Args[0]
 29940  		v.copyOf(x)
 29941  		return true
 29942  	}
 29943  	// match: (Trunc32to8 (SignExt8to32 x))
 29944  	// result: x
 29945  	for {
 29946  		if v_0.Op != OpSignExt8to32 {
 29947  			break
 29948  		}
 29949  		x := v_0.Args[0]
 29950  		v.copyOf(x)
 29951  		return true
 29952  	}
 29953  	// match: (Trunc32to8 (And32 (Const32 [y]) x))
 29954  	// cond: y&0xFF == 0xFF
 29955  	// result: (Trunc32to8 x)
 29956  	for {
 29957  		if v_0.Op != OpAnd32 {
 29958  			break
 29959  		}
 29960  		_ = v_0.Args[1]
 29961  		v_0_0 := v_0.Args[0]
 29962  		v_0_1 := v_0.Args[1]
 29963  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 29964  			if v_0_0.Op != OpConst32 {
 29965  				continue
 29966  			}
 29967  			y := auxIntToInt32(v_0_0.AuxInt)
 29968  			x := v_0_1
 29969  			if !(y&0xFF == 0xFF) {
 29970  				continue
 29971  			}
 29972  			v.reset(OpTrunc32to8)
 29973  			v.AddArg(x)
 29974  			return true
 29975  		}
 29976  		break
 29977  	}
 29978  	return false
 29979  }
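// rewriteValuegeneric_OpTrunc64to16 simplifies 64-to-16-bit truncation: it folds constants,
// narrows truncation of an 8-bit extension to ZeroExt8to16/SignExt8to16, cancels an
// extension that started from 16 bits, and drops an And64 mask whose low sixteen bits
// are all ones.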
 29980  func rewriteValuegeneric_OpTrunc64to16(v *Value) bool {
 29981  	v_0 := v.Args[0]
 29982  	// match: (Trunc64to16 (Const64 [c]))
 29983  	// result: (Const16 [int16(c)])
 29984  	for {
 29985  		if v_0.Op != OpConst64 {
 29986  			break
 29987  		}
 29988  		c := auxIntToInt64(v_0.AuxInt)
 29989  		v.reset(OpConst16)
 29990  		v.AuxInt = int16ToAuxInt(int16(c))
 29991  		return true
 29992  	}
 29993  	// match: (Trunc64to16 (ZeroExt8to64 x))
 29994  	// result: (ZeroExt8to16 x)
 29995  	for {
 29996  		if v_0.Op != OpZeroExt8to64 {
 29997  			break
 29998  		}
 29999  		x := v_0.Args[0]
 30000  		v.reset(OpZeroExt8to16)
 30001  		v.AddArg(x)
 30002  		return true
 30003  	}
 30004  	// match: (Trunc64to16 (ZeroExt16to64 x))
 30005  	// result: x
 30006  	for {
 30007  		if v_0.Op != OpZeroExt16to64 {
 30008  			break
 30009  		}
 30010  		x := v_0.Args[0]
 30011  		v.copyOf(x)
 30012  		return true
 30013  	}
 30014  	// match: (Trunc64to16 (SignExt8to64 x))
 30015  	// result: (SignExt8to16 x)
 30016  	for {
 30017  		if v_0.Op != OpSignExt8to64 {
 30018  			break
 30019  		}
 30020  		x := v_0.Args[0]
 30021  		v.reset(OpSignExt8to16)
 30022  		v.AddArg(x)
 30023  		return true
 30024  	}
 30025  	// match: (Trunc64to16 (SignExt16to64 x))
 30026  	// result: x
 30027  	for {
 30028  		if v_0.Op != OpSignExt16to64 {
 30029  			break
 30030  		}
 30031  		x := v_0.Args[0]
 30032  		v.copyOf(x)
 30033  		return true
 30034  	}
 30035  	// match: (Trunc64to16 (And64 (Const64 [y]) x))
 30036  	// cond: y&0xFFFF == 0xFFFF
 30037  	// result: (Trunc64to16 x)
 30038  	for {
 30039  		if v_0.Op != OpAnd64 {
 30040  			break
 30041  		}
 30042  		_ = v_0.Args[1]
 30043  		v_0_0 := v_0.Args[0]
 30044  		v_0_1 := v_0.Args[1]
 30045  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 30046  			if v_0_0.Op != OpConst64 {
 30047  				continue
 30048  			}
 30049  			y := auxIntToInt64(v_0_0.AuxInt)
 30050  			x := v_0_1
 30051  			if !(y&0xFFFF == 0xFFFF) {
 30052  				continue
 30053  			}
 30054  			v.reset(OpTrunc64to16)
 30055  			v.AddArg(x)
 30056  			return true
 30057  		}
 30058  		break
 30059  	}
 30060  	return false
 30061  }
 30062  func rewriteValuegeneric_OpTrunc64to32(v *Value) bool {
 30063  	v_0 := v.Args[0]
 30064  	// match: (Trunc64to32 (Const64 [c]))
 30065  	// result: (Const32 [int32(c)])
 30066  	for {
 30067  		if v_0.Op != OpConst64 {
 30068  			break
 30069  		}
 30070  		c := auxIntToInt64(v_0.AuxInt)
 30071  		v.reset(OpConst32)
 30072  		v.AuxInt = int32ToAuxInt(int32(c))
 30073  		return true
 30074  	}
 30075  	// match: (Trunc64to32 (ZeroExt8to64 x))
 30076  	// result: (ZeroExt8to32 x)
 30077  	for {
 30078  		if v_0.Op != OpZeroExt8to64 {
 30079  			break
 30080  		}
 30081  		x := v_0.Args[0]
 30082  		v.reset(OpZeroExt8to32)
 30083  		v.AddArg(x)
 30084  		return true
 30085  	}
 30086  	// match: (Trunc64to32 (ZeroExt16to64 x))
 30087  	// result: (ZeroExt16to32 x)
 30088  	for {
 30089  		if v_0.Op != OpZeroExt16to64 {
 30090  			break
 30091  		}
 30092  		x := v_0.Args[0]
 30093  		v.reset(OpZeroExt16to32)
 30094  		v.AddArg(x)
 30095  		return true
 30096  	}
 30097  	// match: (Trunc64to32 (ZeroExt32to64 x))
 30098  	// result: x
 30099  	for {
 30100  		if v_0.Op != OpZeroExt32to64 {
 30101  			break
 30102  		}
 30103  		x := v_0.Args[0]
 30104  		v.copyOf(x)
 30105  		return true
 30106  	}
 30107  	// match: (Trunc64to32 (SignExt8to64 x))
 30108  	// result: (SignExt8to32 x)
 30109  	for {
 30110  		if v_0.Op != OpSignExt8to64 {
 30111  			break
 30112  		}
 30113  		x := v_0.Args[0]
 30114  		v.reset(OpSignExt8to32)
 30115  		v.AddArg(x)
 30116  		return true
 30117  	}
 30118  	// match: (Trunc64to32 (SignExt16to64 x))
 30119  	// result: (SignExt16to32 x)
 30120  	for {
 30121  		if v_0.Op != OpSignExt16to64 {
 30122  			break
 30123  		}
 30124  		x := v_0.Args[0]
 30125  		v.reset(OpSignExt16to32)
 30126  		v.AddArg(x)
 30127  		return true
 30128  	}
 30129  	// match: (Trunc64to32 (SignExt32to64 x))
 30130  	// result: x
 30131  	for {
 30132  		if v_0.Op != OpSignExt32to64 {
 30133  			break
 30134  		}
 30135  		x := v_0.Args[0]
 30136  		v.copyOf(x)
 30137  		return true
 30138  	}
 30139  	// match: (Trunc64to32 (And64 (Const64 [y]) x))
 30140  	// cond: y&0xFFFFFFFF == 0xFFFFFFFF
 30141  	// result: (Trunc64to32 x)
 30142  	for {
 30143  		if v_0.Op != OpAnd64 {
 30144  			break
 30145  		}
 30146  		_ = v_0.Args[1]
 30147  		v_0_0 := v_0.Args[0]
 30148  		v_0_1 := v_0.Args[1]
 30149  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 30150  			if v_0_0.Op != OpConst64 {
 30151  				continue
 30152  			}
 30153  			y := auxIntToInt64(v_0_0.AuxInt)
 30154  			x := v_0_1
 30155  			if !(y&0xFFFFFFFF == 0xFFFFFFFF) {
 30156  				continue
 30157  			}
 30158  			v.reset(OpTrunc64to32)
 30159  			v.AddArg(x)
 30160  			return true
 30161  		}
 30162  		break
 30163  	}
 30164  	return false
 30165  }
 30166  func rewriteValuegeneric_OpTrunc64to8(v *Value) bool {
 30167  	v_0 := v.Args[0]
 30168  	// match: (Trunc64to8 (Const64 [c]))
 30169  	// result: (Const8 [int8(c)])
 30170  	for {
 30171  		if v_0.Op != OpConst64 {
 30172  			break
 30173  		}
 30174  		c := auxIntToInt64(v_0.AuxInt)
 30175  		v.reset(OpConst8)
 30176  		v.AuxInt = int8ToAuxInt(int8(c))
 30177  		return true
 30178  	}
 30179  	// match: (Trunc64to8 (ZeroExt8to64 x))
 30180  	// result: x
 30181  	for {
 30182  		if v_0.Op != OpZeroExt8to64 {
 30183  			break
 30184  		}
 30185  		x := v_0.Args[0]
 30186  		v.copyOf(x)
 30187  		return true
 30188  	}
 30189  	// match: (Trunc64to8 (SignExt8to64 x))
 30190  	// result: x
 30191  	for {
 30192  		if v_0.Op != OpSignExt8to64 {
 30193  			break
 30194  		}
 30195  		x := v_0.Args[0]
 30196  		v.copyOf(x)
 30197  		return true
 30198  	}
 30199  	// match: (Trunc64to8 (And64 (Const64 [y]) x))
 30200  	// cond: y&0xFF == 0xFF
 30201  	// result: (Trunc64to8 x)
 30202  	for {
 30203  		if v_0.Op != OpAnd64 {
 30204  			break
 30205  		}
 30206  		_ = v_0.Args[1]
 30207  		v_0_0 := v_0.Args[0]
 30208  		v_0_1 := v_0.Args[1]
 30209  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 30210  			if v_0_0.Op != OpConst64 {
 30211  				continue
 30212  			}
 30213  			y := auxIntToInt64(v_0_0.AuxInt)
 30214  			x := v_0_1
 30215  			if !(y&0xFF == 0xFF) {
 30216  				continue
 30217  			}
 30218  			v.reset(OpTrunc64to8)
 30219  			v.AddArg(x)
 30220  			return true
 30221  		}
 30222  		break
 30223  	}
 30224  	return false
 30225  }
 30226  func rewriteValuegeneric_OpXor16(v *Value) bool {
 30227  	v_1 := v.Args[1]
 30228  	v_0 := v.Args[0]
 30229  	b := v.Block
 30230  	config := b.Func.Config
 30231  	// match: (Xor16 (Const16 [c]) (Const16 [d]))
 30232  	// result: (Const16 [c^d])
 30233  	for {
 30234  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 30235  			if v_0.Op != OpConst16 {
 30236  				continue
 30237  			}
 30238  			c := auxIntToInt16(v_0.AuxInt)
 30239  			if v_1.Op != OpConst16 {
 30240  				continue
 30241  			}
 30242  			d := auxIntToInt16(v_1.AuxInt)
 30243  			v.reset(OpConst16)
 30244  			v.AuxInt = int16ToAuxInt(c ^ d)
 30245  			return true
 30246  		}
 30247  		break
 30248  	}
 30249  	// match: (Xor16 x x)
 30250  	// result: (Const16 [0])
 30251  	for {
 30252  		x := v_0
 30253  		if x != v_1 {
 30254  			break
 30255  		}
 30256  		v.reset(OpConst16)
 30257  		v.AuxInt = int16ToAuxInt(0)
 30258  		return true
 30259  	}
 30260  	// match: (Xor16 (Const16 [0]) x)
 30261  	// result: x
 30262  	for {
 30263  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 30264  			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
 30265  				continue
 30266  			}
 30267  			x := v_1
 30268  			v.copyOf(x)
 30269  			return true
 30270  		}
 30271  		break
 30272  	}
 30273  	// match: (Xor16 (Com16 x) x)
 30274  	// result: (Const16 [-1])
 30275  	for {
 30276  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 30277  			if v_0.Op != OpCom16 {
 30278  				continue
 30279  			}
 30280  			x := v_0.Args[0]
 30281  			if x != v_1 {
 30282  				continue
 30283  			}
 30284  			v.reset(OpConst16)
 30285  			v.AuxInt = int16ToAuxInt(-1)
 30286  			return true
 30287  		}
 30288  		break
 30289  	}
 30290  	// match: (Xor16 (Const16 [-1]) x)
 30291  	// result: (Com16 x)
 30292  	for {
 30293  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 30294  			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != -1 {
 30295  				continue
 30296  			}
 30297  			x := v_1
 30298  			v.reset(OpCom16)
 30299  			v.AddArg(x)
 30300  			return true
 30301  		}
 30302  		break
 30303  	}
 30304  	// match: (Xor16 x (Xor16 x y))
 30305  	// result: y
 30306  	for {
 30307  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 30308  			x := v_0
 30309  			if v_1.Op != OpXor16 {
 30310  				continue
 30311  			}
 30312  			_ = v_1.Args[1]
 30313  			v_1_0 := v_1.Args[0]
 30314  			v_1_1 := v_1.Args[1]
 30315  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 30316  				if x != v_1_0 {
 30317  					continue
 30318  				}
 30319  				y := v_1_1
 30320  				v.copyOf(y)
 30321  				return true
 30322  			}
 30323  		}
 30324  		break
 30325  	}
 30326  	// match: (Xor16 (Xor16 i:(Const16 <t>) z) x)
 30327  	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
 30328  	// result: (Xor16 i (Xor16 <t> z x))
 30329  	for {
 30330  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 30331  			if v_0.Op != OpXor16 {
 30332  				continue
 30333  			}
 30334  			_ = v_0.Args[1]
 30335  			v_0_0 := v_0.Args[0]
 30336  			v_0_1 := v_0.Args[1]
 30337  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
 30338  				i := v_0_0
 30339  				if i.Op != OpConst16 {
 30340  					continue
 30341  				}
 30342  				t := i.Type
 30343  				z := v_0_1
 30344  				x := v_1
 30345  				if !(z.Op != OpConst16 && x.Op != OpConst16) {
 30346  					continue
 30347  				}
 30348  				v.reset(OpXor16)
 30349  				v0 := b.NewValue0(v.Pos, OpXor16, t)
 30350  				v0.AddArg2(z, x)
 30351  				v.AddArg2(i, v0)
 30352  				return true
 30353  			}
 30354  		}
 30355  		break
 30356  	}
 30357  	// match: (Xor16 (Const16 <t> [c]) (Xor16 (Const16 <t> [d]) x))
 30358  	// result: (Xor16 (Const16 <t> [c^d]) x)
 30359  	for {
 30360  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 30361  			if v_0.Op != OpConst16 {
 30362  				continue
 30363  			}
 30364  			t := v_0.Type
 30365  			c := auxIntToInt16(v_0.AuxInt)
 30366  			if v_1.Op != OpXor16 {
 30367  				continue
 30368  			}
 30369  			_ = v_1.Args[1]
 30370  			v_1_0 := v_1.Args[0]
 30371  			v_1_1 := v_1.Args[1]
 30372  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 30373  				if v_1_0.Op != OpConst16 || v_1_0.Type != t {
 30374  					continue
 30375  				}
 30376  				d := auxIntToInt16(v_1_0.AuxInt)
 30377  				x := v_1_1
 30378  				v.reset(OpXor16)
 30379  				v0 := b.NewValue0(v.Pos, OpConst16, t)
 30380  				v0.AuxInt = int16ToAuxInt(c ^ d)
 30381  				v.AddArg2(v0, x)
 30382  				return true
 30383  			}
 30384  		}
 30385  		break
 30386  	}
 30387  	// match: (Xor16 (Lsh16x64 x z:(Const64 <t> [c])) (Rsh16Ux64 x (Const64 [d])))
 30388  	// cond: c < 16 && d == 16-c && canRotate(config, 16)
 30389  	// result: (RotateLeft16 x z)
 30390  	for {
 30391  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 30392  			if v_0.Op != OpLsh16x64 {
 30393  				continue
 30394  			}
 30395  			_ = v_0.Args[1]
 30396  			x := v_0.Args[0]
 30397  			z := v_0.Args[1]
 30398  			if z.Op != OpConst64 {
 30399  				continue
 30400  			}
 30401  			c := auxIntToInt64(z.AuxInt)
 30402  			if v_1.Op != OpRsh16Ux64 {
 30403  				continue
 30404  			}
 30405  			_ = v_1.Args[1]
 30406  			if x != v_1.Args[0] {
 30407  				continue
 30408  			}
 30409  			v_1_1 := v_1.Args[1]
 30410  			if v_1_1.Op != OpConst64 {
 30411  				continue
 30412  			}
 30413  			d := auxIntToInt64(v_1_1.AuxInt)
 30414  			if !(c < 16 && d == 16-c && canRotate(config, 16)) {
 30415  				continue
 30416  			}
 30417  			v.reset(OpRotateLeft16)
 30418  			v.AddArg2(x, z)
 30419  			return true
 30420  		}
 30421  		break
 30422  	}
 30423  	// match: (Xor16 left:(Lsh16x64 x y) right:(Rsh16Ux64 x (Sub64 (Const64 [16]) y)))
 30424  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
 30425  	// result: (RotateLeft16 x y)
 30426  	for {
 30427  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 30428  			left := v_0
 30429  			if left.Op != OpLsh16x64 {
 30430  				continue
 30431  			}
 30432  			y := left.Args[1]
 30433  			x := left.Args[0]
 30434  			right := v_1
 30435  			if right.Op != OpRsh16Ux64 {
 30436  				continue
 30437  			}
 30438  			_ = right.Args[1]
 30439  			if x != right.Args[0] {
 30440  				continue
 30441  			}
 30442  			right_1 := right.Args[1]
 30443  			if right_1.Op != OpSub64 {
 30444  				continue
 30445  			}
 30446  			_ = right_1.Args[1]
 30447  			right_1_0 := right_1.Args[0]
 30448  			if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
 30449  				continue
 30450  			}
 30451  			v.reset(OpRotateLeft16)
 30452  			v.AddArg2(x, y)
 30453  			return true
 30454  		}
 30455  		break
 30456  	}
 30457  	// match: (Xor16 left:(Lsh16x32 x y) right:(Rsh16Ux32 x (Sub32 (Const32 [16]) y)))
 30458  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
 30459  	// result: (RotateLeft16 x y)
 30460  	for {
 30461  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 30462  			left := v_0
 30463  			if left.Op != OpLsh16x32 {
 30464  				continue
 30465  			}
 30466  			y := left.Args[1]
 30467  			x := left.Args[0]
 30468  			right := v_1
 30469  			if right.Op != OpRsh16Ux32 {
 30470  				continue
 30471  			}
 30472  			_ = right.Args[1]
 30473  			if x != right.Args[0] {
 30474  				continue
 30475  			}
 30476  			right_1 := right.Args[1]
 30477  			if right_1.Op != OpSub32 {
 30478  				continue
 30479  			}
 30480  			_ = right_1.Args[1]
 30481  			right_1_0 := right_1.Args[0]
 30482  			if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
 30483  				continue
 30484  			}
 30485  			v.reset(OpRotateLeft16)
 30486  			v.AddArg2(x, y)
 30487  			return true
 30488  		}
 30489  		break
 30490  	}
 30491  	// match: (Xor16 left:(Lsh16x16 x y) right:(Rsh16Ux16 x (Sub16 (Const16 [16]) y)))
 30492  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
 30493  	// result: (RotateLeft16 x y)
 30494  	for {
 30495  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 30496  			left := v_0
 30497  			if left.Op != OpLsh16x16 {
 30498  				continue
 30499  			}
 30500  			y := left.Args[1]
 30501  			x := left.Args[0]
 30502  			right := v_1
 30503  			if right.Op != OpRsh16Ux16 {
 30504  				continue
 30505  			}
 30506  			_ = right.Args[1]
 30507  			if x != right.Args[0] {
 30508  				continue
 30509  			}
 30510  			right_1 := right.Args[1]
 30511  			if right_1.Op != OpSub16 {
 30512  				continue
 30513  			}
 30514  			_ = right_1.Args[1]
 30515  			right_1_0 := right_1.Args[0]
 30516  			if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
 30517  				continue
 30518  			}
 30519  			v.reset(OpRotateLeft16)
 30520  			v.AddArg2(x, y)
 30521  			return true
 30522  		}
 30523  		break
 30524  	}
 30525  	// match: (Xor16 left:(Lsh16x8 x y) right:(Rsh16Ux8 x (Sub8 (Const8 [16]) y)))
 30526  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
 30527  	// result: (RotateLeft16 x y)
 30528  	for {
 30529  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 30530  			left := v_0
 30531  			if left.Op != OpLsh16x8 {
 30532  				continue
 30533  			}
 30534  			y := left.Args[1]
 30535  			x := left.Args[0]
 30536  			right := v_1
 30537  			if right.Op != OpRsh16Ux8 {
 30538  				continue
 30539  			}
 30540  			_ = right.Args[1]
 30541  			if x != right.Args[0] {
 30542  				continue
 30543  			}
 30544  			right_1 := right.Args[1]
 30545  			if right_1.Op != OpSub8 {
 30546  				continue
 30547  			}
 30548  			_ = right_1.Args[1]
 30549  			right_1_0 := right_1.Args[0]
 30550  			if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
 30551  				continue
 30552  			}
 30553  			v.reset(OpRotateLeft16)
 30554  			v.AddArg2(x, y)
 30555  			return true
 30556  		}
 30557  		break
 30558  	}
 30559  	// match: (Xor16 right:(Rsh16Ux64 x y) left:(Lsh16x64 x z:(Sub64 (Const64 [16]) y)))
 30560  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
 30561  	// result: (RotateLeft16 x z)
 30562  	for {
 30563  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 30564  			right := v_0
 30565  			if right.Op != OpRsh16Ux64 {
 30566  				continue
 30567  			}
 30568  			y := right.Args[1]
 30569  			x := right.Args[0]
 30570  			left := v_1
 30571  			if left.Op != OpLsh16x64 {
 30572  				continue
 30573  			}
 30574  			_ = left.Args[1]
 30575  			if x != left.Args[0] {
 30576  				continue
 30577  			}
 30578  			z := left.Args[1]
 30579  			if z.Op != OpSub64 {
 30580  				continue
 30581  			}
 30582  			_ = z.Args[1]
 30583  			z_0 := z.Args[0]
 30584  			if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
 30585  				continue
 30586  			}
 30587  			v.reset(OpRotateLeft16)
 30588  			v.AddArg2(x, z)
 30589  			return true
 30590  		}
 30591  		break
 30592  	}
 30593  	// match: (Xor16 right:(Rsh16Ux32 x y) left:(Lsh16x32 x z:(Sub32 (Const32 [16]) y)))
 30594  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
 30595  	// result: (RotateLeft16 x z)
 30596  	for {
 30597  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 30598  			right := v_0
 30599  			if right.Op != OpRsh16Ux32 {
 30600  				continue
 30601  			}
 30602  			y := right.Args[1]
 30603  			x := right.Args[0]
 30604  			left := v_1
 30605  			if left.Op != OpLsh16x32 {
 30606  				continue
 30607  			}
 30608  			_ = left.Args[1]
 30609  			if x != left.Args[0] {
 30610  				continue
 30611  			}
 30612  			z := left.Args[1]
 30613  			if z.Op != OpSub32 {
 30614  				continue
 30615  			}
 30616  			_ = z.Args[1]
 30617  			z_0 := z.Args[0]
 30618  			if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
 30619  				continue
 30620  			}
 30621  			v.reset(OpRotateLeft16)
 30622  			v.AddArg2(x, z)
 30623  			return true
 30624  		}
 30625  		break
 30626  	}
 30627  	// match: (Xor16 right:(Rsh16Ux16 x y) left:(Lsh16x16 x z:(Sub16 (Const16 [16]) y)))
 30628  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
 30629  	// result: (RotateLeft16 x z)
 30630  	for {
 30631  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 30632  			right := v_0
 30633  			if right.Op != OpRsh16Ux16 {
 30634  				continue
 30635  			}
 30636  			y := right.Args[1]
 30637  			x := right.Args[0]
 30638  			left := v_1
 30639  			if left.Op != OpLsh16x16 {
 30640  				continue
 30641  			}
 30642  			_ = left.Args[1]
 30643  			if x != left.Args[0] {
 30644  				continue
 30645  			}
 30646  			z := left.Args[1]
 30647  			if z.Op != OpSub16 {
 30648  				continue
 30649  			}
 30650  			_ = z.Args[1]
 30651  			z_0 := z.Args[0]
 30652  			if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
 30653  				continue
 30654  			}
 30655  			v.reset(OpRotateLeft16)
 30656  			v.AddArg2(x, z)
 30657  			return true
 30658  		}
 30659  		break
 30660  	}
 30661  	// match: (Xor16 right:(Rsh16Ux8 x y) left:(Lsh16x8 x z:(Sub8 (Const8 [16]) y)))
 30662  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
 30663  	// result: (RotateLeft16 x z)
 30664  	for {
 30665  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 30666  			right := v_0
 30667  			if right.Op != OpRsh16Ux8 {
 30668  				continue
 30669  			}
 30670  			y := right.Args[1]
 30671  			x := right.Args[0]
 30672  			left := v_1
 30673  			if left.Op != OpLsh16x8 {
 30674  				continue
 30675  			}
 30676  			_ = left.Args[1]
 30677  			if x != left.Args[0] {
 30678  				continue
 30679  			}
 30680  			z := left.Args[1]
 30681  			if z.Op != OpSub8 {
 30682  				continue
 30683  			}
 30684  			_ = z.Args[1]
 30685  			z_0 := z.Args[0]
 30686  			if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
 30687  				continue
 30688  			}
 30689  			v.reset(OpRotateLeft16)
 30690  			v.AddArg2(x, z)
 30691  			return true
 30692  		}
 30693  		break
 30694  	}
 30695  	return false
 30696  }
 30697  func rewriteValuegeneric_OpXor32(v *Value) bool {
 30698  	v_1 := v.Args[1]
 30699  	v_0 := v.Args[0]
 30700  	b := v.Block
 30701  	config := b.Func.Config
 30702  	// match: (Xor32 (Const32 [c]) (Const32 [d]))
 30703  	// result: (Const32 [c^d])
 30704  	for {
 30705  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 30706  			if v_0.Op != OpConst32 {
 30707  				continue
 30708  			}
 30709  			c := auxIntToInt32(v_0.AuxInt)
 30710  			if v_1.Op != OpConst32 {
 30711  				continue
 30712  			}
 30713  			d := auxIntToInt32(v_1.AuxInt)
 30714  			v.reset(OpConst32)
 30715  			v.AuxInt = int32ToAuxInt(c ^ d)
 30716  			return true
 30717  		}
 30718  		break
 30719  	}
 30720  	// match: (Xor32 x x)
 30721  	// result: (Const32 [0])
 30722  	for {
 30723  		x := v_0
 30724  		if x != v_1 {
 30725  			break
 30726  		}
 30727  		v.reset(OpConst32)
 30728  		v.AuxInt = int32ToAuxInt(0)
 30729  		return true
 30730  	}
 30731  	// match: (Xor32 (Const32 [0]) x)
 30732  	// result: x
 30733  	for {
 30734  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 30735  			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 30736  				continue
 30737  			}
 30738  			x := v_1
 30739  			v.copyOf(x)
 30740  			return true
 30741  		}
 30742  		break
 30743  	}
 30744  	// match: (Xor32 (Com32 x) x)
 30745  	// result: (Const32 [-1])
 30746  	for {
 30747  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 30748  			if v_0.Op != OpCom32 {
 30749  				continue
 30750  			}
 30751  			x := v_0.Args[0]
 30752  			if x != v_1 {
 30753  				continue
 30754  			}
 30755  			v.reset(OpConst32)
 30756  			v.AuxInt = int32ToAuxInt(-1)
 30757  			return true
 30758  		}
 30759  		break
 30760  	}
 30761  	// match: (Xor32 (Const32 [-1]) x)
 30762  	// result: (Com32 x)
 30763  	for {
 30764  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 30765  			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != -1 {
 30766  				continue
 30767  			}
 30768  			x := v_1
 30769  			v.reset(OpCom32)
 30770  			v.AddArg(x)
 30771  			return true
 30772  		}
 30773  		break
 30774  	}
 30775  	// match: (Xor32 x (Xor32 x y))
 30776  	// result: y
 30777  	for {
 30778  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 30779  			x := v_0
 30780  			if v_1.Op != OpXor32 {
 30781  				continue
 30782  			}
 30783  			_ = v_1.Args[1]
 30784  			v_1_0 := v_1.Args[0]
 30785  			v_1_1 := v_1.Args[1]
 30786  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 30787  				if x != v_1_0 {
 30788  					continue
 30789  				}
 30790  				y := v_1_1
 30791  				v.copyOf(y)
 30792  				return true
 30793  			}
 30794  		}
 30795  		break
 30796  	}
 30797  	// match: (Xor32 (Xor32 i:(Const32 <t>) z) x)
 30798  	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
 30799  	// result: (Xor32 i (Xor32 <t> z x))
 30800  	for {
 30801  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 30802  			if v_0.Op != OpXor32 {
 30803  				continue
 30804  			}
 30805  			_ = v_0.Args[1]
 30806  			v_0_0 := v_0.Args[0]
 30807  			v_0_1 := v_0.Args[1]
 30808  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
 30809  				i := v_0_0
 30810  				if i.Op != OpConst32 {
 30811  					continue
 30812  				}
 30813  				t := i.Type
 30814  				z := v_0_1
 30815  				x := v_1
 30816  				if !(z.Op != OpConst32 && x.Op != OpConst32) {
 30817  					continue
 30818  				}
 30819  				v.reset(OpXor32)
 30820  				v0 := b.NewValue0(v.Pos, OpXor32, t)
 30821  				v0.AddArg2(z, x)
 30822  				v.AddArg2(i, v0)
 30823  				return true
 30824  			}
 30825  		}
 30826  		break
 30827  	}
 30828  	// match: (Xor32 (Const32 <t> [c]) (Xor32 (Const32 <t> [d]) x))
 30829  	// result: (Xor32 (Const32 <t> [c^d]) x)
 30830  	for {
 30831  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 30832  			if v_0.Op != OpConst32 {
 30833  				continue
 30834  			}
 30835  			t := v_0.Type
 30836  			c := auxIntToInt32(v_0.AuxInt)
 30837  			if v_1.Op != OpXor32 {
 30838  				continue
 30839  			}
 30840  			_ = v_1.Args[1]
 30841  			v_1_0 := v_1.Args[0]
 30842  			v_1_1 := v_1.Args[1]
 30843  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 30844  				if v_1_0.Op != OpConst32 || v_1_0.Type != t {
 30845  					continue
 30846  				}
 30847  				d := auxIntToInt32(v_1_0.AuxInt)
 30848  				x := v_1_1
 30849  				v.reset(OpXor32)
 30850  				v0 := b.NewValue0(v.Pos, OpConst32, t)
 30851  				v0.AuxInt = int32ToAuxInt(c ^ d)
 30852  				v.AddArg2(v0, x)
 30853  				return true
 30854  			}
 30855  		}
 30856  		break
 30857  	}
 30858  	// match: (Xor32 (Lsh32x64 x z:(Const64 <t> [c])) (Rsh32Ux64 x (Const64 [d])))
 30859  	// cond: c < 32 && d == 32-c && canRotate(config, 32)
 30860  	// result: (RotateLeft32 x z)
 30861  	for {
 30862  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 30863  			if v_0.Op != OpLsh32x64 {
 30864  				continue
 30865  			}
 30866  			_ = v_0.Args[1]
 30867  			x := v_0.Args[0]
 30868  			z := v_0.Args[1]
 30869  			if z.Op != OpConst64 {
 30870  				continue
 30871  			}
 30872  			c := auxIntToInt64(z.AuxInt)
 30873  			if v_1.Op != OpRsh32Ux64 {
 30874  				continue
 30875  			}
 30876  			_ = v_1.Args[1]
 30877  			if x != v_1.Args[0] {
 30878  				continue
 30879  			}
 30880  			v_1_1 := v_1.Args[1]
 30881  			if v_1_1.Op != OpConst64 {
 30882  				continue
 30883  			}
 30884  			d := auxIntToInt64(v_1_1.AuxInt)
 30885  			if !(c < 32 && d == 32-c && canRotate(config, 32)) {
 30886  				continue
 30887  			}
 30888  			v.reset(OpRotateLeft32)
 30889  			v.AddArg2(x, z)
 30890  			return true
 30891  		}
 30892  		break
 30893  	}
 30894  	// match: (Xor32 left:(Lsh32x64 x y) right:(Rsh32Ux64 x (Sub64 (Const64 [32]) y)))
 30895  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
 30896  	// result: (RotateLeft32 x y)
 30897  	for {
 30898  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 30899  			left := v_0
 30900  			if left.Op != OpLsh32x64 {
 30901  				continue
 30902  			}
 30903  			y := left.Args[1]
 30904  			x := left.Args[0]
 30905  			right := v_1
 30906  			if right.Op != OpRsh32Ux64 {
 30907  				continue
 30908  			}
 30909  			_ = right.Args[1]
 30910  			if x != right.Args[0] {
 30911  				continue
 30912  			}
 30913  			right_1 := right.Args[1]
 30914  			if right_1.Op != OpSub64 {
 30915  				continue
 30916  			}
 30917  			_ = right_1.Args[1]
 30918  			right_1_0 := right_1.Args[0]
 30919  			if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
 30920  				continue
 30921  			}
 30922  			v.reset(OpRotateLeft32)
 30923  			v.AddArg2(x, y)
 30924  			return true
 30925  		}
 30926  		break
 30927  	}
 30928  	// match: (Xor32 left:(Lsh32x32 x y) right:(Rsh32Ux32 x (Sub32 (Const32 [32]) y)))
 30929  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
 30930  	// result: (RotateLeft32 x y)
 30931  	for {
 30932  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 30933  			left := v_0
 30934  			if left.Op != OpLsh32x32 {
 30935  				continue
 30936  			}
 30937  			y := left.Args[1]
 30938  			x := left.Args[0]
 30939  			right := v_1
 30940  			if right.Op != OpRsh32Ux32 {
 30941  				continue
 30942  			}
 30943  			_ = right.Args[1]
 30944  			if x != right.Args[0] {
 30945  				continue
 30946  			}
 30947  			right_1 := right.Args[1]
 30948  			if right_1.Op != OpSub32 {
 30949  				continue
 30950  			}
 30951  			_ = right_1.Args[1]
 30952  			right_1_0 := right_1.Args[0]
 30953  			if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
 30954  				continue
 30955  			}
 30956  			v.reset(OpRotateLeft32)
 30957  			v.AddArg2(x, y)
 30958  			return true
 30959  		}
 30960  		break
 30961  	}
 30962  	// match: (Xor32 left:(Lsh32x16 x y) right:(Rsh32Ux16 x (Sub16 (Const16 [32]) y)))
 30963  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
 30964  	// result: (RotateLeft32 x y)
 30965  	for {
 30966  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 30967  			left := v_0
 30968  			if left.Op != OpLsh32x16 {
 30969  				continue
 30970  			}
 30971  			y := left.Args[1]
 30972  			x := left.Args[0]
 30973  			right := v_1
 30974  			if right.Op != OpRsh32Ux16 {
 30975  				continue
 30976  			}
 30977  			_ = right.Args[1]
 30978  			if x != right.Args[0] {
 30979  				continue
 30980  			}
 30981  			right_1 := right.Args[1]
 30982  			if right_1.Op != OpSub16 {
 30983  				continue
 30984  			}
 30985  			_ = right_1.Args[1]
 30986  			right_1_0 := right_1.Args[0]
 30987  			if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
 30988  				continue
 30989  			}
 30990  			v.reset(OpRotateLeft32)
 30991  			v.AddArg2(x, y)
 30992  			return true
 30993  		}
 30994  		break
 30995  	}
 30996  	// match: (Xor32 left:(Lsh32x8 x y) right:(Rsh32Ux8 x (Sub8 (Const8 [32]) y)))
 30997  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
 30998  	// result: (RotateLeft32 x y)
 30999  	for {
 31000  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 31001  			left := v_0
 31002  			if left.Op != OpLsh32x8 {
 31003  				continue
 31004  			}
 31005  			y := left.Args[1]
 31006  			x := left.Args[0]
 31007  			right := v_1
 31008  			if right.Op != OpRsh32Ux8 {
 31009  				continue
 31010  			}
 31011  			_ = right.Args[1]
 31012  			if x != right.Args[0] {
 31013  				continue
 31014  			}
 31015  			right_1 := right.Args[1]
 31016  			if right_1.Op != OpSub8 {
 31017  				continue
 31018  			}
 31019  			_ = right_1.Args[1]
 31020  			right_1_0 := right_1.Args[0]
 31021  			if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
 31022  				continue
 31023  			}
 31024  			v.reset(OpRotateLeft32)
 31025  			v.AddArg2(x, y)
 31026  			return true
 31027  		}
 31028  		break
 31029  	}
 31030  	// match: (Xor32 right:(Rsh32Ux64 x y) left:(Lsh32x64 x z:(Sub64 (Const64 [32]) y)))
 31031  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
 31032  	// result: (RotateLeft32 x z)
 31033  	for {
 31034  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 31035  			right := v_0
 31036  			if right.Op != OpRsh32Ux64 {
 31037  				continue
 31038  			}
 31039  			y := right.Args[1]
 31040  			x := right.Args[0]
 31041  			left := v_1
 31042  			if left.Op != OpLsh32x64 {
 31043  				continue
 31044  			}
 31045  			_ = left.Args[1]
 31046  			if x != left.Args[0] {
 31047  				continue
 31048  			}
 31049  			z := left.Args[1]
 31050  			if z.Op != OpSub64 {
 31051  				continue
 31052  			}
 31053  			_ = z.Args[1]
 31054  			z_0 := z.Args[0]
 31055  			if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
 31056  				continue
 31057  			}
 31058  			v.reset(OpRotateLeft32)
 31059  			v.AddArg2(x, z)
 31060  			return true
 31061  		}
 31062  		break
 31063  	}
 31064  	// match: (Xor32 right:(Rsh32Ux32 x y) left:(Lsh32x32 x z:(Sub32 (Const32 [32]) y)))
 31065  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
 31066  	// result: (RotateLeft32 x z)
 31067  	for {
 31068  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 31069  			right := v_0
 31070  			if right.Op != OpRsh32Ux32 {
 31071  				continue
 31072  			}
 31073  			y := right.Args[1]
 31074  			x := right.Args[0]
 31075  			left := v_1
 31076  			if left.Op != OpLsh32x32 {
 31077  				continue
 31078  			}
 31079  			_ = left.Args[1]
 31080  			if x != left.Args[0] {
 31081  				continue
 31082  			}
 31083  			z := left.Args[1]
 31084  			if z.Op != OpSub32 {
 31085  				continue
 31086  			}
 31087  			_ = z.Args[1]
 31088  			z_0 := z.Args[0]
 31089  			if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
 31090  				continue
 31091  			}
 31092  			v.reset(OpRotateLeft32)
 31093  			v.AddArg2(x, z)
 31094  			return true
 31095  		}
 31096  		break
 31097  	}
 31098  	// match: (Xor32 right:(Rsh32Ux16 x y) left:(Lsh32x16 x z:(Sub16 (Const16 [32]) y)))
 31099  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
 31100  	// result: (RotateLeft32 x z)
 31101  	for {
 31102  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 31103  			right := v_0
 31104  			if right.Op != OpRsh32Ux16 {
 31105  				continue
 31106  			}
 31107  			y := right.Args[1]
 31108  			x := right.Args[0]
 31109  			left := v_1
 31110  			if left.Op != OpLsh32x16 {
 31111  				continue
 31112  			}
 31113  			_ = left.Args[1]
 31114  			if x != left.Args[0] {
 31115  				continue
 31116  			}
 31117  			z := left.Args[1]
 31118  			if z.Op != OpSub16 {
 31119  				continue
 31120  			}
 31121  			_ = z.Args[1]
 31122  			z_0 := z.Args[0]
 31123  			if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
 31124  				continue
 31125  			}
 31126  			v.reset(OpRotateLeft32)
 31127  			v.AddArg2(x, z)
 31128  			return true
 31129  		}
 31130  		break
 31131  	}
 31132  	// match: (Xor32 right:(Rsh32Ux8 x y) left:(Lsh32x8 x z:(Sub8 (Const8 [32]) y)))
 31133  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
 31134  	// result: (RotateLeft32 x z)
 31135  	for {
 31136  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 31137  			right := v_0
 31138  			if right.Op != OpRsh32Ux8 {
 31139  				continue
 31140  			}
 31141  			y := right.Args[1]
 31142  			x := right.Args[0]
 31143  			left := v_1
 31144  			if left.Op != OpLsh32x8 {
 31145  				continue
 31146  			}
 31147  			_ = left.Args[1]
 31148  			if x != left.Args[0] {
 31149  				continue
 31150  			}
 31151  			z := left.Args[1]
 31152  			if z.Op != OpSub8 {
 31153  				continue
 31154  			}
 31155  			_ = z.Args[1]
 31156  			z_0 := z.Args[0]
 31157  			if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
 31158  				continue
 31159  			}
 31160  			v.reset(OpRotateLeft32)
 31161  			v.AddArg2(x, z)
 31162  			return true
 31163  		}
 31164  		break
 31165  	}
 31166  	return false
 31167  }
 31168  func rewriteValuegeneric_OpXor64(v *Value) bool {
 31169  	v_1 := v.Args[1]
 31170  	v_0 := v.Args[0]
 31171  	b := v.Block
 31172  	config := b.Func.Config
 31173  	// match: (Xor64 (Const64 [c]) (Const64 [d]))
 31174  	// result: (Const64 [c^d])
 31175  	for {
 31176  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 31177  			if v_0.Op != OpConst64 {
 31178  				continue
 31179  			}
 31180  			c := auxIntToInt64(v_0.AuxInt)
 31181  			if v_1.Op != OpConst64 {
 31182  				continue
 31183  			}
 31184  			d := auxIntToInt64(v_1.AuxInt)
 31185  			v.reset(OpConst64)
 31186  			v.AuxInt = int64ToAuxInt(c ^ d)
 31187  			return true
 31188  		}
 31189  		break
 31190  	}
 31191  	// match: (Xor64 x x)
 31192  	// result: (Const64 [0])
 31193  	for {
 31194  		x := v_0
 31195  		if x != v_1 {
 31196  			break
 31197  		}
 31198  		v.reset(OpConst64)
 31199  		v.AuxInt = int64ToAuxInt(0)
 31200  		return true
 31201  	}
 31202  	// match: (Xor64 (Const64 [0]) x)
 31203  	// result: x
 31204  	for {
 31205  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 31206  			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 31207  				continue
 31208  			}
 31209  			x := v_1
 31210  			v.copyOf(x)
 31211  			return true
 31212  		}
 31213  		break
 31214  	}
 31215  	// match: (Xor64 (Com64 x) x)
 31216  	// result: (Const64 [-1])
 31217  	for {
 31218  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 31219  			if v_0.Op != OpCom64 {
 31220  				continue
 31221  			}
 31222  			x := v_0.Args[0]
 31223  			if x != v_1 {
 31224  				continue
 31225  			}
 31226  			v.reset(OpConst64)
 31227  			v.AuxInt = int64ToAuxInt(-1)
 31228  			return true
 31229  		}
 31230  		break
 31231  	}
 31232  	// match: (Xor64 (Const64 [-1]) x)
 31233  	// result: (Com64 x)
 31234  	for {
 31235  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 31236  			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != -1 {
 31237  				continue
 31238  			}
 31239  			x := v_1
 31240  			v.reset(OpCom64)
 31241  			v.AddArg(x)
 31242  			return true
 31243  		}
 31244  		break
 31245  	}
 31246  	// match: (Xor64 x (Xor64 x y))
 31247  	// result: y
 31248  	for {
 31249  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 31250  			x := v_0
 31251  			if v_1.Op != OpXor64 {
 31252  				continue
 31253  			}
 31254  			_ = v_1.Args[1]
 31255  			v_1_0 := v_1.Args[0]
 31256  			v_1_1 := v_1.Args[1]
 31257  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 31258  				if x != v_1_0 {
 31259  					continue
 31260  				}
 31261  				y := v_1_1
 31262  				v.copyOf(y)
 31263  				return true
 31264  			}
 31265  		}
 31266  		break
 31267  	}
 31268  	// match: (Xor64 (Xor64 i:(Const64 <t>) z) x)
 31269  	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
 31270  	// result: (Xor64 i (Xor64 <t> z x))
 31271  	for {
 31272  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 31273  			if v_0.Op != OpXor64 {
 31274  				continue
 31275  			}
 31276  			_ = v_0.Args[1]
 31277  			v_0_0 := v_0.Args[0]
 31278  			v_0_1 := v_0.Args[1]
 31279  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
 31280  				i := v_0_0
 31281  				if i.Op != OpConst64 {
 31282  					continue
 31283  				}
 31284  				t := i.Type
 31285  				z := v_0_1
 31286  				x := v_1
 31287  				if !(z.Op != OpConst64 && x.Op != OpConst64) {
 31288  					continue
 31289  				}
 31290  				v.reset(OpXor64)
 31291  				v0 := b.NewValue0(v.Pos, OpXor64, t)
 31292  				v0.AddArg2(z, x)
 31293  				v.AddArg2(i, v0)
 31294  				return true
 31295  			}
 31296  		}
 31297  		break
 31298  	}
 31299  	// match: (Xor64 (Const64 <t> [c]) (Xor64 (Const64 <t> [d]) x))
 31300  	// result: (Xor64 (Const64 <t> [c^d]) x)
 31301  	for {
 31302  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 31303  			if v_0.Op != OpConst64 {
 31304  				continue
 31305  			}
 31306  			t := v_0.Type
 31307  			c := auxIntToInt64(v_0.AuxInt)
 31308  			if v_1.Op != OpXor64 {
 31309  				continue
 31310  			}
 31311  			_ = v_1.Args[1]
 31312  			v_1_0 := v_1.Args[0]
 31313  			v_1_1 := v_1.Args[1]
 31314  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 31315  				if v_1_0.Op != OpConst64 || v_1_0.Type != t {
 31316  					continue
 31317  				}
 31318  				d := auxIntToInt64(v_1_0.AuxInt)
 31319  				x := v_1_1
 31320  				v.reset(OpXor64)
 31321  				v0 := b.NewValue0(v.Pos, OpConst64, t)
 31322  				v0.AuxInt = int64ToAuxInt(c ^ d)
 31323  				v.AddArg2(v0, x)
 31324  				return true
 31325  			}
 31326  		}
 31327  		break
 31328  	}
 31329  	// match: (Xor64 (Lsh64x64 x z:(Const64 <t> [c])) (Rsh64Ux64 x (Const64 [d])))
 31330  	// cond: c < 64 && d == 64-c && canRotate(config, 64)
 31331  	// result: (RotateLeft64 x z)
 31332  	for {
 31333  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 31334  			if v_0.Op != OpLsh64x64 {
 31335  				continue
 31336  			}
 31337  			_ = v_0.Args[1]
 31338  			x := v_0.Args[0]
 31339  			z := v_0.Args[1]
 31340  			if z.Op != OpConst64 {
 31341  				continue
 31342  			}
 31343  			c := auxIntToInt64(z.AuxInt)
 31344  			if v_1.Op != OpRsh64Ux64 {
 31345  				continue
 31346  			}
 31347  			_ = v_1.Args[1]
 31348  			if x != v_1.Args[0] {
 31349  				continue
 31350  			}
 31351  			v_1_1 := v_1.Args[1]
 31352  			if v_1_1.Op != OpConst64 {
 31353  				continue
 31354  			}
 31355  			d := auxIntToInt64(v_1_1.AuxInt)
 31356  			if !(c < 64 && d == 64-c && canRotate(config, 64)) {
 31357  				continue
 31358  			}
 31359  			v.reset(OpRotateLeft64)
 31360  			v.AddArg2(x, z)
 31361  			return true
 31362  		}
 31363  		break
 31364  	}
 31365  	// match: (Xor64 left:(Lsh64x64 x y) right:(Rsh64Ux64 x (Sub64 (Const64 [64]) y)))
 31366  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
 31367  	// result: (RotateLeft64 x y)
 31368  	for {
 31369  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 31370  			left := v_0
 31371  			if left.Op != OpLsh64x64 {
 31372  				continue
 31373  			}
 31374  			y := left.Args[1]
 31375  			x := left.Args[0]
 31376  			right := v_1
 31377  			if right.Op != OpRsh64Ux64 {
 31378  				continue
 31379  			}
 31380  			_ = right.Args[1]
 31381  			if x != right.Args[0] {
 31382  				continue
 31383  			}
 31384  			right_1 := right.Args[1]
 31385  			if right_1.Op != OpSub64 {
 31386  				continue
 31387  			}
 31388  			_ = right_1.Args[1]
 31389  			right_1_0 := right_1.Args[0]
 31390  			if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
 31391  				continue
 31392  			}
 31393  			v.reset(OpRotateLeft64)
 31394  			v.AddArg2(x, y)
 31395  			return true
 31396  		}
 31397  		break
 31398  	}
 31399  	// match: (Xor64 left:(Lsh64x32 x y) right:(Rsh64Ux32 x (Sub32 (Const32 [64]) y)))
 31400  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
 31401  	// result: (RotateLeft64 x y)
 31402  	for {
 31403  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 31404  			left := v_0
 31405  			if left.Op != OpLsh64x32 {
 31406  				continue
 31407  			}
 31408  			y := left.Args[1]
 31409  			x := left.Args[0]
 31410  			right := v_1
 31411  			if right.Op != OpRsh64Ux32 {
 31412  				continue
 31413  			}
 31414  			_ = right.Args[1]
 31415  			if x != right.Args[0] {
 31416  				continue
 31417  			}
 31418  			right_1 := right.Args[1]
 31419  			if right_1.Op != OpSub32 {
 31420  				continue
 31421  			}
 31422  			_ = right_1.Args[1]
 31423  			right_1_0 := right_1.Args[0]
 31424  			if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
 31425  				continue
 31426  			}
 31427  			v.reset(OpRotateLeft64)
 31428  			v.AddArg2(x, y)
 31429  			return true
 31430  		}
 31431  		break
 31432  	}
 31433  	// match: (Xor64 left:(Lsh64x16 x y) right:(Rsh64Ux16 x (Sub16 (Const16 [64]) y)))
 31434  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
 31435  	// result: (RotateLeft64 x y)
 31436  	for {
 31437  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 31438  			left := v_0
 31439  			if left.Op != OpLsh64x16 {
 31440  				continue
 31441  			}
 31442  			y := left.Args[1]
 31443  			x := left.Args[0]
 31444  			right := v_1
 31445  			if right.Op != OpRsh64Ux16 {
 31446  				continue
 31447  			}
 31448  			_ = right.Args[1]
 31449  			if x != right.Args[0] {
 31450  				continue
 31451  			}
 31452  			right_1 := right.Args[1]
 31453  			if right_1.Op != OpSub16 {
 31454  				continue
 31455  			}
 31456  			_ = right_1.Args[1]
 31457  			right_1_0 := right_1.Args[0]
 31458  			if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
 31459  				continue
 31460  			}
 31461  			v.reset(OpRotateLeft64)
 31462  			v.AddArg2(x, y)
 31463  			return true
 31464  		}
 31465  		break
 31466  	}
 31467  	// match: (Xor64 left:(Lsh64x8 x y) right:(Rsh64Ux8 x (Sub8 (Const8 [64]) y)))
 31468  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
 31469  	// result: (RotateLeft64 x y)
 31470  	for {
 31471  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 31472  			left := v_0
 31473  			if left.Op != OpLsh64x8 {
 31474  				continue
 31475  			}
 31476  			y := left.Args[1]
 31477  			x := left.Args[0]
 31478  			right := v_1
 31479  			if right.Op != OpRsh64Ux8 {
 31480  				continue
 31481  			}
 31482  			_ = right.Args[1]
 31483  			if x != right.Args[0] {
 31484  				continue
 31485  			}
 31486  			right_1 := right.Args[1]
 31487  			if right_1.Op != OpSub8 {
 31488  				continue
 31489  			}
 31490  			_ = right_1.Args[1]
 31491  			right_1_0 := right_1.Args[0]
 31492  			if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
 31493  				continue
 31494  			}
 31495  			v.reset(OpRotateLeft64)
 31496  			v.AddArg2(x, y)
 31497  			return true
 31498  		}
 31499  		break
 31500  	}
 31501  	// match: (Xor64 right:(Rsh64Ux64 x y) left:(Lsh64x64 x z:(Sub64 (Const64 [64]) y)))
 31502  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
 31503  	// result: (RotateLeft64 x z)
 31504  	for {
 31505  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 31506  			right := v_0
 31507  			if right.Op != OpRsh64Ux64 {
 31508  				continue
 31509  			}
 31510  			y := right.Args[1]
 31511  			x := right.Args[0]
 31512  			left := v_1
 31513  			if left.Op != OpLsh64x64 {
 31514  				continue
 31515  			}
 31516  			_ = left.Args[1]
 31517  			if x != left.Args[0] {
 31518  				continue
 31519  			}
 31520  			z := left.Args[1]
 31521  			if z.Op != OpSub64 {
 31522  				continue
 31523  			}
 31524  			_ = z.Args[1]
 31525  			z_0 := z.Args[0]
 31526  			if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
 31527  				continue
 31528  			}
 31529  			v.reset(OpRotateLeft64)
 31530  			v.AddArg2(x, z)
 31531  			return true
 31532  		}
 31533  		break
 31534  	}
 31535  	// match: (Xor64 right:(Rsh64Ux32 x y) left:(Lsh64x32 x z:(Sub32 (Const32 [64]) y)))
 31536  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
 31537  	// result: (RotateLeft64 x z)
 31538  	for {
 31539  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 31540  			right := v_0
 31541  			if right.Op != OpRsh64Ux32 {
 31542  				continue
 31543  			}
 31544  			y := right.Args[1]
 31545  			x := right.Args[0]
 31546  			left := v_1
 31547  			if left.Op != OpLsh64x32 {
 31548  				continue
 31549  			}
 31550  			_ = left.Args[1]
 31551  			if x != left.Args[0] {
 31552  				continue
 31553  			}
 31554  			z := left.Args[1]
 31555  			if z.Op != OpSub32 {
 31556  				continue
 31557  			}
 31558  			_ = z.Args[1]
 31559  			z_0 := z.Args[0]
 31560  			if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
 31561  				continue
 31562  			}
 31563  			v.reset(OpRotateLeft64)
 31564  			v.AddArg2(x, z)
 31565  			return true
 31566  		}
 31567  		break
 31568  	}
 31569  	// match: (Xor64 right:(Rsh64Ux16 x y) left:(Lsh64x16 x z:(Sub16 (Const16 [64]) y)))
 31570  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
 31571  	// result: (RotateLeft64 x z)
 31572  	for {
 31573  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 31574  			right := v_0
 31575  			if right.Op != OpRsh64Ux16 {
 31576  				continue
 31577  			}
 31578  			y := right.Args[1]
 31579  			x := right.Args[0]
 31580  			left := v_1
 31581  			if left.Op != OpLsh64x16 {
 31582  				continue
 31583  			}
 31584  			_ = left.Args[1]
 31585  			if x != left.Args[0] {
 31586  				continue
 31587  			}
 31588  			z := left.Args[1]
 31589  			if z.Op != OpSub16 {
 31590  				continue
 31591  			}
 31592  			_ = z.Args[1]
 31593  			z_0 := z.Args[0]
 31594  			if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
 31595  				continue
 31596  			}
 31597  			v.reset(OpRotateLeft64)
 31598  			v.AddArg2(x, z)
 31599  			return true
 31600  		}
 31601  		break
 31602  	}
 31603  	// match: (Xor64 right:(Rsh64Ux8 x y) left:(Lsh64x8 x z:(Sub8 (Const8 [64]) y)))
 31604  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
 31605  	// result: (RotateLeft64 x z)
 31606  	for {
 31607  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 31608  			right := v_0
 31609  			if right.Op != OpRsh64Ux8 {
 31610  				continue
 31611  			}
 31612  			y := right.Args[1]
 31613  			x := right.Args[0]
 31614  			left := v_1
 31615  			if left.Op != OpLsh64x8 {
 31616  				continue
 31617  			}
 31618  			_ = left.Args[1]
 31619  			if x != left.Args[0] {
 31620  				continue
 31621  			}
 31622  			z := left.Args[1]
 31623  			if z.Op != OpSub8 {
 31624  				continue
 31625  			}
 31626  			_ = z.Args[1]
 31627  			z_0 := z.Args[0]
 31628  			if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
 31629  				continue
 31630  			}
 31631  			v.reset(OpRotateLeft64)
 31632  			v.AddArg2(x, z)
 31633  			return true
 31634  		}
 31635  		break
 31636  	}
 31637  	return false
 31638  }
 31639  func rewriteValuegeneric_OpXor8(v *Value) bool {
 31640  	v_1 := v.Args[1]
 31641  	v_0 := v.Args[0]
 31642  	b := v.Block
 31643  	config := b.Func.Config
 31644  	// match: (Xor8 (Const8 [c]) (Const8 [d]))
 31645  	// result: (Const8 [c^d])
 31646  	for {
 31647  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 31648  			if v_0.Op != OpConst8 {
 31649  				continue
 31650  			}
 31651  			c := auxIntToInt8(v_0.AuxInt)
 31652  			if v_1.Op != OpConst8 {
 31653  				continue
 31654  			}
 31655  			d := auxIntToInt8(v_1.AuxInt)
 31656  			v.reset(OpConst8)
 31657  			v.AuxInt = int8ToAuxInt(c ^ d)
 31658  			return true
 31659  		}
 31660  		break
 31661  	}
 31662  	// match: (Xor8 x x)
 31663  	// result: (Const8 [0])
 31664  	for {
 31665  		x := v_0
 31666  		if x != v_1 {
 31667  			break
 31668  		}
 31669  		v.reset(OpConst8)
 31670  		v.AuxInt = int8ToAuxInt(0)
 31671  		return true
 31672  	}
 31673  	// match: (Xor8 (Const8 [0]) x)
 31674  	// result: x
 31675  	for {
 31676  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 31677  			if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
 31678  				continue
 31679  			}
 31680  			x := v_1
 31681  			v.copyOf(x)
 31682  			return true
 31683  		}
 31684  		break
 31685  	}
 31686  	// match: (Xor8 (Com8 x) x)
 31687  	// result: (Const8 [-1])
 31688  	for {
 31689  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 31690  			if v_0.Op != OpCom8 {
 31691  				continue
 31692  			}
 31693  			x := v_0.Args[0]
 31694  			if x != v_1 {
 31695  				continue
 31696  			}
 31697  			v.reset(OpConst8)
 31698  			v.AuxInt = int8ToAuxInt(-1)
 31699  			return true
 31700  		}
 31701  		break
 31702  	}
 31703  	// match: (Xor8 (Const8 [-1]) x)
 31704  	// result: (Com8 x)
 31705  	for {
 31706  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 31707  			if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != -1 {
 31708  				continue
 31709  			}
 31710  			x := v_1
 31711  			v.reset(OpCom8)
 31712  			v.AddArg(x)
 31713  			return true
 31714  		}
 31715  		break
 31716  	}
 31717  	// match: (Xor8 x (Xor8 x y))
 31718  	// result: y
 31719  	for {
 31720  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 31721  			x := v_0
 31722  			if v_1.Op != OpXor8 {
 31723  				continue
 31724  			}
 31725  			_ = v_1.Args[1]
 31726  			v_1_0 := v_1.Args[0]
 31727  			v_1_1 := v_1.Args[1]
 31728  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 31729  				if x != v_1_0 {
 31730  					continue
 31731  				}
 31732  				y := v_1_1
 31733  				v.copyOf(y)
 31734  				return true
 31735  			}
 31736  		}
 31737  		break
 31738  	}
 31739  	// match: (Xor8 (Xor8 i:(Const8 <t>) z) x)
 31740  	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
 31741  	// result: (Xor8 i (Xor8 <t> z x))
 31742  	for {
 31743  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 31744  			if v_0.Op != OpXor8 {
 31745  				continue
 31746  			}
 31747  			_ = v_0.Args[1]
 31748  			v_0_0 := v_0.Args[0]
 31749  			v_0_1 := v_0.Args[1]
 31750  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
 31751  				i := v_0_0
 31752  				if i.Op != OpConst8 {
 31753  					continue
 31754  				}
 31755  				t := i.Type
 31756  				z := v_0_1
 31757  				x := v_1
 31758  				if !(z.Op != OpConst8 && x.Op != OpConst8) {
 31759  					continue
 31760  				}
 31761  				v.reset(OpXor8)
 31762  				v0 := b.NewValue0(v.Pos, OpXor8, t)
 31763  				v0.AddArg2(z, x)
 31764  				v.AddArg2(i, v0)
 31765  				return true
 31766  			}
 31767  		}
 31768  		break
 31769  	}
 31770  	// match: (Xor8 (Const8 <t> [c]) (Xor8 (Const8 <t> [d]) x))
 31771  	// result: (Xor8 (Const8 <t> [c^d]) x)
 31772  	for {
 31773  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 31774  			if v_0.Op != OpConst8 {
 31775  				continue
 31776  			}
 31777  			t := v_0.Type
 31778  			c := auxIntToInt8(v_0.AuxInt)
 31779  			if v_1.Op != OpXor8 {
 31780  				continue
 31781  			}
 31782  			_ = v_1.Args[1]
 31783  			v_1_0 := v_1.Args[0]
 31784  			v_1_1 := v_1.Args[1]
 31785  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 31786  				if v_1_0.Op != OpConst8 || v_1_0.Type != t {
 31787  					continue
 31788  				}
 31789  				d := auxIntToInt8(v_1_0.AuxInt)
 31790  				x := v_1_1
 31791  				v.reset(OpXor8)
 31792  				v0 := b.NewValue0(v.Pos, OpConst8, t)
 31793  				v0.AuxInt = int8ToAuxInt(c ^ d)
 31794  				v.AddArg2(v0, x)
 31795  				return true
 31796  			}
 31797  		}
 31798  		break
 31799  	}
 31800  	// match: (Xor8 (Lsh8x64 x z:(Const64 <t> [c])) (Rsh8Ux64 x (Const64 [d])))
 31801  	// cond: c < 8 && d == 8-c && canRotate(config, 8)
 31802  	// result: (RotateLeft8 x z)
 31803  	for {
 31804  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 31805  			if v_0.Op != OpLsh8x64 {
 31806  				continue
 31807  			}
 31808  			_ = v_0.Args[1]
 31809  			x := v_0.Args[0]
 31810  			z := v_0.Args[1]
 31811  			if z.Op != OpConst64 {
 31812  				continue
 31813  			}
 31814  			c := auxIntToInt64(z.AuxInt)
 31815  			if v_1.Op != OpRsh8Ux64 {
 31816  				continue
 31817  			}
 31818  			_ = v_1.Args[1]
 31819  			if x != v_1.Args[0] {
 31820  				continue
 31821  			}
 31822  			v_1_1 := v_1.Args[1]
 31823  			if v_1_1.Op != OpConst64 {
 31824  				continue
 31825  			}
 31826  			d := auxIntToInt64(v_1_1.AuxInt)
 31827  			if !(c < 8 && d == 8-c && canRotate(config, 8)) {
 31828  				continue
 31829  			}
 31830  			v.reset(OpRotateLeft8)
 31831  			v.AddArg2(x, z)
 31832  			return true
 31833  		}
 31834  		break
 31835  	}
 31836  	// match: (Xor8 left:(Lsh8x64 x y) right:(Rsh8Ux64 x (Sub64 (Const64 [8]) y)))
 31837  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
 31838  	// result: (RotateLeft8 x y)
 31839  	for {
 31840  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 31841  			left := v_0
 31842  			if left.Op != OpLsh8x64 {
 31843  				continue
 31844  			}
 31845  			y := left.Args[1]
 31846  			x := left.Args[0]
 31847  			right := v_1
 31848  			if right.Op != OpRsh8Ux64 {
 31849  				continue
 31850  			}
 31851  			_ = right.Args[1]
 31852  			if x != right.Args[0] {
 31853  				continue
 31854  			}
 31855  			right_1 := right.Args[1]
 31856  			if right_1.Op != OpSub64 {
 31857  				continue
 31858  			}
 31859  			_ = right_1.Args[1]
 31860  			right_1_0 := right_1.Args[0]
 31861  			if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
 31862  				continue
 31863  			}
 31864  			v.reset(OpRotateLeft8)
 31865  			v.AddArg2(x, y)
 31866  			return true
 31867  		}
 31868  		break
 31869  	}
 31870  	// match: (Xor8 left:(Lsh8x32 x y) right:(Rsh8Ux32 x (Sub32 (Const32 [8]) y)))
 31871  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
 31872  	// result: (RotateLeft8 x y)
 31873  	for {
 31874  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 31875  			left := v_0
 31876  			if left.Op != OpLsh8x32 {
 31877  				continue
 31878  			}
 31879  			y := left.Args[1]
 31880  			x := left.Args[0]
 31881  			right := v_1
 31882  			if right.Op != OpRsh8Ux32 {
 31883  				continue
 31884  			}
 31885  			_ = right.Args[1]
 31886  			if x != right.Args[0] {
 31887  				continue
 31888  			}
 31889  			right_1 := right.Args[1]
 31890  			if right_1.Op != OpSub32 {
 31891  				continue
 31892  			}
 31893  			_ = right_1.Args[1]
 31894  			right_1_0 := right_1.Args[0]
 31895  			if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
 31896  				continue
 31897  			}
 31898  			v.reset(OpRotateLeft8)
 31899  			v.AddArg2(x, y)
 31900  			return true
 31901  		}
 31902  		break
 31903  	}
 31904  	// match: (Xor8 left:(Lsh8x16 x y) right:(Rsh8Ux16 x (Sub16 (Const16 [8]) y)))
 31905  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
 31906  	// result: (RotateLeft8 x y)
 31907  	for {
 31908  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 31909  			left := v_0
 31910  			if left.Op != OpLsh8x16 {
 31911  				continue
 31912  			}
 31913  			y := left.Args[1]
 31914  			x := left.Args[0]
 31915  			right := v_1
 31916  			if right.Op != OpRsh8Ux16 {
 31917  				continue
 31918  			}
 31919  			_ = right.Args[1]
 31920  			if x != right.Args[0] {
 31921  				continue
 31922  			}
 31923  			right_1 := right.Args[1]
 31924  			if right_1.Op != OpSub16 {
 31925  				continue
 31926  			}
 31927  			_ = right_1.Args[1]
 31928  			right_1_0 := right_1.Args[0]
 31929  			if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
 31930  				continue
 31931  			}
 31932  			v.reset(OpRotateLeft8)
 31933  			v.AddArg2(x, y)
 31934  			return true
 31935  		}
 31936  		break
 31937  	}
 31938  	// match: (Xor8 left:(Lsh8x8 x y) right:(Rsh8Ux8 x (Sub8 (Const8 [8]) y)))
 31939  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
 31940  	// result: (RotateLeft8 x y)
 31941  	for {
 31942  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 31943  			left := v_0
 31944  			if left.Op != OpLsh8x8 {
 31945  				continue
 31946  			}
 31947  			y := left.Args[1]
 31948  			x := left.Args[0]
 31949  			right := v_1
 31950  			if right.Op != OpRsh8Ux8 {
 31951  				continue
 31952  			}
 31953  			_ = right.Args[1]
 31954  			if x != right.Args[0] {
 31955  				continue
 31956  			}
 31957  			right_1 := right.Args[1]
 31958  			if right_1.Op != OpSub8 {
 31959  				continue
 31960  			}
 31961  			_ = right_1.Args[1]
 31962  			right_1_0 := right_1.Args[0]
 31963  			if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
 31964  				continue
 31965  			}
 31966  			v.reset(OpRotateLeft8)
 31967  			v.AddArg2(x, y)
 31968  			return true
 31969  		}
 31970  		break
 31971  	}
 31972  	// match: (Xor8 right:(Rsh8Ux64 x y) left:(Lsh8x64 x z:(Sub64 (Const64 [8]) y)))
 31973  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
 31974  	// result: (RotateLeft8 x z)
 31975  	for {
 31976  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 31977  			right := v_0
 31978  			if right.Op != OpRsh8Ux64 {
 31979  				continue
 31980  			}
 31981  			y := right.Args[1]
 31982  			x := right.Args[0]
 31983  			left := v_1
 31984  			if left.Op != OpLsh8x64 {
 31985  				continue
 31986  			}
 31987  			_ = left.Args[1]
 31988  			if x != left.Args[0] {
 31989  				continue
 31990  			}
 31991  			z := left.Args[1]
 31992  			if z.Op != OpSub64 {
 31993  				continue
 31994  			}
 31995  			_ = z.Args[1]
 31996  			z_0 := z.Args[0]
 31997  			if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
 31998  				continue
 31999  			}
 32000  			v.reset(OpRotateLeft8)
 32001  			v.AddArg2(x, z)
 32002  			return true
 32003  		}
 32004  		break
 32005  	}
 32006  	// match: (Xor8 right:(Rsh8Ux32 x y) left:(Lsh8x32 x z:(Sub32 (Const32 [8]) y)))
 32007  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
 32008  	// result: (RotateLeft8 x z)
 32009  	for {
 32010  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 32011  			right := v_0
 32012  			if right.Op != OpRsh8Ux32 {
 32013  				continue
 32014  			}
 32015  			y := right.Args[1]
 32016  			x := right.Args[0]
 32017  			left := v_1
 32018  			if left.Op != OpLsh8x32 {
 32019  				continue
 32020  			}
 32021  			_ = left.Args[1]
 32022  			if x != left.Args[0] {
 32023  				continue
 32024  			}
 32025  			z := left.Args[1]
 32026  			if z.Op != OpSub32 {
 32027  				continue
 32028  			}
 32029  			_ = z.Args[1]
 32030  			z_0 := z.Args[0]
 32031  			if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
 32032  				continue
 32033  			}
 32034  			v.reset(OpRotateLeft8)
 32035  			v.AddArg2(x, z)
 32036  			return true
 32037  		}
 32038  		break
 32039  	}
 32040  	// match: (Xor8 right:(Rsh8Ux16 x y) left:(Lsh8x16 x z:(Sub16 (Const16 [8]) y)))
 32041  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
 32042  	// result: (RotateLeft8 x z)
 32043  	for {
 32044  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 32045  			right := v_0
 32046  			if right.Op != OpRsh8Ux16 {
 32047  				continue
 32048  			}
 32049  			y := right.Args[1]
 32050  			x := right.Args[0]
 32051  			left := v_1
 32052  			if left.Op != OpLsh8x16 {
 32053  				continue
 32054  			}
 32055  			_ = left.Args[1]
 32056  			if x != left.Args[0] {
 32057  				continue
 32058  			}
 32059  			z := left.Args[1]
 32060  			if z.Op != OpSub16 {
 32061  				continue
 32062  			}
 32063  			_ = z.Args[1]
 32064  			z_0 := z.Args[0]
 32065  			if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
 32066  				continue
 32067  			}
 32068  			v.reset(OpRotateLeft8)
 32069  			v.AddArg2(x, z)
 32070  			return true
 32071  		}
 32072  		break
 32073  	}
 32074  	// match: (Xor8 right:(Rsh8Ux8 x y) left:(Lsh8x8 x z:(Sub8 (Const8 [8]) y)))
 32075  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
 32076  	// result: (RotateLeft8 x z)
 32077  	for {
 32078  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 32079  			right := v_0
 32080  			if right.Op != OpRsh8Ux8 {
 32081  				continue
 32082  			}
 32083  			y := right.Args[1]
 32084  			x := right.Args[0]
 32085  			left := v_1
 32086  			if left.Op != OpLsh8x8 {
 32087  				continue
 32088  			}
 32089  			_ = left.Args[1]
 32090  			if x != left.Args[0] {
 32091  				continue
 32092  			}
 32093  			z := left.Args[1]
 32094  			if z.Op != OpSub8 {
 32095  				continue
 32096  			}
 32097  			_ = z.Args[1]
 32098  			z_0 := z.Args[0]
 32099  			if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
 32100  				continue
 32101  			}
 32102  			v.reset(OpRotateLeft8)
 32103  			v.AddArg2(x, z)
 32104  			return true
 32105  		}
 32106  		break
 32107  	}
 32108  	return false
 32109  }
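// rotateLeft8Sketch is an illustrative, hand-written sketch and is not part
// of the generated rules; the name is invented for this example. It shows the
// source-level idiom that the RotateLeft8 rules above recognize: for a shift
// amount k in [1,7] (and trivially for k == 0) the two shifted halves occupy
// disjoint bits, so ^ combines them into a left rotation exactly as | would.
func rotateLeft8Sketch(x uint8, k uint) uint8 {
	k &= 7                 // keep the rotation amount in [0, 8); illustrative only
	return x<<k ^ x>>(8-k) // disjoint bit ranges, so ^ acts like |
}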
 32110  func rewriteValuegeneric_OpZero(v *Value) bool {
 32111  	v_1 := v.Args[1]
 32112  	v_0 := v.Args[0]
 32113  	b := v.Block
 32114  	// match: (Zero (SelectN [0] call:(StaticLECall _ _)) mem:(SelectN [1] call))
 32115  	// cond: isSameCall(call.Aux, "runtime.newobject")
 32116  	// result: mem
 32117  	for {
 32118  		if v_0.Op != OpSelectN || auxIntToInt64(v_0.AuxInt) != 0 {
 32119  			break
 32120  		}
 32121  		call := v_0.Args[0]
 32122  		if call.Op != OpStaticLECall || len(call.Args) != 2 {
 32123  			break
 32124  		}
 32125  		mem := v_1
 32126  		if mem.Op != OpSelectN || auxIntToInt64(mem.AuxInt) != 1 || call != mem.Args[0] || !(isSameCall(call.Aux, "runtime.newobject")) {
 32127  			break
 32128  		}
 32129  		v.copyOf(mem)
 32130  		return true
 32131  	}
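	// The rule above drops a Zero of memory that was just returned by
	// runtime.newobject: the allocator already returns zeroed memory, so
	// the Zero is redundant and the call's memory result is used directly.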
 32132  	// match: (Zero {t1} [n] p1 store:(Store {t2} (OffPtr [o2] p2) _ mem))
 32133  	// cond: isSamePtr(p1, p2) && store.Uses == 1 && n >= o2 + t2.Size() && clobber(store)
 32134  	// result: (Zero {t1} [n] p1 mem)
 32135  	for {
 32136  		n := auxIntToInt64(v.AuxInt)
 32137  		t1 := auxToType(v.Aux)
 32138  		p1 := v_0
 32139  		store := v_1
 32140  		if store.Op != OpStore {
 32141  			break
 32142  		}
 32143  		t2 := auxToType(store.Aux)
 32144  		mem := store.Args[2]
 32145  		store_0 := store.Args[0]
 32146  		if store_0.Op != OpOffPtr {
 32147  			break
 32148  		}
 32149  		o2 := auxIntToInt64(store_0.AuxInt)
 32150  		p2 := store_0.Args[0]
 32151  		if !(isSamePtr(p1, p2) && store.Uses == 1 && n >= o2+t2.Size() && clobber(store)) {
 32152  			break
 32153  		}
 32154  		v.reset(OpZero)
 32155  		v.AuxInt = int64ToAuxInt(n)
 32156  		v.Aux = typeToAux(t1)
 32157  		v.AddArg2(p1, mem)
 32158  		return true
 32159  	}
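	// The rule above removes a store that is completely overwritten by this
	// Zero: the store writes t2.Size() bytes at offset o2 of the same
	// pointer, n >= o2+t2.Size() guarantees the zeroed region covers it, and
	// store.Uses == 1 ensures nothing else observes the stored value, so the
	// Zero can take the store's incoming memory instead.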
 32160  	// match: (Zero {t} [n] dst1 move:(Move {t} [n] dst2 _ mem))
 32161  	// cond: move.Uses == 1 && isSamePtr(dst1, dst2) && clobber(move)
 32162  	// result: (Zero {t} [n] dst1 mem)
 32163  	for {
 32164  		n := auxIntToInt64(v.AuxInt)
 32165  		t := auxToType(v.Aux)
 32166  		dst1 := v_0
 32167  		move := v_1
 32168  		if move.Op != OpMove || auxIntToInt64(move.AuxInt) != n || auxToType(move.Aux) != t {
 32169  			break
 32170  		}
 32171  		mem := move.Args[2]
 32172  		dst2 := move.Args[0]
 32173  		if !(move.Uses == 1 && isSamePtr(dst1, dst2) && clobber(move)) {
 32174  			break
 32175  		}
 32176  		v.reset(OpZero)
 32177  		v.AuxInt = int64ToAuxInt(n)
 32178  		v.Aux = typeToAux(t)
 32179  		v.AddArg2(dst1, mem)
 32180  		return true
 32181  	}
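	// Similarly, the rule above drops a Move of the same size into the same
	// destination when this Zero is its only use: the zeroing overwrites
	// everything the Move wrote, so the Zero chains to the Move's incoming
	// memory.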
 32182  	// match: (Zero {t} [n] dst1 vardef:(VarDef {x} move:(Move {t} [n] dst2 _ mem)))
 32183  	// cond: move.Uses == 1 && vardef.Uses == 1 && isSamePtr(dst1, dst2) && clobber(move, vardef)
 32184  	// result: (Zero {t} [n] dst1 (VarDef {x} mem))
 32185  	for {
 32186  		n := auxIntToInt64(v.AuxInt)
 32187  		t := auxToType(v.Aux)
 32188  		dst1 := v_0
 32189  		vardef := v_1
 32190  		if vardef.Op != OpVarDef {
 32191  			break
 32192  		}
 32193  		x := auxToSym(vardef.Aux)
 32194  		move := vardef.Args[0]
 32195  		if move.Op != OpMove || auxIntToInt64(move.AuxInt) != n || auxToType(move.Aux) != t {
 32196  			break
 32197  		}
 32198  		mem := move.Args[2]
 32199  		dst2 := move.Args[0]
 32200  		if !(move.Uses == 1 && vardef.Uses == 1 && isSamePtr(dst1, dst2) && clobber(move, vardef)) {
 32201  			break
 32202  		}
 32203  		v.reset(OpZero)
 32204  		v.AuxInt = int64ToAuxInt(n)
 32205  		v.Aux = typeToAux(t)
 32206  		v0 := b.NewValue0(v.Pos, OpVarDef, types.TypeMem)
 32207  		v0.Aux = symToAux(x)
 32208  		v0.AddArg(mem)
 32209  		v.AddArg2(dst1, v0)
 32210  		return true
 32211  	}
 32212  	// match: (Zero {t} [s] dst1 zero:(Zero {t} [s] dst2 _))
 32213  	// cond: isSamePtr(dst1, dst2)
 32214  	// result: zero
 32215  	for {
 32216  		s := auxIntToInt64(v.AuxInt)
 32217  		t := auxToType(v.Aux)
 32218  		dst1 := v_0
 32219  		zero := v_1
 32220  		if zero.Op != OpZero || auxIntToInt64(zero.AuxInt) != s || auxToType(zero.Aux) != t {
 32221  			break
 32222  		}
 32223  		dst2 := zero.Args[0]
 32224  		if !(isSamePtr(dst1, dst2)) {
 32225  			break
 32226  		}
 32227  		v.copyOf(zero)
 32228  		return true
 32229  	}
 32230  	// match: (Zero {t} [s] dst1 vardef:(VarDef (Zero {t} [s] dst2 _)))
 32231  	// cond: isSamePtr(dst1, dst2)
 32232  	// result: vardef
 32233  	for {
 32234  		s := auxIntToInt64(v.AuxInt)
 32235  		t := auxToType(v.Aux)
 32236  		dst1 := v_0
 32237  		vardef := v_1
 32238  		if vardef.Op != OpVarDef {
 32239  			break
 32240  		}
 32241  		vardef_0 := vardef.Args[0]
 32242  		if vardef_0.Op != OpZero || auxIntToInt64(vardef_0.AuxInt) != s || auxToType(vardef_0.Aux) != t {
 32243  			break
 32244  		}
 32245  		dst2 := vardef_0.Args[0]
 32246  		if !(isSamePtr(dst1, dst2)) {
 32247  			break
 32248  		}
 32249  		v.copyOf(vardef)
 32250  		return true
 32251  	}
 32252  	return false
 32253  }
 32254  func rewriteValuegeneric_OpZeroExt16to32(v *Value) bool {
 32255  	v_0 := v.Args[0]
 32256  	// match: (ZeroExt16to32 (Const16 [c]))
 32257  	// result: (Const32 [int32(uint16(c))])
 32258  	for {
 32259  		if v_0.Op != OpConst16 {
 32260  			break
 32261  		}
 32262  		c := auxIntToInt16(v_0.AuxInt)
 32263  		v.reset(OpConst32)
 32264  		v.AuxInt = int32ToAuxInt(int32(uint16(c)))
 32265  		return true
 32266  	}
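	// Worked example (informal): for c = -1, uint16(c) is 0xFFFF, so
	// (ZeroExt16to32 (Const16 [-1])) becomes (Const32 [65535]).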
 32267  	// match: (ZeroExt16to32 (Trunc32to16 x:(Rsh32Ux64 _ (Const64 [s]))))
 32268  	// cond: s >= 16
 32269  	// result: x
 32270  	for {
 32271  		if v_0.Op != OpTrunc32to16 {
 32272  			break
 32273  		}
 32274  		x := v_0.Args[0]
 32275  		if x.Op != OpRsh32Ux64 {
 32276  			break
 32277  		}
 32278  		_ = x.Args[1]
 32279  		x_1 := x.Args[1]
 32280  		if x_1.Op != OpConst64 {
 32281  			break
 32282  		}
 32283  		s := auxIntToInt64(x_1.AuxInt)
 32284  		if !(s >= 16) {
 32285  			break
 32286  		}
 32287  		v.copyOf(x)
 32288  		return true
 32289  	}
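	// The rule above recognizes that x already has its upper 16 bits clear:
	// it is a 32-bit value shifted right (unsigned) by at least 16, so
	// truncating it to 16 bits and zero-extending back to 32 bits is a
	// no-op and x can be used directly. The remaining ZeroExt rules below
	// follow the same pattern for the other widths.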
 32290  	return false
 32291  }
 32292  func rewriteValuegeneric_OpZeroExt16to64(v *Value) bool {
 32293  	v_0 := v.Args[0]
 32294  	// match: (ZeroExt16to64 (Const16 [c]))
 32295  	// result: (Const64 [int64(uint16(c))])
 32296  	for {
 32297  		if v_0.Op != OpConst16 {
 32298  			break
 32299  		}
 32300  		c := auxIntToInt16(v_0.AuxInt)
 32301  		v.reset(OpConst64)
 32302  		v.AuxInt = int64ToAuxInt(int64(uint16(c)))
 32303  		return true
 32304  	}
 32305  	// match: (ZeroExt16to64 (Trunc64to16 x:(Rsh64Ux64 _ (Const64 [s]))))
 32306  	// cond: s >= 48
 32307  	// result: x
 32308  	for {
 32309  		if v_0.Op != OpTrunc64to16 {
 32310  			break
 32311  		}
 32312  		x := v_0.Args[0]
 32313  		if x.Op != OpRsh64Ux64 {
 32314  			break
 32315  		}
 32316  		_ = x.Args[1]
 32317  		x_1 := x.Args[1]
 32318  		if x_1.Op != OpConst64 {
 32319  			break
 32320  		}
 32321  		s := auxIntToInt64(x_1.AuxInt)
 32322  		if !(s >= 48) {
 32323  			break
 32324  		}
 32325  		v.copyOf(x)
 32326  		return true
 32327  	}
 32328  	return false
 32329  }
 32330  func rewriteValuegeneric_OpZeroExt32to64(v *Value) bool {
 32331  	v_0 := v.Args[0]
 32332  	// match: (ZeroExt32to64 (Const32 [c]))
 32333  	// result: (Const64 [int64(uint32(c))])
 32334  	for {
 32335  		if v_0.Op != OpConst32 {
 32336  			break
 32337  		}
 32338  		c := auxIntToInt32(v_0.AuxInt)
 32339  		v.reset(OpConst64)
 32340  		v.AuxInt = int64ToAuxInt(int64(uint32(c)))
 32341  		return true
 32342  	}
 32343  	// match: (ZeroExt32to64 (Trunc64to32 x:(Rsh64Ux64 _ (Const64 [s]))))
 32344  	// cond: s >= 32
 32345  	// result: x
 32346  	for {
 32347  		if v_0.Op != OpTrunc64to32 {
 32348  			break
 32349  		}
 32350  		x := v_0.Args[0]
 32351  		if x.Op != OpRsh64Ux64 {
 32352  			break
 32353  		}
 32354  		_ = x.Args[1]
 32355  		x_1 := x.Args[1]
 32356  		if x_1.Op != OpConst64 {
 32357  			break
 32358  		}
 32359  		s := auxIntToInt64(x_1.AuxInt)
 32360  		if !(s >= 32) {
 32361  			break
 32362  		}
 32363  		v.copyOf(x)
 32364  		return true
 32365  	}
 32366  	return false
 32367  }
 32368  func rewriteValuegeneric_OpZeroExt8to16(v *Value) bool {
 32369  	v_0 := v.Args[0]
 32370  	// match: (ZeroExt8to16 (Const8 [c]))
 32371  	// result: (Const16 [int16( uint8(c))])
 32372  	for {
 32373  		if v_0.Op != OpConst8 {
 32374  			break
 32375  		}
 32376  		c := auxIntToInt8(v_0.AuxInt)
 32377  		v.reset(OpConst16)
 32378  		v.AuxInt = int16ToAuxInt(int16(uint8(c)))
 32379  		return true
 32380  	}
 32381  	// match: (ZeroExt8to16 (Trunc16to8 x:(Rsh16Ux64 _ (Const64 [s]))))
 32382  	// cond: s >= 8
 32383  	// result: x
 32384  	for {
 32385  		if v_0.Op != OpTrunc16to8 {
 32386  			break
 32387  		}
 32388  		x := v_0.Args[0]
 32389  		if x.Op != OpRsh16Ux64 {
 32390  			break
 32391  		}
 32392  		_ = x.Args[1]
 32393  		x_1 := x.Args[1]
 32394  		if x_1.Op != OpConst64 {
 32395  			break
 32396  		}
 32397  		s := auxIntToInt64(x_1.AuxInt)
 32398  		if !(s >= 8) {
 32399  			break
 32400  		}
 32401  		v.copyOf(x)
 32402  		return true
 32403  	}
 32404  	return false
 32405  }
 32406  func rewriteValuegeneric_OpZeroExt8to32(v *Value) bool {
 32407  	v_0 := v.Args[0]
 32408  	// match: (ZeroExt8to32 (Const8 [c]))
 32409  	// result: (Const32 [int32( uint8(c))])
 32410  	for {
 32411  		if v_0.Op != OpConst8 {
 32412  			break
 32413  		}
 32414  		c := auxIntToInt8(v_0.AuxInt)
 32415  		v.reset(OpConst32)
 32416  		v.AuxInt = int32ToAuxInt(int32(uint8(c)))
 32417  		return true
 32418  	}
 32419  	// match: (ZeroExt8to32 (Trunc32to8 x:(Rsh32Ux64 _ (Const64 [s]))))
 32420  	// cond: s >= 24
 32421  	// result: x
 32422  	for {
 32423  		if v_0.Op != OpTrunc32to8 {
 32424  			break
 32425  		}
 32426  		x := v_0.Args[0]
 32427  		if x.Op != OpRsh32Ux64 {
 32428  			break
 32429  		}
 32430  		_ = x.Args[1]
 32431  		x_1 := x.Args[1]
 32432  		if x_1.Op != OpConst64 {
 32433  			break
 32434  		}
 32435  		s := auxIntToInt64(x_1.AuxInt)
 32436  		if !(s >= 24) {
 32437  			break
 32438  		}
 32439  		v.copyOf(x)
 32440  		return true
 32441  	}
 32442  	return false
 32443  }
 32444  func rewriteValuegeneric_OpZeroExt8to64(v *Value) bool {
 32445  	v_0 := v.Args[0]
 32446  	// match: (ZeroExt8to64 (Const8 [c]))
 32447  	// result: (Const64 [int64( uint8(c))])
 32448  	for {
 32449  		if v_0.Op != OpConst8 {
 32450  			break
 32451  		}
 32452  		c := auxIntToInt8(v_0.AuxInt)
 32453  		v.reset(OpConst64)
 32454  		v.AuxInt = int64ToAuxInt(int64(uint8(c)))
 32455  		return true
 32456  	}
 32457  	// match: (ZeroExt8to64 (Trunc64to8 x:(Rsh64Ux64 _ (Const64 [s]))))
 32458  	// cond: s >= 56
 32459  	// result: x
 32460  	for {
 32461  		if v_0.Op != OpTrunc64to8 {
 32462  			break
 32463  		}
 32464  		x := v_0.Args[0]
 32465  		if x.Op != OpRsh64Ux64 {
 32466  			break
 32467  		}
 32468  		_ = x.Args[1]
 32469  		x_1 := x.Args[1]
 32470  		if x_1.Op != OpConst64 {
 32471  			break
 32472  		}
 32473  		s := auxIntToInt64(x_1.AuxInt)
 32474  		if !(s >= 56) {
 32475  			break
 32476  		}
 32477  		v.copyOf(x)
 32478  		return true
 32479  	}
 32480  	return false
 32481  }
 32482  func rewriteBlockgeneric(b *Block) bool {
 32483  	switch b.Kind {
 32484  	case BlockIf:
 32485  		// match: (If (Not cond) yes no)
 32486  		// result: (If cond no yes)
 32487  		for b.Controls[0].Op == OpNot {
 32488  			v_0 := b.Controls[0]
 32489  			cond := v_0.Args[0]
 32490  			b.resetWithControl(BlockIf, cond)
 32491  			b.swapSuccessors()
 32492  			return true
 32493  		}
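		// The rule above normalizes a branch on a negated condition by
		// branching on the condition itself and swapping the successors,
		// which preserves which block is reached for each value of cond.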
 32494  		// match: (If (ConstBool [c]) yes no)
 32495  		// cond: c
 32496  		// result: (First yes no)
 32497  		for b.Controls[0].Op == OpConstBool {
 32498  			v_0 := b.Controls[0]
 32499  			c := auxIntToBool(v_0.AuxInt)
 32500  			if !(c) {
 32501  				break
 32502  			}
 32503  			b.Reset(BlockFirst)
 32504  			return true
 32505  		}
 32506  		// match: (If (ConstBool [c]) yes no)
 32507  		// cond: !c
 32508  		// result: (First no yes)
 32509  		for b.Controls[0].Op == OpConstBool {
 32510  			v_0 := b.Controls[0]
 32511  			c := auxIntToBool(v_0.AuxInt)
 32512  			if !(!c) {
 32513  				break
 32514  			}
 32515  			b.Reset(BlockFirst)
 32516  			b.swapSuccessors()
 32517  			return true
 32518  		}
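		// The two ConstBool rules above fold a branch on a known condition
		// into a BlockFirst, which always takes its first successor; for a
		// false condition the successors are swapped first, and the untaken
		// edge is removed by later dead-code elimination.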
 32519  	}
 32520  	return false
 32521  }