github.com/rakyll/go@v0.0.0-20170216000551-64c02460d703/src/cmd/compile/internal/ssa/rewritegeneric.go

     1  // autogenerated from gen/generic.rules: do not edit!
     2  // generated with: cd gen; go run *.go
     3  
     4  package ssa
     5  
     6  import "math"
     7  
     8  var _ = math.MinInt8 // in case not otherwise used
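// rewriteValuegeneric dispatches on v.Op to the matching per-opcode rewrite
// function below. Each rewriteValuegeneric_OpXxx helper tries its rules in
// order: the first rule whose match pattern and cond hold rewrites v in place
// (via v.reset, v.AuxInt, and v.AddArg) and returns true; if no rule applies,
// the helper falls through and returns false, leaving v unchanged.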
     9  func rewriteValuegeneric(v *Value, config *Config) bool {
    10  	switch v.Op {
    11  	case OpAdd16:
    12  		return rewriteValuegeneric_OpAdd16(v, config)
    13  	case OpAdd32:
    14  		return rewriteValuegeneric_OpAdd32(v, config)
    15  	case OpAdd32F:
    16  		return rewriteValuegeneric_OpAdd32F(v, config)
    17  	case OpAdd64:
    18  		return rewriteValuegeneric_OpAdd64(v, config)
    19  	case OpAdd64F:
    20  		return rewriteValuegeneric_OpAdd64F(v, config)
    21  	case OpAdd8:
    22  		return rewriteValuegeneric_OpAdd8(v, config)
    23  	case OpAddPtr:
    24  		return rewriteValuegeneric_OpAddPtr(v, config)
    25  	case OpAnd16:
    26  		return rewriteValuegeneric_OpAnd16(v, config)
    27  	case OpAnd32:
    28  		return rewriteValuegeneric_OpAnd32(v, config)
    29  	case OpAnd64:
    30  		return rewriteValuegeneric_OpAnd64(v, config)
    31  	case OpAnd8:
    32  		return rewriteValuegeneric_OpAnd8(v, config)
    33  	case OpArg:
    34  		return rewriteValuegeneric_OpArg(v, config)
    35  	case OpArraySelect:
    36  		return rewriteValuegeneric_OpArraySelect(v, config)
    37  	case OpCom16:
    38  		return rewriteValuegeneric_OpCom16(v, config)
    39  	case OpCom32:
    40  		return rewriteValuegeneric_OpCom32(v, config)
    41  	case OpCom64:
    42  		return rewriteValuegeneric_OpCom64(v, config)
    43  	case OpCom8:
    44  		return rewriteValuegeneric_OpCom8(v, config)
    45  	case OpConstInterface:
    46  		return rewriteValuegeneric_OpConstInterface(v, config)
    47  	case OpConstSlice:
    48  		return rewriteValuegeneric_OpConstSlice(v, config)
    49  	case OpConstString:
    50  		return rewriteValuegeneric_OpConstString(v, config)
    51  	case OpConvert:
    52  		return rewriteValuegeneric_OpConvert(v, config)
    53  	case OpCvt32Fto64F:
    54  		return rewriteValuegeneric_OpCvt32Fto64F(v, config)
    55  	case OpCvt64Fto32F:
    56  		return rewriteValuegeneric_OpCvt64Fto32F(v, config)
    57  	case OpDiv32F:
    58  		return rewriteValuegeneric_OpDiv32F(v, config)
    59  	case OpDiv64:
    60  		return rewriteValuegeneric_OpDiv64(v, config)
    61  	case OpDiv64F:
    62  		return rewriteValuegeneric_OpDiv64F(v, config)
    63  	case OpDiv64u:
    64  		return rewriteValuegeneric_OpDiv64u(v, config)
    65  	case OpEq16:
    66  		return rewriteValuegeneric_OpEq16(v, config)
    67  	case OpEq32:
    68  		return rewriteValuegeneric_OpEq32(v, config)
    69  	case OpEq64:
    70  		return rewriteValuegeneric_OpEq64(v, config)
    71  	case OpEq8:
    72  		return rewriteValuegeneric_OpEq8(v, config)
    73  	case OpEqB:
    74  		return rewriteValuegeneric_OpEqB(v, config)
    75  	case OpEqInter:
    76  		return rewriteValuegeneric_OpEqInter(v, config)
    77  	case OpEqPtr:
    78  		return rewriteValuegeneric_OpEqPtr(v, config)
    79  	case OpEqSlice:
    80  		return rewriteValuegeneric_OpEqSlice(v, config)
    81  	case OpGeq16:
    82  		return rewriteValuegeneric_OpGeq16(v, config)
    83  	case OpGeq16U:
    84  		return rewriteValuegeneric_OpGeq16U(v, config)
    85  	case OpGeq32:
    86  		return rewriteValuegeneric_OpGeq32(v, config)
    87  	case OpGeq32U:
    88  		return rewriteValuegeneric_OpGeq32U(v, config)
    89  	case OpGeq64:
    90  		return rewriteValuegeneric_OpGeq64(v, config)
    91  	case OpGeq64U:
    92  		return rewriteValuegeneric_OpGeq64U(v, config)
    93  	case OpGeq8:
    94  		return rewriteValuegeneric_OpGeq8(v, config)
    95  	case OpGeq8U:
    96  		return rewriteValuegeneric_OpGeq8U(v, config)
    97  	case OpGreater16:
    98  		return rewriteValuegeneric_OpGreater16(v, config)
    99  	case OpGreater16U:
   100  		return rewriteValuegeneric_OpGreater16U(v, config)
   101  	case OpGreater32:
   102  		return rewriteValuegeneric_OpGreater32(v, config)
   103  	case OpGreater32U:
   104  		return rewriteValuegeneric_OpGreater32U(v, config)
   105  	case OpGreater64:
   106  		return rewriteValuegeneric_OpGreater64(v, config)
   107  	case OpGreater64U:
   108  		return rewriteValuegeneric_OpGreater64U(v, config)
   109  	case OpGreater8:
   110  		return rewriteValuegeneric_OpGreater8(v, config)
   111  	case OpGreater8U:
   112  		return rewriteValuegeneric_OpGreater8U(v, config)
   113  	case OpIMake:
   114  		return rewriteValuegeneric_OpIMake(v, config)
   115  	case OpIsInBounds:
   116  		return rewriteValuegeneric_OpIsInBounds(v, config)
   117  	case OpIsNonNil:
   118  		return rewriteValuegeneric_OpIsNonNil(v, config)
   119  	case OpIsSliceInBounds:
   120  		return rewriteValuegeneric_OpIsSliceInBounds(v, config)
   121  	case OpLeq16:
   122  		return rewriteValuegeneric_OpLeq16(v, config)
   123  	case OpLeq16U:
   124  		return rewriteValuegeneric_OpLeq16U(v, config)
   125  	case OpLeq32:
   126  		return rewriteValuegeneric_OpLeq32(v, config)
   127  	case OpLeq32U:
   128  		return rewriteValuegeneric_OpLeq32U(v, config)
   129  	case OpLeq64:
   130  		return rewriteValuegeneric_OpLeq64(v, config)
   131  	case OpLeq64U:
   132  		return rewriteValuegeneric_OpLeq64U(v, config)
   133  	case OpLeq8:
   134  		return rewriteValuegeneric_OpLeq8(v, config)
   135  	case OpLeq8U:
   136  		return rewriteValuegeneric_OpLeq8U(v, config)
   137  	case OpLess16:
   138  		return rewriteValuegeneric_OpLess16(v, config)
   139  	case OpLess16U:
   140  		return rewriteValuegeneric_OpLess16U(v, config)
   141  	case OpLess32:
   142  		return rewriteValuegeneric_OpLess32(v, config)
   143  	case OpLess32U:
   144  		return rewriteValuegeneric_OpLess32U(v, config)
   145  	case OpLess64:
   146  		return rewriteValuegeneric_OpLess64(v, config)
   147  	case OpLess64U:
   148  		return rewriteValuegeneric_OpLess64U(v, config)
   149  	case OpLess8:
   150  		return rewriteValuegeneric_OpLess8(v, config)
   151  	case OpLess8U:
   152  		return rewriteValuegeneric_OpLess8U(v, config)
   153  	case OpLoad:
   154  		return rewriteValuegeneric_OpLoad(v, config)
   155  	case OpLsh16x16:
   156  		return rewriteValuegeneric_OpLsh16x16(v, config)
   157  	case OpLsh16x32:
   158  		return rewriteValuegeneric_OpLsh16x32(v, config)
   159  	case OpLsh16x64:
   160  		return rewriteValuegeneric_OpLsh16x64(v, config)
   161  	case OpLsh16x8:
   162  		return rewriteValuegeneric_OpLsh16x8(v, config)
   163  	case OpLsh32x16:
   164  		return rewriteValuegeneric_OpLsh32x16(v, config)
   165  	case OpLsh32x32:
   166  		return rewriteValuegeneric_OpLsh32x32(v, config)
   167  	case OpLsh32x64:
   168  		return rewriteValuegeneric_OpLsh32x64(v, config)
   169  	case OpLsh32x8:
   170  		return rewriteValuegeneric_OpLsh32x8(v, config)
   171  	case OpLsh64x16:
   172  		return rewriteValuegeneric_OpLsh64x16(v, config)
   173  	case OpLsh64x32:
   174  		return rewriteValuegeneric_OpLsh64x32(v, config)
   175  	case OpLsh64x64:
   176  		return rewriteValuegeneric_OpLsh64x64(v, config)
   177  	case OpLsh64x8:
   178  		return rewriteValuegeneric_OpLsh64x8(v, config)
   179  	case OpLsh8x16:
   180  		return rewriteValuegeneric_OpLsh8x16(v, config)
   181  	case OpLsh8x32:
   182  		return rewriteValuegeneric_OpLsh8x32(v, config)
   183  	case OpLsh8x64:
   184  		return rewriteValuegeneric_OpLsh8x64(v, config)
   185  	case OpLsh8x8:
   186  		return rewriteValuegeneric_OpLsh8x8(v, config)
   187  	case OpMod16:
   188  		return rewriteValuegeneric_OpMod16(v, config)
   189  	case OpMod16u:
   190  		return rewriteValuegeneric_OpMod16u(v, config)
   191  	case OpMod32:
   192  		return rewriteValuegeneric_OpMod32(v, config)
   193  	case OpMod32u:
   194  		return rewriteValuegeneric_OpMod32u(v, config)
   195  	case OpMod64:
   196  		return rewriteValuegeneric_OpMod64(v, config)
   197  	case OpMod64u:
   198  		return rewriteValuegeneric_OpMod64u(v, config)
   199  	case OpMod8:
   200  		return rewriteValuegeneric_OpMod8(v, config)
   201  	case OpMod8u:
   202  		return rewriteValuegeneric_OpMod8u(v, config)
   203  	case OpMul16:
   204  		return rewriteValuegeneric_OpMul16(v, config)
   205  	case OpMul32:
   206  		return rewriteValuegeneric_OpMul32(v, config)
   207  	case OpMul32F:
   208  		return rewriteValuegeneric_OpMul32F(v, config)
   209  	case OpMul64:
   210  		return rewriteValuegeneric_OpMul64(v, config)
   211  	case OpMul64F:
   212  		return rewriteValuegeneric_OpMul64F(v, config)
   213  	case OpMul8:
   214  		return rewriteValuegeneric_OpMul8(v, config)
   215  	case OpNeg16:
   216  		return rewriteValuegeneric_OpNeg16(v, config)
   217  	case OpNeg32:
   218  		return rewriteValuegeneric_OpNeg32(v, config)
   219  	case OpNeg64:
   220  		return rewriteValuegeneric_OpNeg64(v, config)
   221  	case OpNeg8:
   222  		return rewriteValuegeneric_OpNeg8(v, config)
   223  	case OpNeq16:
   224  		return rewriteValuegeneric_OpNeq16(v, config)
   225  	case OpNeq32:
   226  		return rewriteValuegeneric_OpNeq32(v, config)
   227  	case OpNeq64:
   228  		return rewriteValuegeneric_OpNeq64(v, config)
   229  	case OpNeq8:
   230  		return rewriteValuegeneric_OpNeq8(v, config)
   231  	case OpNeqB:
   232  		return rewriteValuegeneric_OpNeqB(v, config)
   233  	case OpNeqInter:
   234  		return rewriteValuegeneric_OpNeqInter(v, config)
   235  	case OpNeqPtr:
   236  		return rewriteValuegeneric_OpNeqPtr(v, config)
   237  	case OpNeqSlice:
   238  		return rewriteValuegeneric_OpNeqSlice(v, config)
   239  	case OpNilCheck:
   240  		return rewriteValuegeneric_OpNilCheck(v, config)
   241  	case OpNot:
   242  		return rewriteValuegeneric_OpNot(v, config)
   243  	case OpOffPtr:
   244  		return rewriteValuegeneric_OpOffPtr(v, config)
   245  	case OpOr16:
   246  		return rewriteValuegeneric_OpOr16(v, config)
   247  	case OpOr32:
   248  		return rewriteValuegeneric_OpOr32(v, config)
   249  	case OpOr64:
   250  		return rewriteValuegeneric_OpOr64(v, config)
   251  	case OpOr8:
   252  		return rewriteValuegeneric_OpOr8(v, config)
   253  	case OpPhi:
   254  		return rewriteValuegeneric_OpPhi(v, config)
   255  	case OpPtrIndex:
   256  		return rewriteValuegeneric_OpPtrIndex(v, config)
   257  	case OpRsh16Ux16:
   258  		return rewriteValuegeneric_OpRsh16Ux16(v, config)
   259  	case OpRsh16Ux32:
   260  		return rewriteValuegeneric_OpRsh16Ux32(v, config)
   261  	case OpRsh16Ux64:
   262  		return rewriteValuegeneric_OpRsh16Ux64(v, config)
   263  	case OpRsh16Ux8:
   264  		return rewriteValuegeneric_OpRsh16Ux8(v, config)
   265  	case OpRsh16x16:
   266  		return rewriteValuegeneric_OpRsh16x16(v, config)
   267  	case OpRsh16x32:
   268  		return rewriteValuegeneric_OpRsh16x32(v, config)
   269  	case OpRsh16x64:
   270  		return rewriteValuegeneric_OpRsh16x64(v, config)
   271  	case OpRsh16x8:
   272  		return rewriteValuegeneric_OpRsh16x8(v, config)
   273  	case OpRsh32Ux16:
   274  		return rewriteValuegeneric_OpRsh32Ux16(v, config)
   275  	case OpRsh32Ux32:
   276  		return rewriteValuegeneric_OpRsh32Ux32(v, config)
   277  	case OpRsh32Ux64:
   278  		return rewriteValuegeneric_OpRsh32Ux64(v, config)
   279  	case OpRsh32Ux8:
   280  		return rewriteValuegeneric_OpRsh32Ux8(v, config)
   281  	case OpRsh32x16:
   282  		return rewriteValuegeneric_OpRsh32x16(v, config)
   283  	case OpRsh32x32:
   284  		return rewriteValuegeneric_OpRsh32x32(v, config)
   285  	case OpRsh32x64:
   286  		return rewriteValuegeneric_OpRsh32x64(v, config)
   287  	case OpRsh32x8:
   288  		return rewriteValuegeneric_OpRsh32x8(v, config)
   289  	case OpRsh64Ux16:
   290  		return rewriteValuegeneric_OpRsh64Ux16(v, config)
   291  	case OpRsh64Ux32:
   292  		return rewriteValuegeneric_OpRsh64Ux32(v, config)
   293  	case OpRsh64Ux64:
   294  		return rewriteValuegeneric_OpRsh64Ux64(v, config)
   295  	case OpRsh64Ux8:
   296  		return rewriteValuegeneric_OpRsh64Ux8(v, config)
   297  	case OpRsh64x16:
   298  		return rewriteValuegeneric_OpRsh64x16(v, config)
   299  	case OpRsh64x32:
   300  		return rewriteValuegeneric_OpRsh64x32(v, config)
   301  	case OpRsh64x64:
   302  		return rewriteValuegeneric_OpRsh64x64(v, config)
   303  	case OpRsh64x8:
   304  		return rewriteValuegeneric_OpRsh64x8(v, config)
   305  	case OpRsh8Ux16:
   306  		return rewriteValuegeneric_OpRsh8Ux16(v, config)
   307  	case OpRsh8Ux32:
   308  		return rewriteValuegeneric_OpRsh8Ux32(v, config)
   309  	case OpRsh8Ux64:
   310  		return rewriteValuegeneric_OpRsh8Ux64(v, config)
   311  	case OpRsh8Ux8:
   312  		return rewriteValuegeneric_OpRsh8Ux8(v, config)
   313  	case OpRsh8x16:
   314  		return rewriteValuegeneric_OpRsh8x16(v, config)
   315  	case OpRsh8x32:
   316  		return rewriteValuegeneric_OpRsh8x32(v, config)
   317  	case OpRsh8x64:
   318  		return rewriteValuegeneric_OpRsh8x64(v, config)
   319  	case OpRsh8x8:
   320  		return rewriteValuegeneric_OpRsh8x8(v, config)
   321  	case OpSignExt16to32:
   322  		return rewriteValuegeneric_OpSignExt16to32(v, config)
   323  	case OpSignExt16to64:
   324  		return rewriteValuegeneric_OpSignExt16to64(v, config)
   325  	case OpSignExt32to64:
   326  		return rewriteValuegeneric_OpSignExt32to64(v, config)
   327  	case OpSignExt8to16:
   328  		return rewriteValuegeneric_OpSignExt8to16(v, config)
   329  	case OpSignExt8to32:
   330  		return rewriteValuegeneric_OpSignExt8to32(v, config)
   331  	case OpSignExt8to64:
   332  		return rewriteValuegeneric_OpSignExt8to64(v, config)
   333  	case OpSliceCap:
   334  		return rewriteValuegeneric_OpSliceCap(v, config)
   335  	case OpSliceLen:
   336  		return rewriteValuegeneric_OpSliceLen(v, config)
   337  	case OpSlicePtr:
   338  		return rewriteValuegeneric_OpSlicePtr(v, config)
   339  	case OpSlicemask:
   340  		return rewriteValuegeneric_OpSlicemask(v, config)
   341  	case OpSqrt:
   342  		return rewriteValuegeneric_OpSqrt(v, config)
   343  	case OpStore:
   344  		return rewriteValuegeneric_OpStore(v, config)
   345  	case OpStringLen:
   346  		return rewriteValuegeneric_OpStringLen(v, config)
   347  	case OpStringPtr:
   348  		return rewriteValuegeneric_OpStringPtr(v, config)
   349  	case OpStructSelect:
   350  		return rewriteValuegeneric_OpStructSelect(v, config)
   351  	case OpSub16:
   352  		return rewriteValuegeneric_OpSub16(v, config)
   353  	case OpSub32:
   354  		return rewriteValuegeneric_OpSub32(v, config)
   355  	case OpSub32F:
   356  		return rewriteValuegeneric_OpSub32F(v, config)
   357  	case OpSub64:
   358  		return rewriteValuegeneric_OpSub64(v, config)
   359  	case OpSub64F:
   360  		return rewriteValuegeneric_OpSub64F(v, config)
   361  	case OpSub8:
   362  		return rewriteValuegeneric_OpSub8(v, config)
   363  	case OpTrunc16to8:
   364  		return rewriteValuegeneric_OpTrunc16to8(v, config)
   365  	case OpTrunc32to16:
   366  		return rewriteValuegeneric_OpTrunc32to16(v, config)
   367  	case OpTrunc32to8:
   368  		return rewriteValuegeneric_OpTrunc32to8(v, config)
   369  	case OpTrunc64to16:
   370  		return rewriteValuegeneric_OpTrunc64to16(v, config)
   371  	case OpTrunc64to32:
   372  		return rewriteValuegeneric_OpTrunc64to32(v, config)
   373  	case OpTrunc64to8:
   374  		return rewriteValuegeneric_OpTrunc64to8(v, config)
   375  	case OpXor16:
   376  		return rewriteValuegeneric_OpXor16(v, config)
   377  	case OpXor32:
   378  		return rewriteValuegeneric_OpXor32(v, config)
   379  	case OpXor64:
   380  		return rewriteValuegeneric_OpXor64(v, config)
   381  	case OpXor8:
   382  		return rewriteValuegeneric_OpXor8(v, config)
   383  	case OpZero:
   384  		return rewriteValuegeneric_OpZero(v, config)
   385  	case OpZeroExt16to32:
   386  		return rewriteValuegeneric_OpZeroExt16to32(v, config)
   387  	case OpZeroExt16to64:
   388  		return rewriteValuegeneric_OpZeroExt16to64(v, config)
   389  	case OpZeroExt32to64:
   390  		return rewriteValuegeneric_OpZeroExt32to64(v, config)
   391  	case OpZeroExt8to16:
   392  		return rewriteValuegeneric_OpZeroExt8to16(v, config)
   393  	case OpZeroExt8to32:
   394  		return rewriteValuegeneric_OpZeroExt8to32(v, config)
   395  	case OpZeroExt8to64:
   396  		return rewriteValuegeneric_OpZeroExt8to64(v, config)
   397  	}
   398  	return false
   399  }
   400  func rewriteValuegeneric_OpAdd16(v *Value, config *Config) bool {
   401  	b := v.Block
   402  	_ = b
   403  	// match: (Add16  (Const16 [c])  (Const16 [d]))
   404  	// cond:
   405  	// result: (Const16 [int64(int16(c+d))])
   406  	for {
   407  		v_0 := v.Args[0]
   408  		if v_0.Op != OpConst16 {
   409  			break
   410  		}
   411  		c := v_0.AuxInt
   412  		v_1 := v.Args[1]
   413  		if v_1.Op != OpConst16 {
   414  			break
   415  		}
   416  		d := v_1.AuxInt
   417  		v.reset(OpConst16)
   418  		v.AuxInt = int64(int16(c + d))
   419  		return true
   420  	}
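	// For example, the constant-folding rule above turns
	// (Add16 (Const16 [3]) (Const16 [4])) into (Const16 [7]); the int16
	// conversion in int64(int16(c+d)) makes the addition wrap at 16 bits
	// even though AuxInt itself is an int64.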
   421  	// match: (Add16 x (Const16 <t> [c]))
   422  	// cond: x.Op != OpConst16
   423  	// result: (Add16 (Const16 <t> [c]) x)
   424  	for {
   425  		x := v.Args[0]
   426  		v_1 := v.Args[1]
   427  		if v_1.Op != OpConst16 {
   428  			break
   429  		}
   430  		t := v_1.Type
   431  		c := v_1.AuxInt
   432  		if !(x.Op != OpConst16) {
   433  			break
   434  		}
   435  		v.reset(OpAdd16)
   436  		v0 := b.NewValue0(v.Pos, OpConst16, t)
   437  		v0.AuxInt = c
   438  		v.AddArg(v0)
   439  		v.AddArg(x)
   440  		return true
   441  	}
   442  	// match: (Add16 (Const16 [0]) x)
   443  	// cond:
   444  	// result: x
   445  	for {
   446  		v_0 := v.Args[0]
   447  		if v_0.Op != OpConst16 {
   448  			break
   449  		}
   450  		if v_0.AuxInt != 0 {
   451  			break
   452  		}
   453  		x := v.Args[1]
   454  		v.reset(OpCopy)
   455  		v.Type = x.Type
   456  		v.AddArg(x)
   457  		return true
   458  	}
   459  	return false
   460  }
   461  func rewriteValuegeneric_OpAdd32(v *Value, config *Config) bool {
   462  	b := v.Block
   463  	_ = b
   464  	// match: (Add32  (Const32 [c])  (Const32 [d]))
   465  	// cond:
   466  	// result: (Const32 [int64(int32(c+d))])
   467  	for {
   468  		v_0 := v.Args[0]
   469  		if v_0.Op != OpConst32 {
   470  			break
   471  		}
   472  		c := v_0.AuxInt
   473  		v_1 := v.Args[1]
   474  		if v_1.Op != OpConst32 {
   475  			break
   476  		}
   477  		d := v_1.AuxInt
   478  		v.reset(OpConst32)
   479  		v.AuxInt = int64(int32(c + d))
   480  		return true
   481  	}
   482  	// match: (Add32 x (Const32 <t> [c]))
   483  	// cond: x.Op != OpConst32
   484  	// result: (Add32 (Const32 <t> [c]) x)
   485  	for {
   486  		x := v.Args[0]
   487  		v_1 := v.Args[1]
   488  		if v_1.Op != OpConst32 {
   489  			break
   490  		}
   491  		t := v_1.Type
   492  		c := v_1.AuxInt
   493  		if !(x.Op != OpConst32) {
   494  			break
   495  		}
   496  		v.reset(OpAdd32)
   497  		v0 := b.NewValue0(v.Pos, OpConst32, t)
   498  		v0.AuxInt = c
   499  		v.AddArg(v0)
   500  		v.AddArg(x)
   501  		return true
   502  	}
   503  	// match: (Add32 (Const32 [0]) x)
   504  	// cond:
   505  	// result: x
   506  	for {
   507  		v_0 := v.Args[0]
   508  		if v_0.Op != OpConst32 {
   509  			break
   510  		}
   511  		if v_0.AuxInt != 0 {
   512  			break
   513  		}
   514  		x := v.Args[1]
   515  		v.reset(OpCopy)
   516  		v.Type = x.Type
   517  		v.AddArg(x)
   518  		return true
   519  	}
   520  	return false
   521  }
   522  func rewriteValuegeneric_OpAdd32F(v *Value, config *Config) bool {
   523  	b := v.Block
   524  	_ = b
   525  	// match: (Add32F (Const32F [c]) (Const32F [d]))
   526  	// cond:
   527  	// result: (Const32F [f2i(float64(i2f32(c) + i2f32(d)))])
   528  	for {
   529  		v_0 := v.Args[0]
   530  		if v_0.Op != OpConst32F {
   531  			break
   532  		}
   533  		c := v_0.AuxInt
   534  		v_1 := v.Args[1]
   535  		if v_1.Op != OpConst32F {
   536  			break
   537  		}
   538  		d := v_1.AuxInt
   539  		v.reset(OpConst32F)
   540  		v.AuxInt = f2i(float64(i2f32(c) + i2f32(d)))
   541  		return true
   542  	}
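	// The rule above treats AuxInt as an encoded floating-point bit pattern:
	// i2f32 (a helper defined elsewhere in this package) decodes c and d as
	// float32 values, the sum is widened to float64, and f2i re-encodes the
	// result back into AuxInt form.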
   543  	// match: (Add32F x (Const32F [0]))
   544  	// cond:
   545  	// result: x
   546  	for {
   547  		x := v.Args[0]
   548  		v_1 := v.Args[1]
   549  		if v_1.Op != OpConst32F {
   550  			break
   551  		}
   552  		if v_1.AuxInt != 0 {
   553  			break
   554  		}
   555  		v.reset(OpCopy)
   556  		v.Type = x.Type
   557  		v.AddArg(x)
   558  		return true
   559  	}
   560  	// match: (Add32F (Const32F [0]) x)
   561  	// cond:
   562  	// result: x
   563  	for {
   564  		v_0 := v.Args[0]
   565  		if v_0.Op != OpConst32F {
   566  			break
   567  		}
   568  		if v_0.AuxInt != 0 {
   569  			break
   570  		}
   571  		x := v.Args[1]
   572  		v.reset(OpCopy)
   573  		v.Type = x.Type
   574  		v.AddArg(x)
   575  		return true
   576  	}
   577  	return false
   578  }
   579  func rewriteValuegeneric_OpAdd64(v *Value, config *Config) bool {
   580  	b := v.Block
   581  	_ = b
   582  	// match: (Add64  (Const64 [c])  (Const64 [d]))
   583  	// cond:
   584  	// result: (Const64 [c+d])
   585  	for {
   586  		v_0 := v.Args[0]
   587  		if v_0.Op != OpConst64 {
   588  			break
   589  		}
   590  		c := v_0.AuxInt
   591  		v_1 := v.Args[1]
   592  		if v_1.Op != OpConst64 {
   593  			break
   594  		}
   595  		d := v_1.AuxInt
   596  		v.reset(OpConst64)
   597  		v.AuxInt = c + d
   598  		return true
   599  	}
   600  	// match: (Add64 x (Const64 <t> [c]))
   601  	// cond: x.Op != OpConst64
   602  	// result: (Add64 (Const64 <t> [c]) x)
   603  	for {
   604  		x := v.Args[0]
   605  		v_1 := v.Args[1]
   606  		if v_1.Op != OpConst64 {
   607  			break
   608  		}
   609  		t := v_1.Type
   610  		c := v_1.AuxInt
   611  		if !(x.Op != OpConst64) {
   612  			break
   613  		}
   614  		v.reset(OpAdd64)
   615  		v0 := b.NewValue0(v.Pos, OpConst64, t)
   616  		v0.AuxInt = c
   617  		v.AddArg(v0)
   618  		v.AddArg(x)
   619  		return true
   620  	}
   621  	// match: (Add64 (Const64 [0]) x)
   622  	// cond:
   623  	// result: x
   624  	for {
   625  		v_0 := v.Args[0]
   626  		if v_0.Op != OpConst64 {
   627  			break
   628  		}
   629  		if v_0.AuxInt != 0 {
   630  			break
   631  		}
   632  		x := v.Args[1]
   633  		v.reset(OpCopy)
   634  		v.Type = x.Type
   635  		v.AddArg(x)
   636  		return true
   637  	}
   638  	return false
   639  }
   640  func rewriteValuegeneric_OpAdd64F(v *Value, config *Config) bool {
   641  	b := v.Block
   642  	_ = b
   643  	// match: (Add64F (Const64F [c]) (Const64F [d]))
   644  	// cond:
   645  	// result: (Const64F [f2i(i2f(c) + i2f(d))])
   646  	for {
   647  		v_0 := v.Args[0]
   648  		if v_0.Op != OpConst64F {
   649  			break
   650  		}
   651  		c := v_0.AuxInt
   652  		v_1 := v.Args[1]
   653  		if v_1.Op != OpConst64F {
   654  			break
   655  		}
   656  		d := v_1.AuxInt
   657  		v.reset(OpConst64F)
   658  		v.AuxInt = f2i(i2f(c) + i2f(d))
   659  		return true
   660  	}
   661  	// match: (Add64F x (Const64F [0]))
   662  	// cond:
   663  	// result: x
   664  	for {
   665  		x := v.Args[0]
   666  		v_1 := v.Args[1]
   667  		if v_1.Op != OpConst64F {
   668  			break
   669  		}
   670  		if v_1.AuxInt != 0 {
   671  			break
   672  		}
   673  		v.reset(OpCopy)
   674  		v.Type = x.Type
   675  		v.AddArg(x)
   676  		return true
   677  	}
   678  	// match: (Add64F (Const64F [0]) x)
   679  	// cond:
   680  	// result: x
   681  	for {
   682  		v_0 := v.Args[0]
   683  		if v_0.Op != OpConst64F {
   684  			break
   685  		}
   686  		if v_0.AuxInt != 0 {
   687  			break
   688  		}
   689  		x := v.Args[1]
   690  		v.reset(OpCopy)
   691  		v.Type = x.Type
   692  		v.AddArg(x)
   693  		return true
   694  	}
   695  	return false
   696  }
   697  func rewriteValuegeneric_OpAdd8(v *Value, config *Config) bool {
   698  	b := v.Block
   699  	_ = b
   700  	// match: (Add8   (Const8 [c])   (Const8 [d]))
   701  	// cond:
   702  	// result: (Const8  [int64(int8(c+d))])
   703  	for {
   704  		v_0 := v.Args[0]
   705  		if v_0.Op != OpConst8 {
   706  			break
   707  		}
   708  		c := v_0.AuxInt
   709  		v_1 := v.Args[1]
   710  		if v_1.Op != OpConst8 {
   711  			break
   712  		}
   713  		d := v_1.AuxInt
   714  		v.reset(OpConst8)
   715  		v.AuxInt = int64(int8(c + d))
   716  		return true
   717  	}
   718  	// match: (Add8  x (Const8  <t> [c]))
   719  	// cond: x.Op != OpConst8
   720  	// result: (Add8  (Const8  <t> [c]) x)
   721  	for {
   722  		x := v.Args[0]
   723  		v_1 := v.Args[1]
   724  		if v_1.Op != OpConst8 {
   725  			break
   726  		}
   727  		t := v_1.Type
   728  		c := v_1.AuxInt
   729  		if !(x.Op != OpConst8) {
   730  			break
   731  		}
   732  		v.reset(OpAdd8)
   733  		v0 := b.NewValue0(v.Pos, OpConst8, t)
   734  		v0.AuxInt = c
   735  		v.AddArg(v0)
   736  		v.AddArg(x)
   737  		return true
   738  	}
   739  	// match: (Add8  (Const8  [0]) x)
   740  	// cond:
   741  	// result: x
   742  	for {
   743  		v_0 := v.Args[0]
   744  		if v_0.Op != OpConst8 {
   745  			break
   746  		}
   747  		if v_0.AuxInt != 0 {
   748  			break
   749  		}
   750  		x := v.Args[1]
   751  		v.reset(OpCopy)
   752  		v.Type = x.Type
   753  		v.AddArg(x)
   754  		return true
   755  	}
   756  	return false
   757  }
   758  func rewriteValuegeneric_OpAddPtr(v *Value, config *Config) bool {
   759  	b := v.Block
   760  	_ = b
   761  	// match: (AddPtr <t> x (Const64 [c]))
   762  	// cond:
   763  	// result: (OffPtr <t> x [c])
   764  	for {
   765  		t := v.Type
   766  		x := v.Args[0]
   767  		v_1 := v.Args[1]
   768  		if v_1.Op != OpConst64 {
   769  			break
   770  		}
   771  		c := v_1.AuxInt
   772  		v.reset(OpOffPtr)
   773  		v.Type = t
   774  		v.AuxInt = c
   775  		v.AddArg(x)
   776  		return true
   777  	}
   778  	return false
   779  }
   780  func rewriteValuegeneric_OpAnd16(v *Value, config *Config) bool {
   781  	b := v.Block
   782  	_ = b
   783  	// match: (And16 x (Const16 <t> [c]))
   784  	// cond: x.Op != OpConst16
   785  	// result: (And16 (Const16 <t> [c]) x)
   786  	for {
   787  		x := v.Args[0]
   788  		v_1 := v.Args[1]
   789  		if v_1.Op != OpConst16 {
   790  			break
   791  		}
   792  		t := v_1.Type
   793  		c := v_1.AuxInt
   794  		if !(x.Op != OpConst16) {
   795  			break
   796  		}
   797  		v.reset(OpAnd16)
   798  		v0 := b.NewValue0(v.Pos, OpConst16, t)
   799  		v0.AuxInt = c
   800  		v.AddArg(v0)
   801  		v.AddArg(x)
   802  		return true
   803  	}
   804  	// match: (And16 x x)
   805  	// cond:
   806  	// result: x
   807  	for {
   808  		x := v.Args[0]
   809  		if x != v.Args[1] {
   810  			break
   811  		}
   812  		v.reset(OpCopy)
   813  		v.Type = x.Type
   814  		v.AddArg(x)
   815  		return true
   816  	}
   817  	// match: (And16 (Const16 [-1]) x)
   818  	// cond:
   819  	// result: x
   820  	for {
   821  		v_0 := v.Args[0]
   822  		if v_0.Op != OpConst16 {
   823  			break
   824  		}
   825  		if v_0.AuxInt != -1 {
   826  			break
   827  		}
   828  		x := v.Args[1]
   829  		v.reset(OpCopy)
   830  		v.Type = x.Type
   831  		v.AddArg(x)
   832  		return true
   833  	}
   834  	// match: (And16 (Const16 [0]) _)
   835  	// cond:
   836  	// result: (Const16 [0])
   837  	for {
   838  		v_0 := v.Args[0]
   839  		if v_0.Op != OpConst16 {
   840  			break
   841  		}
   842  		if v_0.AuxInt != 0 {
   843  			break
   844  		}
   845  		v.reset(OpConst16)
   846  		v.AuxInt = 0
   847  		return true
   848  	}
   849  	// match: (And16 x (And16 x y))
   850  	// cond:
   851  	// result: (And16 x y)
   852  	for {
   853  		x := v.Args[0]
   854  		v_1 := v.Args[1]
   855  		if v_1.Op != OpAnd16 {
   856  			break
   857  		}
   858  		if x != v_1.Args[0] {
   859  			break
   860  		}
   861  		y := v_1.Args[1]
   862  		v.reset(OpAnd16)
   863  		v.AddArg(x)
   864  		v.AddArg(y)
   865  		return true
   866  	}
   867  	// match: (And16 x (And16 y x))
   868  	// cond:
   869  	// result: (And16 x y)
   870  	for {
   871  		x := v.Args[0]
   872  		v_1 := v.Args[1]
   873  		if v_1.Op != OpAnd16 {
   874  			break
   875  		}
   876  		y := v_1.Args[0]
   877  		if x != v_1.Args[1] {
   878  			break
   879  		}
   880  		v.reset(OpAnd16)
   881  		v.AddArg(x)
   882  		v.AddArg(y)
   883  		return true
   884  	}
   885  	// match: (And16 (And16 x y) x)
   886  	// cond:
   887  	// result: (And16 x y)
   888  	for {
   889  		v_0 := v.Args[0]
   890  		if v_0.Op != OpAnd16 {
   891  			break
   892  		}
   893  		x := v_0.Args[0]
   894  		y := v_0.Args[1]
   895  		if x != v.Args[1] {
   896  			break
   897  		}
   898  		v.reset(OpAnd16)
   899  		v.AddArg(x)
   900  		v.AddArg(y)
   901  		return true
   902  	}
   903  	// match: (And16 (And16 x y) y)
   904  	// cond:
   905  	// result: (And16 x y)
   906  	for {
   907  		v_0 := v.Args[0]
   908  		if v_0.Op != OpAnd16 {
   909  			break
   910  		}
   911  		x := v_0.Args[0]
   912  		y := v_0.Args[1]
   913  		if y != v.Args[1] {
   914  			break
   915  		}
   916  		v.reset(OpAnd16)
   917  		v.AddArg(x)
   918  		v.AddArg(y)
   919  		return true
   920  	}
   921  	return false
   922  }
   923  func rewriteValuegeneric_OpAnd32(v *Value, config *Config) bool {
   924  	b := v.Block
   925  	_ = b
   926  	// match: (And32 x (Const32 <t> [c]))
   927  	// cond: x.Op != OpConst32
   928  	// result: (And32 (Const32 <t> [c]) x)
   929  	for {
   930  		x := v.Args[0]
   931  		v_1 := v.Args[1]
   932  		if v_1.Op != OpConst32 {
   933  			break
   934  		}
   935  		t := v_1.Type
   936  		c := v_1.AuxInt
   937  		if !(x.Op != OpConst32) {
   938  			break
   939  		}
   940  		v.reset(OpAnd32)
   941  		v0 := b.NewValue0(v.Pos, OpConst32, t)
   942  		v0.AuxInt = c
   943  		v.AddArg(v0)
   944  		v.AddArg(x)
   945  		return true
   946  	}
   947  	// match: (And32 x x)
   948  	// cond:
   949  	// result: x
   950  	for {
   951  		x := v.Args[0]
   952  		if x != v.Args[1] {
   953  			break
   954  		}
   955  		v.reset(OpCopy)
   956  		v.Type = x.Type
   957  		v.AddArg(x)
   958  		return true
   959  	}
   960  	// match: (And32 (Const32 [-1]) x)
   961  	// cond:
   962  	// result: x
   963  	for {
   964  		v_0 := v.Args[0]
   965  		if v_0.Op != OpConst32 {
   966  			break
   967  		}
   968  		if v_0.AuxInt != -1 {
   969  			break
   970  		}
   971  		x := v.Args[1]
   972  		v.reset(OpCopy)
   973  		v.Type = x.Type
   974  		v.AddArg(x)
   975  		return true
   976  	}
   977  	// match: (And32 (Const32 [0]) _)
   978  	// cond:
   979  	// result: (Const32 [0])
   980  	for {
   981  		v_0 := v.Args[0]
   982  		if v_0.Op != OpConst32 {
   983  			break
   984  		}
   985  		if v_0.AuxInt != 0 {
   986  			break
   987  		}
   988  		v.reset(OpConst32)
   989  		v.AuxInt = 0
   990  		return true
   991  	}
   992  	// match: (And32 x (And32 x y))
   993  	// cond:
   994  	// result: (And32 x y)
   995  	for {
   996  		x := v.Args[0]
   997  		v_1 := v.Args[1]
   998  		if v_1.Op != OpAnd32 {
   999  			break
  1000  		}
  1001  		if x != v_1.Args[0] {
  1002  			break
  1003  		}
  1004  		y := v_1.Args[1]
  1005  		v.reset(OpAnd32)
  1006  		v.AddArg(x)
  1007  		v.AddArg(y)
  1008  		return true
  1009  	}
  1010  	// match: (And32 x (And32 y x))
  1011  	// cond:
  1012  	// result: (And32 x y)
  1013  	for {
  1014  		x := v.Args[0]
  1015  		v_1 := v.Args[1]
  1016  		if v_1.Op != OpAnd32 {
  1017  			break
  1018  		}
  1019  		y := v_1.Args[0]
  1020  		if x != v_1.Args[1] {
  1021  			break
  1022  		}
  1023  		v.reset(OpAnd32)
  1024  		v.AddArg(x)
  1025  		v.AddArg(y)
  1026  		return true
  1027  	}
  1028  	// match: (And32 (And32 x y) x)
  1029  	// cond:
  1030  	// result: (And32 x y)
  1031  	for {
  1032  		v_0 := v.Args[0]
  1033  		if v_0.Op != OpAnd32 {
  1034  			break
  1035  		}
  1036  		x := v_0.Args[0]
  1037  		y := v_0.Args[1]
  1038  		if x != v.Args[1] {
  1039  			break
  1040  		}
  1041  		v.reset(OpAnd32)
  1042  		v.AddArg(x)
  1043  		v.AddArg(y)
  1044  		return true
  1045  	}
  1046  	// match: (And32 (And32 x y) y)
  1047  	// cond:
  1048  	// result: (And32 x y)
  1049  	for {
  1050  		v_0 := v.Args[0]
  1051  		if v_0.Op != OpAnd32 {
  1052  			break
  1053  		}
  1054  		x := v_0.Args[0]
  1055  		y := v_0.Args[1]
  1056  		if y != v.Args[1] {
  1057  			break
  1058  		}
  1059  		v.reset(OpAnd32)
  1060  		v.AddArg(x)
  1061  		v.AddArg(y)
  1062  		return true
  1063  	}
  1064  	return false
  1065  }
  1066  func rewriteValuegeneric_OpAnd64(v *Value, config *Config) bool {
  1067  	b := v.Block
  1068  	_ = b
  1069  	// match: (And64 x (Const64 <t> [c]))
  1070  	// cond: x.Op != OpConst64
  1071  	// result: (And64 (Const64 <t> [c]) x)
  1072  	for {
  1073  		x := v.Args[0]
  1074  		v_1 := v.Args[1]
  1075  		if v_1.Op != OpConst64 {
  1076  			break
  1077  		}
  1078  		t := v_1.Type
  1079  		c := v_1.AuxInt
  1080  		if !(x.Op != OpConst64) {
  1081  			break
  1082  		}
  1083  		v.reset(OpAnd64)
  1084  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  1085  		v0.AuxInt = c
  1086  		v.AddArg(v0)
  1087  		v.AddArg(x)
  1088  		return true
  1089  	}
  1090  	// match: (And64 x x)
  1091  	// cond:
  1092  	// result: x
  1093  	for {
  1094  		x := v.Args[0]
  1095  		if x != v.Args[1] {
  1096  			break
  1097  		}
  1098  		v.reset(OpCopy)
  1099  		v.Type = x.Type
  1100  		v.AddArg(x)
  1101  		return true
  1102  	}
  1103  	// match: (And64 (Const64 [-1]) x)
  1104  	// cond:
  1105  	// result: x
  1106  	for {
  1107  		v_0 := v.Args[0]
  1108  		if v_0.Op != OpConst64 {
  1109  			break
  1110  		}
  1111  		if v_0.AuxInt != -1 {
  1112  			break
  1113  		}
  1114  		x := v.Args[1]
  1115  		v.reset(OpCopy)
  1116  		v.Type = x.Type
  1117  		v.AddArg(x)
  1118  		return true
  1119  	}
  1120  	// match: (And64 (Const64 [0]) _)
  1121  	// cond:
  1122  	// result: (Const64 [0])
  1123  	for {
  1124  		v_0 := v.Args[0]
  1125  		if v_0.Op != OpConst64 {
  1126  			break
  1127  		}
  1128  		if v_0.AuxInt != 0 {
  1129  			break
  1130  		}
  1131  		v.reset(OpConst64)
  1132  		v.AuxInt = 0
  1133  		return true
  1134  	}
  1135  	// match: (And64 x (And64 x y))
  1136  	// cond:
  1137  	// result: (And64 x y)
  1138  	for {
  1139  		x := v.Args[0]
  1140  		v_1 := v.Args[1]
  1141  		if v_1.Op != OpAnd64 {
  1142  			break
  1143  		}
  1144  		if x != v_1.Args[0] {
  1145  			break
  1146  		}
  1147  		y := v_1.Args[1]
  1148  		v.reset(OpAnd64)
  1149  		v.AddArg(x)
  1150  		v.AddArg(y)
  1151  		return true
  1152  	}
  1153  	// match: (And64 x (And64 y x))
  1154  	// cond:
  1155  	// result: (And64 x y)
  1156  	for {
  1157  		x := v.Args[0]
  1158  		v_1 := v.Args[1]
  1159  		if v_1.Op != OpAnd64 {
  1160  			break
  1161  		}
  1162  		y := v_1.Args[0]
  1163  		if x != v_1.Args[1] {
  1164  			break
  1165  		}
  1166  		v.reset(OpAnd64)
  1167  		v.AddArg(x)
  1168  		v.AddArg(y)
  1169  		return true
  1170  	}
  1171  	// match: (And64 (And64 x y) x)
  1172  	// cond:
  1173  	// result: (And64 x y)
  1174  	for {
  1175  		v_0 := v.Args[0]
  1176  		if v_0.Op != OpAnd64 {
  1177  			break
  1178  		}
  1179  		x := v_0.Args[0]
  1180  		y := v_0.Args[1]
  1181  		if x != v.Args[1] {
  1182  			break
  1183  		}
  1184  		v.reset(OpAnd64)
  1185  		v.AddArg(x)
  1186  		v.AddArg(y)
  1187  		return true
  1188  	}
  1189  	// match: (And64 (And64 x y) y)
  1190  	// cond:
  1191  	// result: (And64 x y)
  1192  	for {
  1193  		v_0 := v.Args[0]
  1194  		if v_0.Op != OpAnd64 {
  1195  			break
  1196  		}
  1197  		x := v_0.Args[0]
  1198  		y := v_0.Args[1]
  1199  		if y != v.Args[1] {
  1200  			break
  1201  		}
  1202  		v.reset(OpAnd64)
  1203  		v.AddArg(x)
  1204  		v.AddArg(y)
  1205  		return true
  1206  	}
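	// The two rules below rewrite an AND with a constant mask as a pair of
	// shifts when the mask is a contiguous run of ones reaching one end of
	// the word (nlz/nto/nlo/ntz count leading zeros, trailing ones, leading
	// ones, and trailing zeros respectively). Clearing the high nlz(y) bits
	// of x is done with a left shift followed by an unsigned right shift;
	// clearing the low ntz(y) bits uses the opposite pair. The >= 32
	// conditions limit this to wide masks, which are presumably more
	// expensive to materialize as constants on some targets.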
  1207  	// match: (And64 <t> (Const64 [y]) x)
  1208  	// cond: nlz(y) + nto(y) == 64 && nto(y) >= 32
  1209  	// result: (Rsh64Ux64 (Lsh64x64 <t> x (Const64 <t> [nlz(y)])) (Const64 <t> [nlz(y)]))
  1210  	for {
  1211  		t := v.Type
  1212  		v_0 := v.Args[0]
  1213  		if v_0.Op != OpConst64 {
  1214  			break
  1215  		}
  1216  		y := v_0.AuxInt
  1217  		x := v.Args[1]
  1218  		if !(nlz(y)+nto(y) == 64 && nto(y) >= 32) {
  1219  			break
  1220  		}
  1221  		v.reset(OpRsh64Ux64)
  1222  		v0 := b.NewValue0(v.Pos, OpLsh64x64, t)
  1223  		v0.AddArg(x)
  1224  		v1 := b.NewValue0(v.Pos, OpConst64, t)
  1225  		v1.AuxInt = nlz(y)
  1226  		v0.AddArg(v1)
  1227  		v.AddArg(v0)
  1228  		v2 := b.NewValue0(v.Pos, OpConst64, t)
  1229  		v2.AuxInt = nlz(y)
  1230  		v.AddArg(v2)
  1231  		return true
  1232  	}
  1233  	// match: (And64 <t> (Const64 [y]) x)
  1234  	// cond: nlo(y) + ntz(y) == 64 && ntz(y) >= 32
  1235  	// result: (Lsh64x64 (Rsh64Ux64 <t> x (Const64 <t> [ntz(y)])) (Const64 <t> [ntz(y)]))
  1236  	for {
  1237  		t := v.Type
  1238  		v_0 := v.Args[0]
  1239  		if v_0.Op != OpConst64 {
  1240  			break
  1241  		}
  1242  		y := v_0.AuxInt
  1243  		x := v.Args[1]
  1244  		if !(nlo(y)+ntz(y) == 64 && ntz(y) >= 32) {
  1245  			break
  1246  		}
  1247  		v.reset(OpLsh64x64)
  1248  		v0 := b.NewValue0(v.Pos, OpRsh64Ux64, t)
  1249  		v0.AddArg(x)
  1250  		v1 := b.NewValue0(v.Pos, OpConst64, t)
  1251  		v1.AuxInt = ntz(y)
  1252  		v0.AddArg(v1)
  1253  		v.AddArg(v0)
  1254  		v2 := b.NewValue0(v.Pos, OpConst64, t)
  1255  		v2.AuxInt = ntz(y)
  1256  		v.AddArg(v2)
  1257  		return true
  1258  	}
  1259  	return false
  1260  }
  1261  func rewriteValuegeneric_OpAnd8(v *Value, config *Config) bool {
  1262  	b := v.Block
  1263  	_ = b
  1264  	// match: (And8  x (Const8  <t> [c]))
  1265  	// cond: x.Op != OpConst8
  1266  	// result: (And8  (Const8  <t> [c]) x)
  1267  	for {
  1268  		x := v.Args[0]
  1269  		v_1 := v.Args[1]
  1270  		if v_1.Op != OpConst8 {
  1271  			break
  1272  		}
  1273  		t := v_1.Type
  1274  		c := v_1.AuxInt
  1275  		if !(x.Op != OpConst8) {
  1276  			break
  1277  		}
  1278  		v.reset(OpAnd8)
  1279  		v0 := b.NewValue0(v.Pos, OpConst8, t)
  1280  		v0.AuxInt = c
  1281  		v.AddArg(v0)
  1282  		v.AddArg(x)
  1283  		return true
  1284  	}
  1285  	// match: (And8  x x)
  1286  	// cond:
  1287  	// result: x
  1288  	for {
  1289  		x := v.Args[0]
  1290  		if x != v.Args[1] {
  1291  			break
  1292  		}
  1293  		v.reset(OpCopy)
  1294  		v.Type = x.Type
  1295  		v.AddArg(x)
  1296  		return true
  1297  	}
  1298  	// match: (And8  (Const8  [-1]) x)
  1299  	// cond:
  1300  	// result: x
  1301  	for {
  1302  		v_0 := v.Args[0]
  1303  		if v_0.Op != OpConst8 {
  1304  			break
  1305  		}
  1306  		if v_0.AuxInt != -1 {
  1307  			break
  1308  		}
  1309  		x := v.Args[1]
  1310  		v.reset(OpCopy)
  1311  		v.Type = x.Type
  1312  		v.AddArg(x)
  1313  		return true
  1314  	}
  1315  	// match: (And8  (Const8  [0]) _)
  1316  	// cond:
  1317  	// result: (Const8  [0])
  1318  	for {
  1319  		v_0 := v.Args[0]
  1320  		if v_0.Op != OpConst8 {
  1321  			break
  1322  		}
  1323  		if v_0.AuxInt != 0 {
  1324  			break
  1325  		}
  1326  		v.reset(OpConst8)
  1327  		v.AuxInt = 0
  1328  		return true
  1329  	}
  1330  	// match: (And8  x (And8  x y))
  1331  	// cond:
  1332  	// result: (And8  x y)
  1333  	for {
  1334  		x := v.Args[0]
  1335  		v_1 := v.Args[1]
  1336  		if v_1.Op != OpAnd8 {
  1337  			break
  1338  		}
  1339  		if x != v_1.Args[0] {
  1340  			break
  1341  		}
  1342  		y := v_1.Args[1]
  1343  		v.reset(OpAnd8)
  1344  		v.AddArg(x)
  1345  		v.AddArg(y)
  1346  		return true
  1347  	}
  1348  	// match: (And8  x (And8  y x))
  1349  	// cond:
  1350  	// result: (And8  x y)
  1351  	for {
  1352  		x := v.Args[0]
  1353  		v_1 := v.Args[1]
  1354  		if v_1.Op != OpAnd8 {
  1355  			break
  1356  		}
  1357  		y := v_1.Args[0]
  1358  		if x != v_1.Args[1] {
  1359  			break
  1360  		}
  1361  		v.reset(OpAnd8)
  1362  		v.AddArg(x)
  1363  		v.AddArg(y)
  1364  		return true
  1365  	}
  1366  	// match: (And8  (And8  x y) x)
  1367  	// cond:
  1368  	// result: (And8  x y)
  1369  	for {
  1370  		v_0 := v.Args[0]
  1371  		if v_0.Op != OpAnd8 {
  1372  			break
  1373  		}
  1374  		x := v_0.Args[0]
  1375  		y := v_0.Args[1]
  1376  		if x != v.Args[1] {
  1377  			break
  1378  		}
  1379  		v.reset(OpAnd8)
  1380  		v.AddArg(x)
  1381  		v.AddArg(y)
  1382  		return true
  1383  	}
  1384  	// match: (And8  (And8  x y) y)
  1385  	// cond:
  1386  	// result: (And8  x y)
  1387  	for {
  1388  		v_0 := v.Args[0]
  1389  		if v_0.Op != OpAnd8 {
  1390  			break
  1391  		}
  1392  		x := v_0.Args[0]
  1393  		y := v_0.Args[1]
  1394  		if y != v.Args[1] {
  1395  			break
  1396  		}
  1397  		v.reset(OpAnd8)
  1398  		v.AddArg(x)
  1399  		v.AddArg(y)
  1400  		return true
  1401  	}
  1402  	return false
  1403  }
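// rewriteValuegeneric_OpArg decomposes multi-word arguments that can be kept
// in SSA form (strings, slices, interfaces, complex values, and small structs
// and arrays) into one Arg per component, placed at the appropriate offsets
// within the original argument.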
  1404  func rewriteValuegeneric_OpArg(v *Value, config *Config) bool {
  1405  	b := v.Block
  1406  	_ = b
  1407  	// match: (Arg {n} [off])
  1408  	// cond: v.Type.IsString()
  1409  	// result: (StringMake     (Arg <config.fe.TypeBytePtr()> {n} [off])     (Arg <config.fe.TypeInt()> {n} [off+config.PtrSize]))
  1410  	for {
  1411  		off := v.AuxInt
  1412  		n := v.Aux
  1413  		if !(v.Type.IsString()) {
  1414  			break
  1415  		}
  1416  		v.reset(OpStringMake)
  1417  		v0 := b.NewValue0(v.Pos, OpArg, config.fe.TypeBytePtr())
  1418  		v0.AuxInt = off
  1419  		v0.Aux = n
  1420  		v.AddArg(v0)
  1421  		v1 := b.NewValue0(v.Pos, OpArg, config.fe.TypeInt())
  1422  		v1.AuxInt = off + config.PtrSize
  1423  		v1.Aux = n
  1424  		v.AddArg(v1)
  1425  		return true
  1426  	}
  1427  	// match: (Arg {n} [off])
  1428  	// cond: v.Type.IsSlice()
  1429  	// result: (SliceMake     (Arg <v.Type.ElemType().PtrTo()> {n} [off])     (Arg <config.fe.TypeInt()> {n} [off+config.PtrSize])     (Arg <config.fe.TypeInt()> {n} [off+2*config.PtrSize]))
  1430  	for {
  1431  		off := v.AuxInt
  1432  		n := v.Aux
  1433  		if !(v.Type.IsSlice()) {
  1434  			break
  1435  		}
  1436  		v.reset(OpSliceMake)
  1437  		v0 := b.NewValue0(v.Pos, OpArg, v.Type.ElemType().PtrTo())
  1438  		v0.AuxInt = off
  1439  		v0.Aux = n
  1440  		v.AddArg(v0)
  1441  		v1 := b.NewValue0(v.Pos, OpArg, config.fe.TypeInt())
  1442  		v1.AuxInt = off + config.PtrSize
  1443  		v1.Aux = n
  1444  		v.AddArg(v1)
  1445  		v2 := b.NewValue0(v.Pos, OpArg, config.fe.TypeInt())
  1446  		v2.AuxInt = off + 2*config.PtrSize
  1447  		v2.Aux = n
  1448  		v.AddArg(v2)
  1449  		return true
  1450  	}
  1451  	// match: (Arg {n} [off])
  1452  	// cond: v.Type.IsInterface()
  1453  	// result: (IMake     (Arg <config.fe.TypeBytePtr()> {n} [off])     (Arg <config.fe.TypeBytePtr()> {n} [off+config.PtrSize]))
  1454  	for {
  1455  		off := v.AuxInt
  1456  		n := v.Aux
  1457  		if !(v.Type.IsInterface()) {
  1458  			break
  1459  		}
  1460  		v.reset(OpIMake)
  1461  		v0 := b.NewValue0(v.Pos, OpArg, config.fe.TypeBytePtr())
  1462  		v0.AuxInt = off
  1463  		v0.Aux = n
  1464  		v.AddArg(v0)
  1465  		v1 := b.NewValue0(v.Pos, OpArg, config.fe.TypeBytePtr())
  1466  		v1.AuxInt = off + config.PtrSize
  1467  		v1.Aux = n
  1468  		v.AddArg(v1)
  1469  		return true
  1470  	}
  1471  	// match: (Arg {n} [off])
  1472  	// cond: v.Type.IsComplex() && v.Type.Size() == 16
  1473  	// result: (ComplexMake     (Arg <config.fe.TypeFloat64()> {n} [off])     (Arg <config.fe.TypeFloat64()> {n} [off+8]))
  1474  	for {
  1475  		off := v.AuxInt
  1476  		n := v.Aux
  1477  		if !(v.Type.IsComplex() && v.Type.Size() == 16) {
  1478  			break
  1479  		}
  1480  		v.reset(OpComplexMake)
  1481  		v0 := b.NewValue0(v.Pos, OpArg, config.fe.TypeFloat64())
  1482  		v0.AuxInt = off
  1483  		v0.Aux = n
  1484  		v.AddArg(v0)
  1485  		v1 := b.NewValue0(v.Pos, OpArg, config.fe.TypeFloat64())
  1486  		v1.AuxInt = off + 8
  1487  		v1.Aux = n
  1488  		v.AddArg(v1)
  1489  		return true
  1490  	}
  1491  	// match: (Arg {n} [off])
  1492  	// cond: v.Type.IsComplex() && v.Type.Size() == 8
  1493  	// result: (ComplexMake     (Arg <config.fe.TypeFloat32()> {n} [off])     (Arg <config.fe.TypeFloat32()> {n} [off+4]))
  1494  	for {
  1495  		off := v.AuxInt
  1496  		n := v.Aux
  1497  		if !(v.Type.IsComplex() && v.Type.Size() == 8) {
  1498  			break
  1499  		}
  1500  		v.reset(OpComplexMake)
  1501  		v0 := b.NewValue0(v.Pos, OpArg, config.fe.TypeFloat32())
  1502  		v0.AuxInt = off
  1503  		v0.Aux = n
  1504  		v.AddArg(v0)
  1505  		v1 := b.NewValue0(v.Pos, OpArg, config.fe.TypeFloat32())
  1506  		v1.AuxInt = off + 4
  1507  		v1.Aux = n
  1508  		v.AddArg(v1)
  1509  		return true
  1510  	}
  1511  	// match: (Arg <t>)
  1512  	// cond: t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t)
  1513  	// result: (StructMake0)
  1514  	for {
  1515  		t := v.Type
  1516  		if !(t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t)) {
  1517  			break
  1518  		}
  1519  		v.reset(OpStructMake0)
  1520  		return true
  1521  	}
  1522  	// match: (Arg <t> {n} [off])
  1523  	// cond: t.IsStruct() && t.NumFields() == 1 && config.fe.CanSSA(t)
  1524  	// result: (StructMake1     (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)]))
  1525  	for {
  1526  		t := v.Type
  1527  		off := v.AuxInt
  1528  		n := v.Aux
  1529  		if !(t.IsStruct() && t.NumFields() == 1 && config.fe.CanSSA(t)) {
  1530  			break
  1531  		}
  1532  		v.reset(OpStructMake1)
  1533  		v0 := b.NewValue0(v.Pos, OpArg, t.FieldType(0))
  1534  		v0.AuxInt = off + t.FieldOff(0)
  1535  		v0.Aux = n
  1536  		v.AddArg(v0)
  1537  		return true
  1538  	}
  1539  	// match: (Arg <t> {n} [off])
  1540  	// cond: t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t)
  1541  	// result: (StructMake2     (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)])     (Arg <t.FieldType(1)> {n} [off+t.FieldOff(1)]))
  1542  	for {
  1543  		t := v.Type
  1544  		off := v.AuxInt
  1545  		n := v.Aux
  1546  		if !(t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t)) {
  1547  			break
  1548  		}
  1549  		v.reset(OpStructMake2)
  1550  		v0 := b.NewValue0(v.Pos, OpArg, t.FieldType(0))
  1551  		v0.AuxInt = off + t.FieldOff(0)
  1552  		v0.Aux = n
  1553  		v.AddArg(v0)
  1554  		v1 := b.NewValue0(v.Pos, OpArg, t.FieldType(1))
  1555  		v1.AuxInt = off + t.FieldOff(1)
  1556  		v1.Aux = n
  1557  		v.AddArg(v1)
  1558  		return true
  1559  	}
  1560  	// match: (Arg <t> {n} [off])
  1561  	// cond: t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t)
  1562  	// result: (StructMake3     (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)])     (Arg <t.FieldType(1)> {n} [off+t.FieldOff(1)])     (Arg <t.FieldType(2)> {n} [off+t.FieldOff(2)]))
  1563  	for {
  1564  		t := v.Type
  1565  		off := v.AuxInt
  1566  		n := v.Aux
  1567  		if !(t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t)) {
  1568  			break
  1569  		}
  1570  		v.reset(OpStructMake3)
  1571  		v0 := b.NewValue0(v.Pos, OpArg, t.FieldType(0))
  1572  		v0.AuxInt = off + t.FieldOff(0)
  1573  		v0.Aux = n
  1574  		v.AddArg(v0)
  1575  		v1 := b.NewValue0(v.Pos, OpArg, t.FieldType(1))
  1576  		v1.AuxInt = off + t.FieldOff(1)
  1577  		v1.Aux = n
  1578  		v.AddArg(v1)
  1579  		v2 := b.NewValue0(v.Pos, OpArg, t.FieldType(2))
  1580  		v2.AuxInt = off + t.FieldOff(2)
  1581  		v2.Aux = n
  1582  		v.AddArg(v2)
  1583  		return true
  1584  	}
  1585  	// match: (Arg <t> {n} [off])
  1586  	// cond: t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t)
  1587  	// result: (StructMake4     (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)])     (Arg <t.FieldType(1)> {n} [off+t.FieldOff(1)])     (Arg <t.FieldType(2)> {n} [off+t.FieldOff(2)])     (Arg <t.FieldType(3)> {n} [off+t.FieldOff(3)]))
  1588  	for {
  1589  		t := v.Type
  1590  		off := v.AuxInt
  1591  		n := v.Aux
  1592  		if !(t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t)) {
  1593  			break
  1594  		}
  1595  		v.reset(OpStructMake4)
  1596  		v0 := b.NewValue0(v.Pos, OpArg, t.FieldType(0))
  1597  		v0.AuxInt = off + t.FieldOff(0)
  1598  		v0.Aux = n
  1599  		v.AddArg(v0)
  1600  		v1 := b.NewValue0(v.Pos, OpArg, t.FieldType(1))
  1601  		v1.AuxInt = off + t.FieldOff(1)
  1602  		v1.Aux = n
  1603  		v.AddArg(v1)
  1604  		v2 := b.NewValue0(v.Pos, OpArg, t.FieldType(2))
  1605  		v2.AuxInt = off + t.FieldOff(2)
  1606  		v2.Aux = n
  1607  		v.AddArg(v2)
  1608  		v3 := b.NewValue0(v.Pos, OpArg, t.FieldType(3))
  1609  		v3.AuxInt = off + t.FieldOff(3)
  1610  		v3.Aux = n
  1611  		v.AddArg(v3)
  1612  		return true
  1613  	}
  1614  	// match: (Arg <t>)
  1615  	// cond: t.IsArray() && t.NumElem() == 0
  1616  	// result: (ArrayMake0)
  1617  	for {
  1618  		t := v.Type
  1619  		if !(t.IsArray() && t.NumElem() == 0) {
  1620  			break
  1621  		}
  1622  		v.reset(OpArrayMake0)
  1623  		return true
  1624  	}
  1625  	// match: (Arg <t> {n} [off])
  1626  	// cond: t.IsArray() && t.NumElem() == 1 && config.fe.CanSSA(t)
  1627  	// result: (ArrayMake1 (Arg <t.ElemType()> {n} [off]))
  1628  	for {
  1629  		t := v.Type
  1630  		off := v.AuxInt
  1631  		n := v.Aux
  1632  		if !(t.IsArray() && t.NumElem() == 1 && config.fe.CanSSA(t)) {
  1633  			break
  1634  		}
  1635  		v.reset(OpArrayMake1)
  1636  		v0 := b.NewValue0(v.Pos, OpArg, t.ElemType())
  1637  		v0.AuxInt = off
  1638  		v0.Aux = n
  1639  		v.AddArg(v0)
  1640  		return true
  1641  	}
  1642  	return false
  1643  }
  1644  func rewriteValuegeneric_OpArraySelect(v *Value, config *Config) bool {
  1645  	b := v.Block
  1646  	_ = b
  1647  	// match: (ArraySelect (ArrayMake1 x))
  1648  	// cond:
  1649  	// result: x
  1650  	for {
  1651  		v_0 := v.Args[0]
  1652  		if v_0.Op != OpArrayMake1 {
  1653  			break
  1654  		}
  1655  		x := v_0.Args[0]
  1656  		v.reset(OpCopy)
  1657  		v.Type = x.Type
  1658  		v.AddArg(x)
  1659  		return true
  1660  	}
  1661  	// match: (ArraySelect [0] (Load ptr mem))
  1662  	// cond:
  1663  	// result: (Load ptr mem)
  1664  	for {
  1665  		if v.AuxInt != 0 {
  1666  			break
  1667  		}
  1668  		v_0 := v.Args[0]
  1669  		if v_0.Op != OpLoad {
  1670  			break
  1671  		}
  1672  		ptr := v_0.Args[0]
  1673  		mem := v_0.Args[1]
  1674  		v.reset(OpLoad)
  1675  		v.AddArg(ptr)
  1676  		v.AddArg(mem)
  1677  		return true
  1678  	}
  1679  	// match: (ArraySelect [0] x:(IData _))
  1680  	// cond:
  1681  	// result: x
  1682  	for {
  1683  		if v.AuxInt != 0 {
  1684  			break
  1685  		}
  1686  		x := v.Args[0]
  1687  		if x.Op != OpIData {
  1688  			break
  1689  		}
  1690  		v.reset(OpCopy)
  1691  		v.Type = x.Type
  1692  		v.AddArg(x)
  1693  		return true
  1694  	}
  1695  	return false
  1696  }
  1697  func rewriteValuegeneric_OpCom16(v *Value, config *Config) bool {
  1698  	b := v.Block
  1699  	_ = b
  1700  	// match: (Com16 (Com16 x))
  1701  	// cond:
  1702  	// result: x
  1703  	for {
  1704  		v_0 := v.Args[0]
  1705  		if v_0.Op != OpCom16 {
  1706  			break
  1707  		}
  1708  		x := v_0.Args[0]
  1709  		v.reset(OpCopy)
  1710  		v.Type = x.Type
  1711  		v.AddArg(x)
  1712  		return true
  1713  	}
  1714  	return false
  1715  }
  1716  func rewriteValuegeneric_OpCom32(v *Value, config *Config) bool {
  1717  	b := v.Block
  1718  	_ = b
  1719  	// match: (Com32 (Com32 x))
  1720  	// cond:
  1721  	// result: x
  1722  	for {
  1723  		v_0 := v.Args[0]
  1724  		if v_0.Op != OpCom32 {
  1725  			break
  1726  		}
  1727  		x := v_0.Args[0]
  1728  		v.reset(OpCopy)
  1729  		v.Type = x.Type
  1730  		v.AddArg(x)
  1731  		return true
  1732  	}
  1733  	return false
  1734  }
  1735  func rewriteValuegeneric_OpCom64(v *Value, config *Config) bool {
  1736  	b := v.Block
  1737  	_ = b
  1738  	// match: (Com64 (Com64 x))
  1739  	// cond:
  1740  	// result: x
  1741  	for {
  1742  		v_0 := v.Args[0]
  1743  		if v_0.Op != OpCom64 {
  1744  			break
  1745  		}
  1746  		x := v_0.Args[0]
  1747  		v.reset(OpCopy)
  1748  		v.Type = x.Type
  1749  		v.AddArg(x)
  1750  		return true
  1751  	}
  1752  	return false
  1753  }
  1754  func rewriteValuegeneric_OpCom8(v *Value, config *Config) bool {
  1755  	b := v.Block
  1756  	_ = b
  1757  	// match: (Com8  (Com8  x))
  1758  	// cond:
  1759  	// result: x
  1760  	for {
  1761  		v_0 := v.Args[0]
  1762  		if v_0.Op != OpCom8 {
  1763  			break
  1764  		}
  1765  		x := v_0.Args[0]
  1766  		v.reset(OpCopy)
  1767  		v.Type = x.Type
  1768  		v.AddArg(x)
  1769  		return true
  1770  	}
  1771  	return false
  1772  }
  1773  func rewriteValuegeneric_OpConstInterface(v *Value, config *Config) bool {
  1774  	b := v.Block
  1775  	_ = b
  1776  	// match: (ConstInterface)
  1777  	// cond:
  1778  	// result: (IMake     (ConstNil <config.fe.TypeBytePtr()>)     (ConstNil <config.fe.TypeBytePtr()>))
  1779  	for {
  1780  		v.reset(OpIMake)
  1781  		v0 := b.NewValue0(v.Pos, OpConstNil, config.fe.TypeBytePtr())
  1782  		v.AddArg(v0)
  1783  		v1 := b.NewValue0(v.Pos, OpConstNil, config.fe.TypeBytePtr())
  1784  		v.AddArg(v1)
  1785  		return true
  1786  	}
  1787  }
  1788  func rewriteValuegeneric_OpConstSlice(v *Value, config *Config) bool {
  1789  	b := v.Block
  1790  	_ = b
  1791  	// match: (ConstSlice)
  1792  	// cond: config.PtrSize == 4
  1793  	// result: (SliceMake     (ConstNil <v.Type.ElemType().PtrTo()>)     (Const32 <config.fe.TypeInt()> [0])     (Const32 <config.fe.TypeInt()> [0]))
  1794  	for {
  1795  		if !(config.PtrSize == 4) {
  1796  			break
  1797  		}
  1798  		v.reset(OpSliceMake)
  1799  		v0 := b.NewValue0(v.Pos, OpConstNil, v.Type.ElemType().PtrTo())
  1800  		v.AddArg(v0)
  1801  		v1 := b.NewValue0(v.Pos, OpConst32, config.fe.TypeInt())
  1802  		v1.AuxInt = 0
  1803  		v.AddArg(v1)
  1804  		v2 := b.NewValue0(v.Pos, OpConst32, config.fe.TypeInt())
  1805  		v2.AuxInt = 0
  1806  		v.AddArg(v2)
  1807  		return true
  1808  	}
  1809  	// match: (ConstSlice)
  1810  	// cond: config.PtrSize == 8
  1811  	// result: (SliceMake     (ConstNil <v.Type.ElemType().PtrTo()>)     (Const64 <config.fe.TypeInt()> [0])     (Const64 <config.fe.TypeInt()> [0]))
  1812  	for {
  1813  		if !(config.PtrSize == 8) {
  1814  			break
  1815  		}
  1816  		v.reset(OpSliceMake)
  1817  		v0 := b.NewValue0(v.Pos, OpConstNil, v.Type.ElemType().PtrTo())
  1818  		v.AddArg(v0)
  1819  		v1 := b.NewValue0(v.Pos, OpConst64, config.fe.TypeInt())
  1820  		v1.AuxInt = 0
  1821  		v.AddArg(v1)
  1822  		v2 := b.NewValue0(v.Pos, OpConst64, config.fe.TypeInt())
  1823  		v2.AuxInt = 0
  1824  		v.AddArg(v2)
  1825  		return true
  1826  	}
  1827  	return false
  1828  }
  1829  func rewriteValuegeneric_OpConstString(v *Value, config *Config) bool {
  1830  	b := v.Block
  1831  	_ = b
  1832  	// match: (ConstString {s})
  1833  	// cond: config.PtrSize == 4 && s.(string) == ""
  1834  	// result: (StringMake (ConstNil) (Const32 <config.fe.TypeInt()> [0]))
  1835  	for {
  1836  		s := v.Aux
  1837  		if !(config.PtrSize == 4 && s.(string) == "") {
  1838  			break
  1839  		}
  1840  		v.reset(OpStringMake)
  1841  		v0 := b.NewValue0(v.Pos, OpConstNil, config.fe.TypeBytePtr())
  1842  		v.AddArg(v0)
  1843  		v1 := b.NewValue0(v.Pos, OpConst32, config.fe.TypeInt())
  1844  		v1.AuxInt = 0
  1845  		v.AddArg(v1)
  1846  		return true
  1847  	}
  1848  	// match: (ConstString {s})
  1849  	// cond: config.PtrSize == 8 && s.(string) == ""
  1850  	// result: (StringMake (ConstNil) (Const64 <config.fe.TypeInt()> [0]))
  1851  	for {
  1852  		s := v.Aux
  1853  		if !(config.PtrSize == 8 && s.(string) == "") {
  1854  			break
  1855  		}
  1856  		v.reset(OpStringMake)
  1857  		v0 := b.NewValue0(v.Pos, OpConstNil, config.fe.TypeBytePtr())
  1858  		v.AddArg(v0)
  1859  		v1 := b.NewValue0(v.Pos, OpConst64, config.fe.TypeInt())
  1860  		v1.AuxInt = 0
  1861  		v.AddArg(v1)
  1862  		return true
  1863  	}
  1864  	// match: (ConstString {s})
  1865  	// cond: config.PtrSize == 4 && s.(string) != ""
  1866  	// result: (StringMake     (Addr <config.fe.TypeBytePtr()> {config.fe.StringData(s.(string))}       (SB))     (Const32 <config.fe.TypeInt()> [int64(len(s.(string)))]))
  1867  	for {
  1868  		s := v.Aux
  1869  		if !(config.PtrSize == 4 && s.(string) != "") {
  1870  			break
  1871  		}
  1872  		v.reset(OpStringMake)
  1873  		v0 := b.NewValue0(v.Pos, OpAddr, config.fe.TypeBytePtr())
  1874  		v0.Aux = config.fe.StringData(s.(string))
  1875  		v1 := b.NewValue0(v.Pos, OpSB, config.fe.TypeUintptr())
  1876  		v0.AddArg(v1)
  1877  		v.AddArg(v0)
  1878  		v2 := b.NewValue0(v.Pos, OpConst32, config.fe.TypeInt())
  1879  		v2.AuxInt = int64(len(s.(string)))
  1880  		v.AddArg(v2)
  1881  		return true
  1882  	}
  1883  	// match: (ConstString {s})
  1884  	// cond: config.PtrSize == 8 && s.(string) != ""
  1885  	// result: (StringMake     (Addr <config.fe.TypeBytePtr()> {config.fe.StringData(s.(string))}       (SB))     (Const64 <config.fe.TypeInt()> [int64(len(s.(string)))]))
  1886  	for {
  1887  		s := v.Aux
  1888  		if !(config.PtrSize == 8 && s.(string) != "") {
  1889  			break
  1890  		}
  1891  		v.reset(OpStringMake)
  1892  		v0 := b.NewValue0(v.Pos, OpAddr, config.fe.TypeBytePtr())
  1893  		v0.Aux = config.fe.StringData(s.(string))
  1894  		v1 := b.NewValue0(v.Pos, OpSB, config.fe.TypeUintptr())
  1895  		v0.AddArg(v1)
  1896  		v.AddArg(v0)
  1897  		v2 := b.NewValue0(v.Pos, OpConst64, config.fe.TypeInt())
  1898  		v2.AuxInt = int64(len(s.(string)))
  1899  		v.AddArg(v2)
  1900  		return true
  1901  	}
  1902  	return false
  1903  }
  1904  func rewriteValuegeneric_OpConvert(v *Value, config *Config) bool {
  1905  	b := v.Block
  1906  	_ = b
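        	// Convert marks a uintptr <-> unsafe.Pointer round trip so the pointer
        	// stays visible to the garbage collector.  When pointer arithmetic is
        	// threaded through two Converts that share the same memory argument,
        	// the Converts cancel and the addition (or the inner pointer itself)
        	// can be used directly.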
  1907  	// match: (Convert (Add64 (Convert ptr mem) off) mem)
  1908  	// cond:
  1909  	// result: (Add64 ptr off)
  1910  	for {
  1911  		v_0 := v.Args[0]
  1912  		if v_0.Op != OpAdd64 {
  1913  			break
  1914  		}
  1915  		v_0_0 := v_0.Args[0]
  1916  		if v_0_0.Op != OpConvert {
  1917  			break
  1918  		}
  1919  		ptr := v_0_0.Args[0]
  1920  		mem := v_0_0.Args[1]
  1921  		off := v_0.Args[1]
  1922  		if mem != v.Args[1] {
  1923  			break
  1924  		}
  1925  		v.reset(OpAdd64)
  1926  		v.AddArg(ptr)
  1927  		v.AddArg(off)
  1928  		return true
  1929  	}
  1930  	// match: (Convert (Add64 off (Convert ptr mem)) mem)
  1931  	// cond:
  1932  	// result: (Add64 ptr off)
  1933  	for {
  1934  		v_0 := v.Args[0]
  1935  		if v_0.Op != OpAdd64 {
  1936  			break
  1937  		}
  1938  		off := v_0.Args[0]
  1939  		v_0_1 := v_0.Args[1]
  1940  		if v_0_1.Op != OpConvert {
  1941  			break
  1942  		}
  1943  		ptr := v_0_1.Args[0]
  1944  		mem := v_0_1.Args[1]
  1945  		if mem != v.Args[1] {
  1946  			break
  1947  		}
  1948  		v.reset(OpAdd64)
  1949  		v.AddArg(ptr)
  1950  		v.AddArg(off)
  1951  		return true
  1952  	}
  1953  	// match: (Convert (Convert ptr mem) mem)
  1954  	// cond:
  1955  	// result: ptr
  1956  	for {
  1957  		v_0 := v.Args[0]
  1958  		if v_0.Op != OpConvert {
  1959  			break
  1960  		}
  1961  		ptr := v_0.Args[0]
  1962  		mem := v_0.Args[1]
  1963  		if mem != v.Args[1] {
  1964  			break
  1965  		}
  1966  		v.reset(OpCopy)
  1967  		v.Type = ptr.Type
  1968  		v.AddArg(ptr)
  1969  		return true
  1970  	}
  1971  	return false
  1972  }
  1973  func rewriteValuegeneric_OpCvt32Fto64F(v *Value, config *Config) bool {
  1974  	b := v.Block
  1975  	_ = b
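        	// Constant float conversions fold at compile time.  Const32F already
        	// stores its value as a float64 bit pattern in AuxInt, so widening to
        	// 64 bits (which is exact) reuses the payload unchanged; the narrowing
        	// direction below re-rounds through i2f32 before re-encoding with f2i.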
  1976  	// match: (Cvt32Fto64F (Const32F [c]))
  1977  	// cond:
  1978  	// result: (Const64F [c])
  1979  	for {
  1980  		v_0 := v.Args[0]
  1981  		if v_0.Op != OpConst32F {
  1982  			break
  1983  		}
  1984  		c := v_0.AuxInt
  1985  		v.reset(OpConst64F)
  1986  		v.AuxInt = c
  1987  		return true
  1988  	}
  1989  	return false
  1990  }
  1991  func rewriteValuegeneric_OpCvt64Fto32F(v *Value, config *Config) bool {
  1992  	b := v.Block
  1993  	_ = b
  1994  	// match: (Cvt64Fto32F (Const64F [c]))
  1995  	// cond:
  1996  	// result: (Const32F [f2i(float64(i2f32(c)))])
  1997  	for {
  1998  		v_0 := v.Args[0]
  1999  		if v_0.Op != OpConst64F {
  2000  			break
  2001  		}
  2002  		c := v_0.AuxInt
  2003  		v.reset(OpConst32F)
  2004  		v.AuxInt = f2i(float64(i2f32(c)))
  2005  		return true
  2006  	}
  2007  	return false
  2008  }
  2009  func rewriteValuegeneric_OpDiv32F(v *Value, config *Config) bool {
  2010  	b := v.Block
  2011  	_ = b
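        	// Division by a floating-point constant is folded only when the result
        	// is exact for every input: dividing by +1 is the identity and dividing
        	// by -1 is negation.  f2i encodes the constant's bit pattern in AuxInt,
        	// so the checks below compare against the encoded forms of 1 and -1.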
  2012  	// match: (Div32F x (Const32F [f2i(1)]))
  2013  	// cond:
  2014  	// result: x
  2015  	for {
  2016  		x := v.Args[0]
  2017  		v_1 := v.Args[1]
  2018  		if v_1.Op != OpConst32F {
  2019  			break
  2020  		}
  2021  		if v_1.AuxInt != f2i(1) {
  2022  			break
  2023  		}
  2024  		v.reset(OpCopy)
  2025  		v.Type = x.Type
  2026  		v.AddArg(x)
  2027  		return true
  2028  	}
  2029  	// match: (Div32F x (Const32F [f2i(-1)]))
  2030  	// cond:
  2031  	// result: (Neg32F x)
  2032  	for {
  2033  		x := v.Args[0]
  2034  		v_1 := v.Args[1]
  2035  		if v_1.Op != OpConst32F {
  2036  			break
  2037  		}
  2038  		if v_1.AuxInt != f2i(-1) {
  2039  			break
  2040  		}
  2041  		v.reset(OpNeg32F)
  2042  		v.AddArg(x)
  2043  		return true
  2044  	}
  2045  	return false
  2046  }
  2047  func rewriteValuegeneric_OpDiv64(v *Value, config *Config) bool {
  2048  	b := v.Block
  2049  	_ = b
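        	// Signed division by a constant is strength-reduced with the usual
        	// "magic number" technique: multiply by smagic64m(c), keep the high 64
        	// bits (Hmul64), arithmetic-shift right by smagic64s(c), then subtract
        	// x>>63 so negative dividends round toward zero.  When the magic
        	// multiplier overflows into the sign bit (smagic64m(c) < 0) an extra
        	// Add64 of x compensates, and for negative divisors the quotient is
        	// negated.  Roughly: x/c == ((smagic64m(c)*x) >> (64+smagic64s(c))) - (x>>63).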
  2050  	// match: (Div64 <t> x (Const64 [c]))
  2051  	// cond: c > 0 && smagic64ok(c) && smagic64m(c) > 0
  2052  	// result: (Sub64 <t>     (Rsh64x64 <t>       (Hmul64 <t>         (Const64 <t> [smagic64m(c)])         x)       (Const64 <t> [smagic64s(c)]))     (Rsh64x64 <t>       x       (Const64 <t> [63])))
  2053  	for {
  2054  		t := v.Type
  2055  		x := v.Args[0]
  2056  		v_1 := v.Args[1]
  2057  		if v_1.Op != OpConst64 {
  2058  			break
  2059  		}
  2060  		c := v_1.AuxInt
  2061  		if !(c > 0 && smagic64ok(c) && smagic64m(c) > 0) {
  2062  			break
  2063  		}
  2064  		v.reset(OpSub64)
  2065  		v.Type = t
  2066  		v0 := b.NewValue0(v.Pos, OpRsh64x64, t)
  2067  		v1 := b.NewValue0(v.Pos, OpHmul64, t)
  2068  		v2 := b.NewValue0(v.Pos, OpConst64, t)
  2069  		v2.AuxInt = smagic64m(c)
  2070  		v1.AddArg(v2)
  2071  		v1.AddArg(x)
  2072  		v0.AddArg(v1)
  2073  		v3 := b.NewValue0(v.Pos, OpConst64, t)
  2074  		v3.AuxInt = smagic64s(c)
  2075  		v0.AddArg(v3)
  2076  		v.AddArg(v0)
  2077  		v4 := b.NewValue0(v.Pos, OpRsh64x64, t)
  2078  		v4.AddArg(x)
  2079  		v5 := b.NewValue0(v.Pos, OpConst64, t)
  2080  		v5.AuxInt = 63
  2081  		v4.AddArg(v5)
  2082  		v.AddArg(v4)
  2083  		return true
  2084  	}
  2085  	// match: (Div64 <t> x (Const64 [c]))
  2086  	// cond: c > 0 && smagic64ok(c) && smagic64m(c) < 0
  2087  	// result: (Sub64 <t>     (Rsh64x64 <t>       (Add64 <t>         (Hmul64 <t>           (Const64 <t> [smagic64m(c)])           x)         x)       (Const64 <t> [smagic64s(c)]))     (Rsh64x64 <t>       x       (Const64 <t> [63])))
  2088  	for {
  2089  		t := v.Type
  2090  		x := v.Args[0]
  2091  		v_1 := v.Args[1]
  2092  		if v_1.Op != OpConst64 {
  2093  			break
  2094  		}
  2095  		c := v_1.AuxInt
  2096  		if !(c > 0 && smagic64ok(c) && smagic64m(c) < 0) {
  2097  			break
  2098  		}
  2099  		v.reset(OpSub64)
  2100  		v.Type = t
  2101  		v0 := b.NewValue0(v.Pos, OpRsh64x64, t)
  2102  		v1 := b.NewValue0(v.Pos, OpAdd64, t)
  2103  		v2 := b.NewValue0(v.Pos, OpHmul64, t)
  2104  		v3 := b.NewValue0(v.Pos, OpConst64, t)
  2105  		v3.AuxInt = smagic64m(c)
  2106  		v2.AddArg(v3)
  2107  		v2.AddArg(x)
  2108  		v1.AddArg(v2)
  2109  		v1.AddArg(x)
  2110  		v0.AddArg(v1)
  2111  		v4 := b.NewValue0(v.Pos, OpConst64, t)
  2112  		v4.AuxInt = smagic64s(c)
  2113  		v0.AddArg(v4)
  2114  		v.AddArg(v0)
  2115  		v5 := b.NewValue0(v.Pos, OpRsh64x64, t)
  2116  		v5.AddArg(x)
  2117  		v6 := b.NewValue0(v.Pos, OpConst64, t)
  2118  		v6.AuxInt = 63
  2119  		v5.AddArg(v6)
  2120  		v.AddArg(v5)
  2121  		return true
  2122  	}
  2123  	// match: (Div64 <t> x (Const64 [c]))
  2124  	// cond: c < 0 && smagic64ok(c) && smagic64m(c) > 0
  2125  	// result: (Neg64 <t>     (Sub64 <t>       (Rsh64x64 <t>         (Hmul64 <t>           (Const64 <t> [smagic64m(c)])           x)         (Const64 <t> [smagic64s(c)]))       (Rsh64x64 <t>         x         (Const64 <t> [63]))))
  2126  	for {
  2127  		t := v.Type
  2128  		x := v.Args[0]
  2129  		v_1 := v.Args[1]
  2130  		if v_1.Op != OpConst64 {
  2131  			break
  2132  		}
  2133  		c := v_1.AuxInt
  2134  		if !(c < 0 && smagic64ok(c) && smagic64m(c) > 0) {
  2135  			break
  2136  		}
  2137  		v.reset(OpNeg64)
  2138  		v.Type = t
  2139  		v0 := b.NewValue0(v.Pos, OpSub64, t)
  2140  		v1 := b.NewValue0(v.Pos, OpRsh64x64, t)
  2141  		v2 := b.NewValue0(v.Pos, OpHmul64, t)
  2142  		v3 := b.NewValue0(v.Pos, OpConst64, t)
  2143  		v3.AuxInt = smagic64m(c)
  2144  		v2.AddArg(v3)
  2145  		v2.AddArg(x)
  2146  		v1.AddArg(v2)
  2147  		v4 := b.NewValue0(v.Pos, OpConst64, t)
  2148  		v4.AuxInt = smagic64s(c)
  2149  		v1.AddArg(v4)
  2150  		v0.AddArg(v1)
  2151  		v5 := b.NewValue0(v.Pos, OpRsh64x64, t)
  2152  		v5.AddArg(x)
  2153  		v6 := b.NewValue0(v.Pos, OpConst64, t)
  2154  		v6.AuxInt = 63
  2155  		v5.AddArg(v6)
  2156  		v0.AddArg(v5)
  2157  		v.AddArg(v0)
  2158  		return true
  2159  	}
  2160  	// match: (Div64 <t> x (Const64 [c]))
  2161  	// cond: c < 0 && smagic64ok(c) && smagic64m(c) < 0
  2162  	// result: (Neg64 <t>     (Sub64 <t>       (Rsh64x64 <t>         (Add64 <t>           (Hmul64 <t>             (Const64 <t> [smagic64m(c)])             x)           x)         (Const64 <t> [smagic64s(c)]))       (Rsh64x64 <t>         x         (Const64 <t> [63]))))
  2163  	for {
  2164  		t := v.Type
  2165  		x := v.Args[0]
  2166  		v_1 := v.Args[1]
  2167  		if v_1.Op != OpConst64 {
  2168  			break
  2169  		}
  2170  		c := v_1.AuxInt
  2171  		if !(c < 0 && smagic64ok(c) && smagic64m(c) < 0) {
  2172  			break
  2173  		}
  2174  		v.reset(OpNeg64)
  2175  		v.Type = t
  2176  		v0 := b.NewValue0(v.Pos, OpSub64, t)
  2177  		v1 := b.NewValue0(v.Pos, OpRsh64x64, t)
  2178  		v2 := b.NewValue0(v.Pos, OpAdd64, t)
  2179  		v3 := b.NewValue0(v.Pos, OpHmul64, t)
  2180  		v4 := b.NewValue0(v.Pos, OpConst64, t)
  2181  		v4.AuxInt = smagic64m(c)
  2182  		v3.AddArg(v4)
  2183  		v3.AddArg(x)
  2184  		v2.AddArg(v3)
  2185  		v2.AddArg(x)
  2186  		v1.AddArg(v2)
  2187  		v5 := b.NewValue0(v.Pos, OpConst64, t)
  2188  		v5.AuxInt = smagic64s(c)
  2189  		v1.AddArg(v5)
  2190  		v0.AddArg(v1)
  2191  		v6 := b.NewValue0(v.Pos, OpRsh64x64, t)
  2192  		v6.AddArg(x)
  2193  		v7 := b.NewValue0(v.Pos, OpConst64, t)
  2194  		v7.AuxInt = 63
  2195  		v6.AddArg(v7)
  2196  		v0.AddArg(v6)
  2197  		v.AddArg(v0)
  2198  		return true
  2199  	}
  2200  	return false
  2201  }
  2202  func rewriteValuegeneric_OpDiv64F(v *Value, config *Config) bool {
  2203  	b := v.Block
  2204  	_ = b
  2205  	// match: (Div64F x (Const64F [f2i(1)]))
  2206  	// cond:
  2207  	// result: x
  2208  	for {
  2209  		x := v.Args[0]
  2210  		v_1 := v.Args[1]
  2211  		if v_1.Op != OpConst64F {
  2212  			break
  2213  		}
  2214  		if v_1.AuxInt != f2i(1) {
  2215  			break
  2216  		}
  2217  		v.reset(OpCopy)
  2218  		v.Type = x.Type
  2219  		v.AddArg(x)
  2220  		return true
  2221  	}
  2222  	// match: (Div64F x (Const64F [f2i(-1)]))
  2223  	// cond:
  2224  	// result: (Neg64F x)
  2225  	for {
  2226  		x := v.Args[0]
  2227  		v_1 := v.Args[1]
  2228  		if v_1.Op != OpConst64F {
  2229  			break
  2230  		}
  2231  		if v_1.AuxInt != f2i(-1) {
  2232  			break
  2233  		}
  2234  		v.reset(OpNeg64F)
  2235  		v.AddArg(x)
  2236  		return true
  2237  	}
  2238  	return false
  2239  }
  2240  func rewriteValuegeneric_OpDiv64u(v *Value, config *Config) bool {
  2241  	b := v.Block
  2242  	_ = b
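        	// Unsigned division by a constant: a power-of-two divisor becomes a
        	// plain right shift by log2(c).  Otherwise the unsigned magic-number
        	// form is used: multiply by umagic64m(c), keep the high word (Hmul64u)
        	// and shift right by umagic64s(c).  When the magic constant needs the
        	// "add" fixup (umagic64a(c)), Avg64u computes (hi+x)/2 without losing
        	// the carry and the shift count drops by one.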
  2243  	// match: (Div64u <t> n (Const64 [c]))
  2244  	// cond: isPowerOfTwo(c)
  2245  	// result: (Rsh64Ux64 n (Const64 <t> [log2(c)]))
  2246  	for {
  2247  		t := v.Type
  2248  		n := v.Args[0]
  2249  		v_1 := v.Args[1]
  2250  		if v_1.Op != OpConst64 {
  2251  			break
  2252  		}
  2253  		c := v_1.AuxInt
  2254  		if !(isPowerOfTwo(c)) {
  2255  			break
  2256  		}
  2257  		v.reset(OpRsh64Ux64)
  2258  		v.AddArg(n)
  2259  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  2260  		v0.AuxInt = log2(c)
  2261  		v.AddArg(v0)
  2262  		return true
  2263  	}
  2264  	// match: (Div64u <t> x (Const64 [c]))
  2265  	// cond: umagic64ok(c) && !umagic64a(c)
  2266  	// result: (Rsh64Ux64     (Hmul64u <t>       (Const64 <t> [umagic64m(c)])       x)     (Const64 <t> [umagic64s(c)]))
  2267  	for {
  2268  		t := v.Type
  2269  		x := v.Args[0]
  2270  		v_1 := v.Args[1]
  2271  		if v_1.Op != OpConst64 {
  2272  			break
  2273  		}
  2274  		c := v_1.AuxInt
  2275  		if !(umagic64ok(c) && !umagic64a(c)) {
  2276  			break
  2277  		}
  2278  		v.reset(OpRsh64Ux64)
  2279  		v0 := b.NewValue0(v.Pos, OpHmul64u, t)
  2280  		v1 := b.NewValue0(v.Pos, OpConst64, t)
  2281  		v1.AuxInt = umagic64m(c)
  2282  		v0.AddArg(v1)
  2283  		v0.AddArg(x)
  2284  		v.AddArg(v0)
  2285  		v2 := b.NewValue0(v.Pos, OpConst64, t)
  2286  		v2.AuxInt = umagic64s(c)
  2287  		v.AddArg(v2)
  2288  		return true
  2289  	}
  2290  	// match: (Div64u <t> x (Const64 [c]))
  2291  	// cond: umagic64ok(c) && umagic64a(c)
  2292  	// result: (Rsh64Ux64     (Avg64u <t>       (Hmul64u <t>         x         (Const64 <t> [umagic64m(c)]))       x)     (Const64 <t> [umagic64s(c)-1]))
  2293  	for {
  2294  		t := v.Type
  2295  		x := v.Args[0]
  2296  		v_1 := v.Args[1]
  2297  		if v_1.Op != OpConst64 {
  2298  			break
  2299  		}
  2300  		c := v_1.AuxInt
  2301  		if !(umagic64ok(c) && umagic64a(c)) {
  2302  			break
  2303  		}
  2304  		v.reset(OpRsh64Ux64)
  2305  		v0 := b.NewValue0(v.Pos, OpAvg64u, t)
  2306  		v1 := b.NewValue0(v.Pos, OpHmul64u, t)
  2307  		v1.AddArg(x)
  2308  		v2 := b.NewValue0(v.Pos, OpConst64, t)
  2309  		v2.AuxInt = umagic64m(c)
  2310  		v1.AddArg(v2)
  2311  		v0.AddArg(v1)
  2312  		v0.AddArg(x)
  2313  		v.AddArg(v0)
  2314  		v3 := b.NewValue0(v.Pos, OpConst64, t)
  2315  		v3.AuxInt = umagic64s(c) - 1
  2316  		v.AddArg(v3)
  2317  		return true
  2318  	}
  2319  	return false
  2320  }
  2321  func rewriteValuegeneric_OpEq16(v *Value, config *Config) bool {
  2322  	b := v.Block
  2323  	_ = b
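        	// The Eq rules below (and their 32-, 64- and 8-bit counterparts) are
        	// canonicalizations rather than lowerings: x == x folds to true, a
        	// constant compared with a constant-plus-x is folded into a single
        	// constant on the left, constants are commuted into the first operand
        	// so later rules only need to match one order, and a comparison of two
        	// constants folds to a ConstBool via b2i.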
  2324  	// match: (Eq16 x x)
  2325  	// cond:
  2326  	// result: (ConstBool [1])
  2327  	for {
  2328  		x := v.Args[0]
  2329  		if x != v.Args[1] {
  2330  			break
  2331  		}
  2332  		v.reset(OpConstBool)
  2333  		v.AuxInt = 1
  2334  		return true
  2335  	}
  2336  	// match: (Eq16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x))
  2337  	// cond:
  2338  	// result: (Eq16 (Const16 <t> [int64(int16(c-d))]) x)
  2339  	for {
  2340  		v_0 := v.Args[0]
  2341  		if v_0.Op != OpConst16 {
  2342  			break
  2343  		}
  2344  		t := v_0.Type
  2345  		c := v_0.AuxInt
  2346  		v_1 := v.Args[1]
  2347  		if v_1.Op != OpAdd16 {
  2348  			break
  2349  		}
  2350  		v_1_0 := v_1.Args[0]
  2351  		if v_1_0.Op != OpConst16 {
  2352  			break
  2353  		}
  2354  		if v_1_0.Type != t {
  2355  			break
  2356  		}
  2357  		d := v_1_0.AuxInt
  2358  		x := v_1.Args[1]
  2359  		v.reset(OpEq16)
  2360  		v0 := b.NewValue0(v.Pos, OpConst16, t)
  2361  		v0.AuxInt = int64(int16(c - d))
  2362  		v.AddArg(v0)
  2363  		v.AddArg(x)
  2364  		return true
  2365  	}
  2366  	// match: (Eq16 x (Const16 <t> [c]))
  2367  	// cond: x.Op != OpConst16
  2368  	// result: (Eq16 (Const16 <t> [c]) x)
  2369  	for {
  2370  		x := v.Args[0]
  2371  		v_1 := v.Args[1]
  2372  		if v_1.Op != OpConst16 {
  2373  			break
  2374  		}
  2375  		t := v_1.Type
  2376  		c := v_1.AuxInt
  2377  		if !(x.Op != OpConst16) {
  2378  			break
  2379  		}
  2380  		v.reset(OpEq16)
  2381  		v0 := b.NewValue0(v.Pos, OpConst16, t)
  2382  		v0.AuxInt = c
  2383  		v.AddArg(v0)
  2384  		v.AddArg(x)
  2385  		return true
  2386  	}
  2387  	// match: (Eq16 (Const16 [c]) (Const16 [d]))
  2388  	// cond:
  2389  	// result: (ConstBool [b2i(c == d)])
  2390  	for {
  2391  		v_0 := v.Args[0]
  2392  		if v_0.Op != OpConst16 {
  2393  			break
  2394  		}
  2395  		c := v_0.AuxInt
  2396  		v_1 := v.Args[1]
  2397  		if v_1.Op != OpConst16 {
  2398  			break
  2399  		}
  2400  		d := v_1.AuxInt
  2401  		v.reset(OpConstBool)
  2402  		v.AuxInt = b2i(c == d)
  2403  		return true
  2404  	}
  2405  	return false
  2406  }
  2407  func rewriteValuegeneric_OpEq32(v *Value, config *Config) bool {
  2408  	b := v.Block
  2409  	_ = b
  2410  	// match: (Eq32 x x)
  2411  	// cond:
  2412  	// result: (ConstBool [1])
  2413  	for {
  2414  		x := v.Args[0]
  2415  		if x != v.Args[1] {
  2416  			break
  2417  		}
  2418  		v.reset(OpConstBool)
  2419  		v.AuxInt = 1
  2420  		return true
  2421  	}
  2422  	// match: (Eq32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x))
  2423  	// cond:
  2424  	// result: (Eq32 (Const32 <t> [int64(int32(c-d))]) x)
  2425  	for {
  2426  		v_0 := v.Args[0]
  2427  		if v_0.Op != OpConst32 {
  2428  			break
  2429  		}
  2430  		t := v_0.Type
  2431  		c := v_0.AuxInt
  2432  		v_1 := v.Args[1]
  2433  		if v_1.Op != OpAdd32 {
  2434  			break
  2435  		}
  2436  		v_1_0 := v_1.Args[0]
  2437  		if v_1_0.Op != OpConst32 {
  2438  			break
  2439  		}
  2440  		if v_1_0.Type != t {
  2441  			break
  2442  		}
  2443  		d := v_1_0.AuxInt
  2444  		x := v_1.Args[1]
  2445  		v.reset(OpEq32)
  2446  		v0 := b.NewValue0(v.Pos, OpConst32, t)
  2447  		v0.AuxInt = int64(int32(c - d))
  2448  		v.AddArg(v0)
  2449  		v.AddArg(x)
  2450  		return true
  2451  	}
  2452  	// match: (Eq32 x (Const32 <t> [c]))
  2453  	// cond: x.Op != OpConst32
  2454  	// result: (Eq32 (Const32 <t> [c]) x)
  2455  	for {
  2456  		x := v.Args[0]
  2457  		v_1 := v.Args[1]
  2458  		if v_1.Op != OpConst32 {
  2459  			break
  2460  		}
  2461  		t := v_1.Type
  2462  		c := v_1.AuxInt
  2463  		if !(x.Op != OpConst32) {
  2464  			break
  2465  		}
  2466  		v.reset(OpEq32)
  2467  		v0 := b.NewValue0(v.Pos, OpConst32, t)
  2468  		v0.AuxInt = c
  2469  		v.AddArg(v0)
  2470  		v.AddArg(x)
  2471  		return true
  2472  	}
  2473  	// match: (Eq32 (Const32 [c]) (Const32 [d]))
  2474  	// cond:
  2475  	// result: (ConstBool [b2i(c == d)])
  2476  	for {
  2477  		v_0 := v.Args[0]
  2478  		if v_0.Op != OpConst32 {
  2479  			break
  2480  		}
  2481  		c := v_0.AuxInt
  2482  		v_1 := v.Args[1]
  2483  		if v_1.Op != OpConst32 {
  2484  			break
  2485  		}
  2486  		d := v_1.AuxInt
  2487  		v.reset(OpConstBool)
  2488  		v.AuxInt = b2i(c == d)
  2489  		return true
  2490  	}
  2491  	return false
  2492  }
  2493  func rewriteValuegeneric_OpEq64(v *Value, config *Config) bool {
  2494  	b := v.Block
  2495  	_ = b
  2496  	// match: (Eq64 x x)
  2497  	// cond:
  2498  	// result: (ConstBool [1])
  2499  	for {
  2500  		x := v.Args[0]
  2501  		if x != v.Args[1] {
  2502  			break
  2503  		}
  2504  		v.reset(OpConstBool)
  2505  		v.AuxInt = 1
  2506  		return true
  2507  	}
  2508  	// match: (Eq64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x))
  2509  	// cond:
  2510  	// result: (Eq64 (Const64 <t> [c-d]) x)
  2511  	for {
  2512  		v_0 := v.Args[0]
  2513  		if v_0.Op != OpConst64 {
  2514  			break
  2515  		}
  2516  		t := v_0.Type
  2517  		c := v_0.AuxInt
  2518  		v_1 := v.Args[1]
  2519  		if v_1.Op != OpAdd64 {
  2520  			break
  2521  		}
  2522  		v_1_0 := v_1.Args[0]
  2523  		if v_1_0.Op != OpConst64 {
  2524  			break
  2525  		}
  2526  		if v_1_0.Type != t {
  2527  			break
  2528  		}
  2529  		d := v_1_0.AuxInt
  2530  		x := v_1.Args[1]
  2531  		v.reset(OpEq64)
  2532  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  2533  		v0.AuxInt = c - d
  2534  		v.AddArg(v0)
  2535  		v.AddArg(x)
  2536  		return true
  2537  	}
  2538  	// match: (Eq64 x (Const64 <t> [c]))
  2539  	// cond: x.Op != OpConst64
  2540  	// result: (Eq64 (Const64 <t> [c]) x)
  2541  	for {
  2542  		x := v.Args[0]
  2543  		v_1 := v.Args[1]
  2544  		if v_1.Op != OpConst64 {
  2545  			break
  2546  		}
  2547  		t := v_1.Type
  2548  		c := v_1.AuxInt
  2549  		if !(x.Op != OpConst64) {
  2550  			break
  2551  		}
  2552  		v.reset(OpEq64)
  2553  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  2554  		v0.AuxInt = c
  2555  		v.AddArg(v0)
  2556  		v.AddArg(x)
  2557  		return true
  2558  	}
  2559  	// match: (Eq64 (Const64 [c]) (Const64 [d]))
  2560  	// cond:
  2561  	// result: (ConstBool [b2i(c == d)])
  2562  	for {
  2563  		v_0 := v.Args[0]
  2564  		if v_0.Op != OpConst64 {
  2565  			break
  2566  		}
  2567  		c := v_0.AuxInt
  2568  		v_1 := v.Args[1]
  2569  		if v_1.Op != OpConst64 {
  2570  			break
  2571  		}
  2572  		d := v_1.AuxInt
  2573  		v.reset(OpConstBool)
  2574  		v.AuxInt = b2i(c == d)
  2575  		return true
  2576  	}
  2577  	return false
  2578  }
  2579  func rewriteValuegeneric_OpEq8(v *Value, config *Config) bool {
  2580  	b := v.Block
  2581  	_ = b
  2582  	// match: (Eq8  x x)
  2583  	// cond:
  2584  	// result: (ConstBool [1])
  2585  	for {
  2586  		x := v.Args[0]
  2587  		if x != v.Args[1] {
  2588  			break
  2589  		}
  2590  		v.reset(OpConstBool)
  2591  		v.AuxInt = 1
  2592  		return true
  2593  	}
  2594  	// match: (Eq8  (Const8  <t> [c]) (Add8  (Const8  <t> [d]) x))
  2595  	// cond:
  2596  	// result: (Eq8  (Const8 <t> [int64(int8(c-d))]) x)
  2597  	for {
  2598  		v_0 := v.Args[0]
  2599  		if v_0.Op != OpConst8 {
  2600  			break
  2601  		}
  2602  		t := v_0.Type
  2603  		c := v_0.AuxInt
  2604  		v_1 := v.Args[1]
  2605  		if v_1.Op != OpAdd8 {
  2606  			break
  2607  		}
  2608  		v_1_0 := v_1.Args[0]
  2609  		if v_1_0.Op != OpConst8 {
  2610  			break
  2611  		}
  2612  		if v_1_0.Type != t {
  2613  			break
  2614  		}
  2615  		d := v_1_0.AuxInt
  2616  		x := v_1.Args[1]
  2617  		v.reset(OpEq8)
  2618  		v0 := b.NewValue0(v.Pos, OpConst8, t)
  2619  		v0.AuxInt = int64(int8(c - d))
  2620  		v.AddArg(v0)
  2621  		v.AddArg(x)
  2622  		return true
  2623  	}
  2624  	// match: (Eq8  x (Const8  <t> [c]))
  2625  	// cond: x.Op != OpConst8
  2626  	// result: (Eq8  (Const8  <t> [c]) x)
  2627  	for {
  2628  		x := v.Args[0]
  2629  		v_1 := v.Args[1]
  2630  		if v_1.Op != OpConst8 {
  2631  			break
  2632  		}
  2633  		t := v_1.Type
  2634  		c := v_1.AuxInt
  2635  		if !(x.Op != OpConst8) {
  2636  			break
  2637  		}
  2638  		v.reset(OpEq8)
  2639  		v0 := b.NewValue0(v.Pos, OpConst8, t)
  2640  		v0.AuxInt = c
  2641  		v.AddArg(v0)
  2642  		v.AddArg(x)
  2643  		return true
  2644  	}
  2645  	// match: (Eq8  (Const8  [c]) (Const8  [d]))
  2646  	// cond:
  2647  	// result: (ConstBool [b2i(c == d)])
  2648  	for {
  2649  		v_0 := v.Args[0]
  2650  		if v_0.Op != OpConst8 {
  2651  			break
  2652  		}
  2653  		c := v_0.AuxInt
  2654  		v_1 := v.Args[1]
  2655  		if v_1.Op != OpConst8 {
  2656  			break
  2657  		}
  2658  		d := v_1.AuxInt
  2659  		v.reset(OpConstBool)
  2660  		v.AuxInt = b2i(c == d)
  2661  		return true
  2662  	}
  2663  	return false
  2664  }
  2665  func rewriteValuegeneric_OpEqB(v *Value, config *Config) bool {
  2666  	b := v.Block
  2667  	_ = b
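        	// Boolean equality folds against constants: two ConstBools fold
        	// directly, comparing with false becomes Not x, and comparing with
        	// true is just x.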
  2668  	// match: (EqB (ConstBool [c]) (ConstBool [d]))
  2669  	// cond:
  2670  	// result: (ConstBool [b2i(c == d)])
  2671  	for {
  2672  		v_0 := v.Args[0]
  2673  		if v_0.Op != OpConstBool {
  2674  			break
  2675  		}
  2676  		c := v_0.AuxInt
  2677  		v_1 := v.Args[1]
  2678  		if v_1.Op != OpConstBool {
  2679  			break
  2680  		}
  2681  		d := v_1.AuxInt
  2682  		v.reset(OpConstBool)
  2683  		v.AuxInt = b2i(c == d)
  2684  		return true
  2685  	}
  2686  	// match: (EqB (ConstBool [0]) x)
  2687  	// cond:
  2688  	// result: (Not x)
  2689  	for {
  2690  		v_0 := v.Args[0]
  2691  		if v_0.Op != OpConstBool {
  2692  			break
  2693  		}
  2694  		if v_0.AuxInt != 0 {
  2695  			break
  2696  		}
  2697  		x := v.Args[1]
  2698  		v.reset(OpNot)
  2699  		v.AddArg(x)
  2700  		return true
  2701  	}
  2702  	// match: (EqB (ConstBool [1]) x)
  2703  	// cond:
  2704  	// result: x
  2705  	for {
  2706  		v_0 := v.Args[0]
  2707  		if v_0.Op != OpConstBool {
  2708  			break
  2709  		}
  2710  		if v_0.AuxInt != 1 {
  2711  			break
  2712  		}
  2713  		x := v.Args[1]
  2714  		v.reset(OpCopy)
  2715  		v.Type = x.Type
  2716  		v.AddArg(x)
  2717  		return true
  2718  	}
  2719  	return false
  2720  }
  2721  func rewriteValuegeneric_OpEqInter(v *Value, config *Config) bool {
  2722  	b := v.Block
  2723  	_ = b
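        	// Interface comparisons reaching SSA only need the first word: the
        	// frontend lowers general interface equality to runtime calls, so what
        	// remains (notably comparison against nil) is decided by the itab/type
        	// word alone, and EqInter reduces to an EqPtr of the two ITab fields.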
  2724  	// match: (EqInter x y)
  2725  	// cond:
  2726  	// result: (EqPtr  (ITab x) (ITab y))
  2727  	for {
  2728  		x := v.Args[0]
  2729  		y := v.Args[1]
  2730  		v.reset(OpEqPtr)
  2731  		v0 := b.NewValue0(v.Pos, OpITab, config.fe.TypeBytePtr())
  2732  		v0.AddArg(x)
  2733  		v.AddArg(v0)
  2734  		v1 := b.NewValue0(v.Pos, OpITab, config.fe.TypeBytePtr())
  2735  		v1.AddArg(y)
  2736  		v.AddArg(v1)
  2737  		return true
  2738  	}
  2739  }
  2740  func rewriteValuegeneric_OpEqPtr(v *Value, config *Config) bool {
  2741  	b := v.Block
  2742  	_ = b
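        	// A pointer compared with the nil constant, in either operand order,
        	// is rewritten as Not(IsNonNil p).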
  2743  	// match: (EqPtr p (ConstNil))
  2744  	// cond:
  2745  	// result: (Not (IsNonNil p))
  2746  	for {
  2747  		p := v.Args[0]
  2748  		v_1 := v.Args[1]
  2749  		if v_1.Op != OpConstNil {
  2750  			break
  2751  		}
  2752  		v.reset(OpNot)
  2753  		v0 := b.NewValue0(v.Pos, OpIsNonNil, config.fe.TypeBool())
  2754  		v0.AddArg(p)
  2755  		v.AddArg(v0)
  2756  		return true
  2757  	}
  2758  	// match: (EqPtr (ConstNil) p)
  2759  	// cond:
  2760  	// result: (Not (IsNonNil p))
  2761  	for {
  2762  		v_0 := v.Args[0]
  2763  		if v_0.Op != OpConstNil {
  2764  			break
  2765  		}
  2766  		p := v.Args[1]
  2767  		v.reset(OpNot)
  2768  		v0 := b.NewValue0(v.Pos, OpIsNonNil, config.fe.TypeBool())
  2769  		v0.AddArg(p)
  2770  		v.AddArg(v0)
  2771  		return true
  2772  	}
  2773  	return false
  2774  }
  2775  func rewriteValuegeneric_OpEqSlice(v *Value, config *Config) bool {
  2776  	b := v.Block
  2777  	_ = b
  2778  	// match: (EqSlice x y)
  2779  	// cond:
  2780  	// result: (EqPtr  (SlicePtr x) (SlicePtr y))
  2781  	for {
  2782  		x := v.Args[0]
  2783  		y := v.Args[1]
  2784  		v.reset(OpEqPtr)
  2785  		v0 := b.NewValue0(v.Pos, OpSlicePtr, config.fe.TypeBytePtr())
  2786  		v0.AddArg(x)
  2787  		v.AddArg(v0)
  2788  		v1 := b.NewValue0(v.Pos, OpSlicePtr, config.fe.TypeBytePtr())
  2789  		v1.AddArg(y)
  2790  		v.AddArg(v1)
  2791  		return true
  2792  	}
  2793  }
  2794  func rewriteValuegeneric_OpGeq16(v *Value, config *Config) bool {
  2795  	b := v.Block
  2796  	_ = b
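        	// The ordered-comparison rules from here on (Geq, Greater, Leq and
        	// Less, signed and unsigned, at every width) all do the same thing:
        	// when both operands are constants, fold the comparison into a
        	// ConstBool via b2i, reinterpreting the AuxInt through the matching
        	// unsigned type for the U variants.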
  2797  	// match: (Geq16 (Const16 [c]) (Const16 [d]))
  2798  	// cond:
  2799  	// result: (ConstBool [b2i(c >= d)])
  2800  	for {
  2801  		v_0 := v.Args[0]
  2802  		if v_0.Op != OpConst16 {
  2803  			break
  2804  		}
  2805  		c := v_0.AuxInt
  2806  		v_1 := v.Args[1]
  2807  		if v_1.Op != OpConst16 {
  2808  			break
  2809  		}
  2810  		d := v_1.AuxInt
  2811  		v.reset(OpConstBool)
  2812  		v.AuxInt = b2i(c >= d)
  2813  		return true
  2814  	}
  2815  	return false
  2816  }
  2817  func rewriteValuegeneric_OpGeq16U(v *Value, config *Config) bool {
  2818  	b := v.Block
  2819  	_ = b
  2820  	// match: (Geq16U (Const16 [c]) (Const16 [d]))
  2821  	// cond:
  2822  	// result: (ConstBool [b2i(uint16(c) >= uint16(d))])
  2823  	for {
  2824  		v_0 := v.Args[0]
  2825  		if v_0.Op != OpConst16 {
  2826  			break
  2827  		}
  2828  		c := v_0.AuxInt
  2829  		v_1 := v.Args[1]
  2830  		if v_1.Op != OpConst16 {
  2831  			break
  2832  		}
  2833  		d := v_1.AuxInt
  2834  		v.reset(OpConstBool)
  2835  		v.AuxInt = b2i(uint16(c) >= uint16(d))
  2836  		return true
  2837  	}
  2838  	return false
  2839  }
  2840  func rewriteValuegeneric_OpGeq32(v *Value, config *Config) bool {
  2841  	b := v.Block
  2842  	_ = b
  2843  	// match: (Geq32 (Const32 [c]) (Const32 [d]))
  2844  	// cond:
  2845  	// result: (ConstBool [b2i(c >= d)])
  2846  	for {
  2847  		v_0 := v.Args[0]
  2848  		if v_0.Op != OpConst32 {
  2849  			break
  2850  		}
  2851  		c := v_0.AuxInt
  2852  		v_1 := v.Args[1]
  2853  		if v_1.Op != OpConst32 {
  2854  			break
  2855  		}
  2856  		d := v_1.AuxInt
  2857  		v.reset(OpConstBool)
  2858  		v.AuxInt = b2i(c >= d)
  2859  		return true
  2860  	}
  2861  	return false
  2862  }
  2863  func rewriteValuegeneric_OpGeq32U(v *Value, config *Config) bool {
  2864  	b := v.Block
  2865  	_ = b
  2866  	// match: (Geq32U (Const32 [c]) (Const32 [d]))
  2867  	// cond:
  2868  	// result: (ConstBool [b2i(uint32(c) >= uint32(d))])
  2869  	for {
  2870  		v_0 := v.Args[0]
  2871  		if v_0.Op != OpConst32 {
  2872  			break
  2873  		}
  2874  		c := v_0.AuxInt
  2875  		v_1 := v.Args[1]
  2876  		if v_1.Op != OpConst32 {
  2877  			break
  2878  		}
  2879  		d := v_1.AuxInt
  2880  		v.reset(OpConstBool)
  2881  		v.AuxInt = b2i(uint32(c) >= uint32(d))
  2882  		return true
  2883  	}
  2884  	return false
  2885  }
  2886  func rewriteValuegeneric_OpGeq64(v *Value, config *Config) bool {
  2887  	b := v.Block
  2888  	_ = b
  2889  	// match: (Geq64 (Const64 [c]) (Const64 [d]))
  2890  	// cond:
  2891  	// result: (ConstBool [b2i(c >= d)])
  2892  	for {
  2893  		v_0 := v.Args[0]
  2894  		if v_0.Op != OpConst64 {
  2895  			break
  2896  		}
  2897  		c := v_0.AuxInt
  2898  		v_1 := v.Args[1]
  2899  		if v_1.Op != OpConst64 {
  2900  			break
  2901  		}
  2902  		d := v_1.AuxInt
  2903  		v.reset(OpConstBool)
  2904  		v.AuxInt = b2i(c >= d)
  2905  		return true
  2906  	}
  2907  	return false
  2908  }
  2909  func rewriteValuegeneric_OpGeq64U(v *Value, config *Config) bool {
  2910  	b := v.Block
  2911  	_ = b
  2912  	// match: (Geq64U (Const64 [c]) (Const64 [d]))
  2913  	// cond:
  2914  	// result: (ConstBool [b2i(uint64(c) >= uint64(d))])
  2915  	for {
  2916  		v_0 := v.Args[0]
  2917  		if v_0.Op != OpConst64 {
  2918  			break
  2919  		}
  2920  		c := v_0.AuxInt
  2921  		v_1 := v.Args[1]
  2922  		if v_1.Op != OpConst64 {
  2923  			break
  2924  		}
  2925  		d := v_1.AuxInt
  2926  		v.reset(OpConstBool)
  2927  		v.AuxInt = b2i(uint64(c) >= uint64(d))
  2928  		return true
  2929  	}
  2930  	return false
  2931  }
  2932  func rewriteValuegeneric_OpGeq8(v *Value, config *Config) bool {
  2933  	b := v.Block
  2934  	_ = b
  2935  	// match: (Geq8  (Const8  [c]) (Const8  [d]))
  2936  	// cond:
  2937  	// result: (ConstBool [b2i(c >= d)])
  2938  	for {
  2939  		v_0 := v.Args[0]
  2940  		if v_0.Op != OpConst8 {
  2941  			break
  2942  		}
  2943  		c := v_0.AuxInt
  2944  		v_1 := v.Args[1]
  2945  		if v_1.Op != OpConst8 {
  2946  			break
  2947  		}
  2948  		d := v_1.AuxInt
  2949  		v.reset(OpConstBool)
  2950  		v.AuxInt = b2i(c >= d)
  2951  		return true
  2952  	}
  2953  	return false
  2954  }
  2955  func rewriteValuegeneric_OpGeq8U(v *Value, config *Config) bool {
  2956  	b := v.Block
  2957  	_ = b
  2958  	// match: (Geq8U  (Const8  [c]) (Const8  [d]))
  2959  	// cond:
  2960  	// result: (ConstBool [b2i(uint8(c)  >= uint8(d))])
  2961  	for {
  2962  		v_0 := v.Args[0]
  2963  		if v_0.Op != OpConst8 {
  2964  			break
  2965  		}
  2966  		c := v_0.AuxInt
  2967  		v_1 := v.Args[1]
  2968  		if v_1.Op != OpConst8 {
  2969  			break
  2970  		}
  2971  		d := v_1.AuxInt
  2972  		v.reset(OpConstBool)
  2973  		v.AuxInt = b2i(uint8(c) >= uint8(d))
  2974  		return true
  2975  	}
  2976  	return false
  2977  }
  2978  func rewriteValuegeneric_OpGreater16(v *Value, config *Config) bool {
  2979  	b := v.Block
  2980  	_ = b
  2981  	// match: (Greater16 (Const16 [c]) (Const16 [d]))
  2982  	// cond:
  2983  	// result: (ConstBool [b2i(c > d)])
  2984  	for {
  2985  		v_0 := v.Args[0]
  2986  		if v_0.Op != OpConst16 {
  2987  			break
  2988  		}
  2989  		c := v_0.AuxInt
  2990  		v_1 := v.Args[1]
  2991  		if v_1.Op != OpConst16 {
  2992  			break
  2993  		}
  2994  		d := v_1.AuxInt
  2995  		v.reset(OpConstBool)
  2996  		v.AuxInt = b2i(c > d)
  2997  		return true
  2998  	}
  2999  	return false
  3000  }
  3001  func rewriteValuegeneric_OpGreater16U(v *Value, config *Config) bool {
  3002  	b := v.Block
  3003  	_ = b
  3004  	// match: (Greater16U (Const16 [c]) (Const16 [d]))
  3005  	// cond:
  3006  	// result: (ConstBool [b2i(uint16(c) > uint16(d))])
  3007  	for {
  3008  		v_0 := v.Args[0]
  3009  		if v_0.Op != OpConst16 {
  3010  			break
  3011  		}
  3012  		c := v_0.AuxInt
  3013  		v_1 := v.Args[1]
  3014  		if v_1.Op != OpConst16 {
  3015  			break
  3016  		}
  3017  		d := v_1.AuxInt
  3018  		v.reset(OpConstBool)
  3019  		v.AuxInt = b2i(uint16(c) > uint16(d))
  3020  		return true
  3021  	}
  3022  	return false
  3023  }
  3024  func rewriteValuegeneric_OpGreater32(v *Value, config *Config) bool {
  3025  	b := v.Block
  3026  	_ = b
  3027  	// match: (Greater32 (Const32 [c]) (Const32 [d]))
  3028  	// cond:
  3029  	// result: (ConstBool [b2i(c > d)])
  3030  	for {
  3031  		v_0 := v.Args[0]
  3032  		if v_0.Op != OpConst32 {
  3033  			break
  3034  		}
  3035  		c := v_0.AuxInt
  3036  		v_1 := v.Args[1]
  3037  		if v_1.Op != OpConst32 {
  3038  			break
  3039  		}
  3040  		d := v_1.AuxInt
  3041  		v.reset(OpConstBool)
  3042  		v.AuxInt = b2i(c > d)
  3043  		return true
  3044  	}
  3045  	return false
  3046  }
  3047  func rewriteValuegeneric_OpGreater32U(v *Value, config *Config) bool {
  3048  	b := v.Block
  3049  	_ = b
  3050  	// match: (Greater32U (Const32 [c]) (Const32 [d]))
  3051  	// cond:
  3052  	// result: (ConstBool [b2i(uint32(c) > uint32(d))])
  3053  	for {
  3054  		v_0 := v.Args[0]
  3055  		if v_0.Op != OpConst32 {
  3056  			break
  3057  		}
  3058  		c := v_0.AuxInt
  3059  		v_1 := v.Args[1]
  3060  		if v_1.Op != OpConst32 {
  3061  			break
  3062  		}
  3063  		d := v_1.AuxInt
  3064  		v.reset(OpConstBool)
  3065  		v.AuxInt = b2i(uint32(c) > uint32(d))
  3066  		return true
  3067  	}
  3068  	return false
  3069  }
  3070  func rewriteValuegeneric_OpGreater64(v *Value, config *Config) bool {
  3071  	b := v.Block
  3072  	_ = b
  3073  	// match: (Greater64 (Const64 [c]) (Const64 [d]))
  3074  	// cond:
  3075  	// result: (ConstBool [b2i(c > d)])
  3076  	for {
  3077  		v_0 := v.Args[0]
  3078  		if v_0.Op != OpConst64 {
  3079  			break
  3080  		}
  3081  		c := v_0.AuxInt
  3082  		v_1 := v.Args[1]
  3083  		if v_1.Op != OpConst64 {
  3084  			break
  3085  		}
  3086  		d := v_1.AuxInt
  3087  		v.reset(OpConstBool)
  3088  		v.AuxInt = b2i(c > d)
  3089  		return true
  3090  	}
  3091  	return false
  3092  }
  3093  func rewriteValuegeneric_OpGreater64U(v *Value, config *Config) bool {
  3094  	b := v.Block
  3095  	_ = b
  3096  	// match: (Greater64U (Const64 [c]) (Const64 [d]))
  3097  	// cond:
  3098  	// result: (ConstBool [b2i(uint64(c) > uint64(d))])
  3099  	for {
  3100  		v_0 := v.Args[0]
  3101  		if v_0.Op != OpConst64 {
  3102  			break
  3103  		}
  3104  		c := v_0.AuxInt
  3105  		v_1 := v.Args[1]
  3106  		if v_1.Op != OpConst64 {
  3107  			break
  3108  		}
  3109  		d := v_1.AuxInt
  3110  		v.reset(OpConstBool)
  3111  		v.AuxInt = b2i(uint64(c) > uint64(d))
  3112  		return true
  3113  	}
  3114  	return false
  3115  }
  3116  func rewriteValuegeneric_OpGreater8(v *Value, config *Config) bool {
  3117  	b := v.Block
  3118  	_ = b
  3119  	// match: (Greater8  (Const8  [c]) (Const8  [d]))
  3120  	// cond:
  3121  	// result: (ConstBool [b2i(c > d)])
  3122  	for {
  3123  		v_0 := v.Args[0]
  3124  		if v_0.Op != OpConst8 {
  3125  			break
  3126  		}
  3127  		c := v_0.AuxInt
  3128  		v_1 := v.Args[1]
  3129  		if v_1.Op != OpConst8 {
  3130  			break
  3131  		}
  3132  		d := v_1.AuxInt
  3133  		v.reset(OpConstBool)
  3134  		v.AuxInt = b2i(c > d)
  3135  		return true
  3136  	}
  3137  	return false
  3138  }
  3139  func rewriteValuegeneric_OpGreater8U(v *Value, config *Config) bool {
  3140  	b := v.Block
  3141  	_ = b
  3142  	// match: (Greater8U  (Const8  [c]) (Const8  [d]))
  3143  	// cond:
  3144  	// result: (ConstBool [b2i(uint8(c)  > uint8(d))])
  3145  	for {
  3146  		v_0 := v.Args[0]
  3147  		if v_0.Op != OpConst8 {
  3148  			break
  3149  		}
  3150  		c := v_0.AuxInt
  3151  		v_1 := v.Args[1]
  3152  		if v_1.Op != OpConst8 {
  3153  			break
  3154  		}
  3155  		d := v_1.AuxInt
  3156  		v.reset(OpConstBool)
  3157  		v.AuxInt = b2i(uint8(c) > uint8(d))
  3158  		return true
  3159  	}
  3160  	return false
  3161  }
  3162  func rewriteValuegeneric_OpIMake(v *Value, config *Config) bool {
  3163  	b := v.Block
  3164  	_ = b
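        	// IMake builds an interface value from a type word and a data word.
        	// When the data word is a single-field struct or single-element array
        	// wrapper, the wrapper is peeled off so the interface carries the
        	// underlying value directly.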
  3165  	// match: (IMake typ (StructMake1 val))
  3166  	// cond:
  3167  	// result: (IMake typ val)
  3168  	for {
  3169  		typ := v.Args[0]
  3170  		v_1 := v.Args[1]
  3171  		if v_1.Op != OpStructMake1 {
  3172  			break
  3173  		}
  3174  		val := v_1.Args[0]
  3175  		v.reset(OpIMake)
  3176  		v.AddArg(typ)
  3177  		v.AddArg(val)
  3178  		return true
  3179  	}
  3180  	// match: (IMake typ (ArrayMake1 val))
  3181  	// cond:
  3182  	// result: (IMake typ val)
  3183  	for {
  3184  		typ := v.Args[0]
  3185  		v_1 := v.Args[1]
  3186  		if v_1.Op != OpArrayMake1 {
  3187  			break
  3188  		}
  3189  		val := v_1.Args[0]
  3190  		v.reset(OpIMake)
  3191  		v.AddArg(typ)
  3192  		v.AddArg(val)
  3193  		return true
  3194  	}
  3195  	return false
  3196  }
  3197  func rewriteValuegeneric_OpIsInBounds(v *Value, config *Config) bool {
  3198  	b := v.Block
  3199  	_ = b
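        	// IsInBounds (idx, len) feeds bounds-check elimination; each rule below
        	// proves a check statically.  A zero-extended 8- or 16-bit index is
        	// always within a length of at least 1<<8 or 1<<16, an index masked by
        	// And with a constant c is within any length greater than c, x is never
        	// a valid index into something of length x, two constants fold
        	// directly, and an unsigned Mod by the length is always in range.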
  3200  	// match: (IsInBounds (ZeroExt8to32  _) (Const32 [c]))
  3201  	// cond: (1 << 8)  <= c
  3202  	// result: (ConstBool [1])
  3203  	for {
  3204  		v_0 := v.Args[0]
  3205  		if v_0.Op != OpZeroExt8to32 {
  3206  			break
  3207  		}
  3208  		v_1 := v.Args[1]
  3209  		if v_1.Op != OpConst32 {
  3210  			break
  3211  		}
  3212  		c := v_1.AuxInt
  3213  		if !((1 << 8) <= c) {
  3214  			break
  3215  		}
  3216  		v.reset(OpConstBool)
  3217  		v.AuxInt = 1
  3218  		return true
  3219  	}
  3220  	// match: (IsInBounds (ZeroExt8to64  _) (Const64 [c]))
  3221  	// cond: (1 << 8)  <= c
  3222  	// result: (ConstBool [1])
  3223  	for {
  3224  		v_0 := v.Args[0]
  3225  		if v_0.Op != OpZeroExt8to64 {
  3226  			break
  3227  		}
  3228  		v_1 := v.Args[1]
  3229  		if v_1.Op != OpConst64 {
  3230  			break
  3231  		}
  3232  		c := v_1.AuxInt
  3233  		if !((1 << 8) <= c) {
  3234  			break
  3235  		}
  3236  		v.reset(OpConstBool)
  3237  		v.AuxInt = 1
  3238  		return true
  3239  	}
  3240  	// match: (IsInBounds (ZeroExt16to32 _) (Const32 [c]))
  3241  	// cond: (1 << 16) <= c
  3242  	// result: (ConstBool [1])
  3243  	for {
  3244  		v_0 := v.Args[0]
  3245  		if v_0.Op != OpZeroExt16to32 {
  3246  			break
  3247  		}
  3248  		v_1 := v.Args[1]
  3249  		if v_1.Op != OpConst32 {
  3250  			break
  3251  		}
  3252  		c := v_1.AuxInt
  3253  		if !((1 << 16) <= c) {
  3254  			break
  3255  		}
  3256  		v.reset(OpConstBool)
  3257  		v.AuxInt = 1
  3258  		return true
  3259  	}
  3260  	// match: (IsInBounds (ZeroExt16to64 _) (Const64 [c]))
  3261  	// cond: (1 << 16) <= c
  3262  	// result: (ConstBool [1])
  3263  	for {
  3264  		v_0 := v.Args[0]
  3265  		if v_0.Op != OpZeroExt16to64 {
  3266  			break
  3267  		}
  3268  		v_1 := v.Args[1]
  3269  		if v_1.Op != OpConst64 {
  3270  			break
  3271  		}
  3272  		c := v_1.AuxInt
  3273  		if !((1 << 16) <= c) {
  3274  			break
  3275  		}
  3276  		v.reset(OpConstBool)
  3277  		v.AuxInt = 1
  3278  		return true
  3279  	}
  3280  	// match: (IsInBounds x x)
  3281  	// cond:
  3282  	// result: (ConstBool [0])
  3283  	for {
  3284  		x := v.Args[0]
  3285  		if x != v.Args[1] {
  3286  			break
  3287  		}
  3288  		v.reset(OpConstBool)
  3289  		v.AuxInt = 0
  3290  		return true
  3291  	}
  3292  	// match: (IsInBounds (And32 (Const32 [c]) _) (Const32 [d]))
  3293  	// cond: 0 <= c && c < d
  3294  	// result: (ConstBool [1])
  3295  	for {
  3296  		v_0 := v.Args[0]
  3297  		if v_0.Op != OpAnd32 {
  3298  			break
  3299  		}
  3300  		v_0_0 := v_0.Args[0]
  3301  		if v_0_0.Op != OpConst32 {
  3302  			break
  3303  		}
  3304  		c := v_0_0.AuxInt
  3305  		v_1 := v.Args[1]
  3306  		if v_1.Op != OpConst32 {
  3307  			break
  3308  		}
  3309  		d := v_1.AuxInt
  3310  		if !(0 <= c && c < d) {
  3311  			break
  3312  		}
  3313  		v.reset(OpConstBool)
  3314  		v.AuxInt = 1
  3315  		return true
  3316  	}
  3317  	// match: (IsInBounds (And64 (Const64 [c]) _) (Const64 [d]))
  3318  	// cond: 0 <= c && c < d
  3319  	// result: (ConstBool [1])
  3320  	for {
  3321  		v_0 := v.Args[0]
  3322  		if v_0.Op != OpAnd64 {
  3323  			break
  3324  		}
  3325  		v_0_0 := v_0.Args[0]
  3326  		if v_0_0.Op != OpConst64 {
  3327  			break
  3328  		}
  3329  		c := v_0_0.AuxInt
  3330  		v_1 := v.Args[1]
  3331  		if v_1.Op != OpConst64 {
  3332  			break
  3333  		}
  3334  		d := v_1.AuxInt
  3335  		if !(0 <= c && c < d) {
  3336  			break
  3337  		}
  3338  		v.reset(OpConstBool)
  3339  		v.AuxInt = 1
  3340  		return true
  3341  	}
  3342  	// match: (IsInBounds (Const32 [c]) (Const32 [d]))
  3343  	// cond:
  3344  	// result: (ConstBool [b2i(0 <= c && c < d)])
  3345  	for {
  3346  		v_0 := v.Args[0]
  3347  		if v_0.Op != OpConst32 {
  3348  			break
  3349  		}
  3350  		c := v_0.AuxInt
  3351  		v_1 := v.Args[1]
  3352  		if v_1.Op != OpConst32 {
  3353  			break
  3354  		}
  3355  		d := v_1.AuxInt
  3356  		v.reset(OpConstBool)
  3357  		v.AuxInt = b2i(0 <= c && c < d)
  3358  		return true
  3359  	}
  3360  	// match: (IsInBounds (Const64 [c]) (Const64 [d]))
  3361  	// cond:
  3362  	// result: (ConstBool [b2i(0 <= c && c < d)])
  3363  	for {
  3364  		v_0 := v.Args[0]
  3365  		if v_0.Op != OpConst64 {
  3366  			break
  3367  		}
  3368  		c := v_0.AuxInt
  3369  		v_1 := v.Args[1]
  3370  		if v_1.Op != OpConst64 {
  3371  			break
  3372  		}
  3373  		d := v_1.AuxInt
  3374  		v.reset(OpConstBool)
  3375  		v.AuxInt = b2i(0 <= c && c < d)
  3376  		return true
  3377  	}
  3378  	// match: (IsInBounds (Mod32u _ y) y)
  3379  	// cond:
  3380  	// result: (ConstBool [1])
  3381  	for {
  3382  		v_0 := v.Args[0]
  3383  		if v_0.Op != OpMod32u {
  3384  			break
  3385  		}
  3386  		y := v_0.Args[1]
  3387  		if y != v.Args[1] {
  3388  			break
  3389  		}
  3390  		v.reset(OpConstBool)
  3391  		v.AuxInt = 1
  3392  		return true
  3393  	}
  3394  	// match: (IsInBounds (Mod64u _ y) y)
  3395  	// cond:
  3396  	// result: (ConstBool [1])
  3397  	for {
  3398  		v_0 := v.Args[0]
  3399  		if v_0.Op != OpMod64u {
  3400  			break
  3401  		}
  3402  		y := v_0.Args[1]
  3403  		if y != v.Args[1] {
  3404  			break
  3405  		}
  3406  		v.reset(OpConstBool)
  3407  		v.AuxInt = 1
  3408  		return true
  3409  	}
  3410  	return false
  3411  }
  3412  func rewriteValuegeneric_OpIsNonNil(v *Value, config *Config) bool {
  3413  	b := v.Block
  3414  	_ = b
  3415  	// match: (IsNonNil (ConstNil))
  3416  	// cond:
  3417  	// result: (ConstBool [0])
  3418  	for {
  3419  		v_0 := v.Args[0]
  3420  		if v_0.Op != OpConstNil {
  3421  			break
  3422  		}
  3423  		v.reset(OpConstBool)
  3424  		v.AuxInt = 0
  3425  		return true
  3426  	}
  3427  	return false
  3428  }
  3429  func rewriteValuegeneric_OpIsSliceInBounds(v *Value, config *Config) bool {
  3430  	b := v.Block
  3431  	_ = b
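        	// IsSliceInBounds is the slicing variant of IsInBounds: the bound is
        	// inclusive, since slicing up to the length or capacity is legal.  The
        	// same proofs apply with <= in place of <, zero is always a valid
        	// bound, and len(x) <= cap(x) holds for every slice.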
  3432  	// match: (IsSliceInBounds x x)
  3433  	// cond:
  3434  	// result: (ConstBool [1])
  3435  	for {
  3436  		x := v.Args[0]
  3437  		if x != v.Args[1] {
  3438  			break
  3439  		}
  3440  		v.reset(OpConstBool)
  3441  		v.AuxInt = 1
  3442  		return true
  3443  	}
  3444  	// match: (IsSliceInBounds (And32 (Const32 [c]) _) (Const32 [d]))
  3445  	// cond: 0 <= c && c <= d
  3446  	// result: (ConstBool [1])
  3447  	for {
  3448  		v_0 := v.Args[0]
  3449  		if v_0.Op != OpAnd32 {
  3450  			break
  3451  		}
  3452  		v_0_0 := v_0.Args[0]
  3453  		if v_0_0.Op != OpConst32 {
  3454  			break
  3455  		}
  3456  		c := v_0_0.AuxInt
  3457  		v_1 := v.Args[1]
  3458  		if v_1.Op != OpConst32 {
  3459  			break
  3460  		}
  3461  		d := v_1.AuxInt
  3462  		if !(0 <= c && c <= d) {
  3463  			break
  3464  		}
  3465  		v.reset(OpConstBool)
  3466  		v.AuxInt = 1
  3467  		return true
  3468  	}
  3469  	// match: (IsSliceInBounds (And64 (Const64 [c]) _) (Const64 [d]))
  3470  	// cond: 0 <= c && c <= d
  3471  	// result: (ConstBool [1])
  3472  	for {
  3473  		v_0 := v.Args[0]
  3474  		if v_0.Op != OpAnd64 {
  3475  			break
  3476  		}
  3477  		v_0_0 := v_0.Args[0]
  3478  		if v_0_0.Op != OpConst64 {
  3479  			break
  3480  		}
  3481  		c := v_0_0.AuxInt
  3482  		v_1 := v.Args[1]
  3483  		if v_1.Op != OpConst64 {
  3484  			break
  3485  		}
  3486  		d := v_1.AuxInt
  3487  		if !(0 <= c && c <= d) {
  3488  			break
  3489  		}
  3490  		v.reset(OpConstBool)
  3491  		v.AuxInt = 1
  3492  		return true
  3493  	}
  3494  	// match: (IsSliceInBounds (Const32 [0]) _)
  3495  	// cond:
  3496  	// result: (ConstBool [1])
  3497  	for {
  3498  		v_0 := v.Args[0]
  3499  		if v_0.Op != OpConst32 {
  3500  			break
  3501  		}
  3502  		if v_0.AuxInt != 0 {
  3503  			break
  3504  		}
  3505  		v.reset(OpConstBool)
  3506  		v.AuxInt = 1
  3507  		return true
  3508  	}
  3509  	// match: (IsSliceInBounds (Const64 [0]) _)
  3510  	// cond:
  3511  	// result: (ConstBool [1])
  3512  	for {
  3513  		v_0 := v.Args[0]
  3514  		if v_0.Op != OpConst64 {
  3515  			break
  3516  		}
  3517  		if v_0.AuxInt != 0 {
  3518  			break
  3519  		}
  3520  		v.reset(OpConstBool)
  3521  		v.AuxInt = 1
  3522  		return true
  3523  	}
  3524  	// match: (IsSliceInBounds (Const32 [c]) (Const32 [d]))
  3525  	// cond:
  3526  	// result: (ConstBool [b2i(0 <= c && c <= d)])
  3527  	for {
  3528  		v_0 := v.Args[0]
  3529  		if v_0.Op != OpConst32 {
  3530  			break
  3531  		}
  3532  		c := v_0.AuxInt
  3533  		v_1 := v.Args[1]
  3534  		if v_1.Op != OpConst32 {
  3535  			break
  3536  		}
  3537  		d := v_1.AuxInt
  3538  		v.reset(OpConstBool)
  3539  		v.AuxInt = b2i(0 <= c && c <= d)
  3540  		return true
  3541  	}
  3542  	// match: (IsSliceInBounds (Const64 [c]) (Const64 [d]))
  3543  	// cond:
  3544  	// result: (ConstBool [b2i(0 <= c && c <= d)])
  3545  	for {
  3546  		v_0 := v.Args[0]
  3547  		if v_0.Op != OpConst64 {
  3548  			break
  3549  		}
  3550  		c := v_0.AuxInt
  3551  		v_1 := v.Args[1]
  3552  		if v_1.Op != OpConst64 {
  3553  			break
  3554  		}
  3555  		d := v_1.AuxInt
  3556  		v.reset(OpConstBool)
  3557  		v.AuxInt = b2i(0 <= c && c <= d)
  3558  		return true
  3559  	}
  3560  	// match: (IsSliceInBounds (SliceLen x) (SliceCap x))
  3561  	// cond:
  3562  	// result: (ConstBool [1])
  3563  	for {
  3564  		v_0 := v.Args[0]
  3565  		if v_0.Op != OpSliceLen {
  3566  			break
  3567  		}
  3568  		x := v_0.Args[0]
  3569  		v_1 := v.Args[1]
  3570  		if v_1.Op != OpSliceCap {
  3571  			break
  3572  		}
  3573  		if x != v_1.Args[0] {
  3574  			break
  3575  		}
  3576  		v.reset(OpConstBool)
  3577  		v.AuxInt = 1
  3578  		return true
  3579  	}
  3580  	return false
  3581  }
  3582  func rewriteValuegeneric_OpLeq16(v *Value, config *Config) bool {
  3583  	b := v.Block
  3584  	_ = b
  3585  	// match: (Leq16 (Const16 [c]) (Const16 [d]))
  3586  	// cond:
  3587  	// result: (ConstBool [b2i(c <= d)])
  3588  	for {
  3589  		v_0 := v.Args[0]
  3590  		if v_0.Op != OpConst16 {
  3591  			break
  3592  		}
  3593  		c := v_0.AuxInt
  3594  		v_1 := v.Args[1]
  3595  		if v_1.Op != OpConst16 {
  3596  			break
  3597  		}
  3598  		d := v_1.AuxInt
  3599  		v.reset(OpConstBool)
  3600  		v.AuxInt = b2i(c <= d)
  3601  		return true
  3602  	}
  3603  	return false
  3604  }
  3605  func rewriteValuegeneric_OpLeq16U(v *Value, config *Config) bool {
  3606  	b := v.Block
  3607  	_ = b
  3608  	// match: (Leq16U (Const16 [c]) (Const16 [d]))
  3609  	// cond:
  3610  	// result: (ConstBool [b2i(uint16(c) <= uint16(d))])
  3611  	for {
  3612  		v_0 := v.Args[0]
  3613  		if v_0.Op != OpConst16 {
  3614  			break
  3615  		}
  3616  		c := v_0.AuxInt
  3617  		v_1 := v.Args[1]
  3618  		if v_1.Op != OpConst16 {
  3619  			break
  3620  		}
  3621  		d := v_1.AuxInt
  3622  		v.reset(OpConstBool)
  3623  		v.AuxInt = b2i(uint16(c) <= uint16(d))
  3624  		return true
  3625  	}
  3626  	return false
  3627  }
  3628  func rewriteValuegeneric_OpLeq32(v *Value, config *Config) bool {
  3629  	b := v.Block
  3630  	_ = b
  3631  	// match: (Leq32 (Const32 [c]) (Const32 [d]))
  3632  	// cond:
  3633  	// result: (ConstBool [b2i(c <= d)])
  3634  	for {
  3635  		v_0 := v.Args[0]
  3636  		if v_0.Op != OpConst32 {
  3637  			break
  3638  		}
  3639  		c := v_0.AuxInt
  3640  		v_1 := v.Args[1]
  3641  		if v_1.Op != OpConst32 {
  3642  			break
  3643  		}
  3644  		d := v_1.AuxInt
  3645  		v.reset(OpConstBool)
  3646  		v.AuxInt = b2i(c <= d)
  3647  		return true
  3648  	}
  3649  	return false
  3650  }
  3651  func rewriteValuegeneric_OpLeq32U(v *Value, config *Config) bool {
  3652  	b := v.Block
  3653  	_ = b
  3654  	// match: (Leq32U (Const32 [c]) (Const32 [d]))
  3655  	// cond:
  3656  	// result: (ConstBool [b2i(uint32(c) <= uint32(d))])
  3657  	for {
  3658  		v_0 := v.Args[0]
  3659  		if v_0.Op != OpConst32 {
  3660  			break
  3661  		}
  3662  		c := v_0.AuxInt
  3663  		v_1 := v.Args[1]
  3664  		if v_1.Op != OpConst32 {
  3665  			break
  3666  		}
  3667  		d := v_1.AuxInt
  3668  		v.reset(OpConstBool)
  3669  		v.AuxInt = b2i(uint32(c) <= uint32(d))
  3670  		return true
  3671  	}
  3672  	return false
  3673  }
  3674  func rewriteValuegeneric_OpLeq64(v *Value, config *Config) bool {
  3675  	b := v.Block
  3676  	_ = b
  3677  	// match: (Leq64 (Const64 [c]) (Const64 [d]))
  3678  	// cond:
  3679  	// result: (ConstBool [b2i(c <= d)])
  3680  	for {
  3681  		v_0 := v.Args[0]
  3682  		if v_0.Op != OpConst64 {
  3683  			break
  3684  		}
  3685  		c := v_0.AuxInt
  3686  		v_1 := v.Args[1]
  3687  		if v_1.Op != OpConst64 {
  3688  			break
  3689  		}
  3690  		d := v_1.AuxInt
  3691  		v.reset(OpConstBool)
  3692  		v.AuxInt = b2i(c <= d)
  3693  		return true
  3694  	}
  3695  	return false
  3696  }
  3697  func rewriteValuegeneric_OpLeq64U(v *Value, config *Config) bool {
  3698  	b := v.Block
  3699  	_ = b
  3700  	// match: (Leq64U (Const64 [c]) (Const64 [d]))
  3701  	// cond:
  3702  	// result: (ConstBool [b2i(uint64(c) <= uint64(d))])
  3703  	for {
  3704  		v_0 := v.Args[0]
  3705  		if v_0.Op != OpConst64 {
  3706  			break
  3707  		}
  3708  		c := v_0.AuxInt
  3709  		v_1 := v.Args[1]
  3710  		if v_1.Op != OpConst64 {
  3711  			break
  3712  		}
  3713  		d := v_1.AuxInt
  3714  		v.reset(OpConstBool)
  3715  		v.AuxInt = b2i(uint64(c) <= uint64(d))
  3716  		return true
  3717  	}
  3718  	return false
  3719  }
  3720  func rewriteValuegeneric_OpLeq8(v *Value, config *Config) bool {
  3721  	b := v.Block
  3722  	_ = b
  3723  	// match: (Leq8  (Const8  [c]) (Const8  [d]))
  3724  	// cond:
  3725  	// result: (ConstBool [b2i(c <= d)])
  3726  	for {
  3727  		v_0 := v.Args[0]
  3728  		if v_0.Op != OpConst8 {
  3729  			break
  3730  		}
  3731  		c := v_0.AuxInt
  3732  		v_1 := v.Args[1]
  3733  		if v_1.Op != OpConst8 {
  3734  			break
  3735  		}
  3736  		d := v_1.AuxInt
  3737  		v.reset(OpConstBool)
  3738  		v.AuxInt = b2i(c <= d)
  3739  		return true
  3740  	}
  3741  	return false
  3742  }
  3743  func rewriteValuegeneric_OpLeq8U(v *Value, config *Config) bool {
  3744  	b := v.Block
  3745  	_ = b
  3746  	// match: (Leq8U  (Const8  [c]) (Const8  [d]))
  3747  	// cond:
  3748  	// result: (ConstBool [b2i(uint8(c)  <= uint8(d))])
  3749  	for {
  3750  		v_0 := v.Args[0]
  3751  		if v_0.Op != OpConst8 {
  3752  			break
  3753  		}
  3754  		c := v_0.AuxInt
  3755  		v_1 := v.Args[1]
  3756  		if v_1.Op != OpConst8 {
  3757  			break
  3758  		}
  3759  		d := v_1.AuxInt
  3760  		v.reset(OpConstBool)
  3761  		v.AuxInt = b2i(uint8(c) <= uint8(d))
  3762  		return true
  3763  	}
  3764  	return false
  3765  }
  3766  func rewriteValuegeneric_OpLess16(v *Value, config *Config) bool {
  3767  	b := v.Block
  3768  	_ = b
  3769  	// match: (Less16 (Const16 [c]) (Const16 [d]))
  3770  	// cond:
  3771  	// result: (ConstBool [b2i(c < d)])
  3772  	for {
  3773  		v_0 := v.Args[0]
  3774  		if v_0.Op != OpConst16 {
  3775  			break
  3776  		}
  3777  		c := v_0.AuxInt
  3778  		v_1 := v.Args[1]
  3779  		if v_1.Op != OpConst16 {
  3780  			break
  3781  		}
  3782  		d := v_1.AuxInt
  3783  		v.reset(OpConstBool)
  3784  		v.AuxInt = b2i(c < d)
  3785  		return true
  3786  	}
  3787  	return false
  3788  }
  3789  func rewriteValuegeneric_OpLess16U(v *Value, config *Config) bool {
  3790  	b := v.Block
  3791  	_ = b
  3792  	// match: (Less16U (Const16 [c]) (Const16 [d]))
  3793  	// cond:
  3794  	// result: (ConstBool [b2i(uint16(c) < uint16(d))])
  3795  	for {
  3796  		v_0 := v.Args[0]
  3797  		if v_0.Op != OpConst16 {
  3798  			break
  3799  		}
  3800  		c := v_0.AuxInt
  3801  		v_1 := v.Args[1]
  3802  		if v_1.Op != OpConst16 {
  3803  			break
  3804  		}
  3805  		d := v_1.AuxInt
  3806  		v.reset(OpConstBool)
  3807  		v.AuxInt = b2i(uint16(c) < uint16(d))
  3808  		return true
  3809  	}
  3810  	return false
  3811  }
  3812  func rewriteValuegeneric_OpLess32(v *Value, config *Config) bool {
  3813  	b := v.Block
  3814  	_ = b
  3815  	// match: (Less32 (Const32 [c]) (Const32 [d]))
  3816  	// cond:
  3817  	// result: (ConstBool [b2i(c < d)])
  3818  	for {
  3819  		v_0 := v.Args[0]
  3820  		if v_0.Op != OpConst32 {
  3821  			break
  3822  		}
  3823  		c := v_0.AuxInt
  3824  		v_1 := v.Args[1]
  3825  		if v_1.Op != OpConst32 {
  3826  			break
  3827  		}
  3828  		d := v_1.AuxInt
  3829  		v.reset(OpConstBool)
  3830  		v.AuxInt = b2i(c < d)
  3831  		return true
  3832  	}
  3833  	return false
  3834  }
  3835  func rewriteValuegeneric_OpLess32U(v *Value, config *Config) bool {
  3836  	b := v.Block
  3837  	_ = b
  3838  	// match: (Less32U (Const32 [c]) (Const32 [d]))
  3839  	// cond:
  3840  	// result: (ConstBool [b2i(uint32(c) < uint32(d))])
  3841  	for {
  3842  		v_0 := v.Args[0]
  3843  		if v_0.Op != OpConst32 {
  3844  			break
  3845  		}
  3846  		c := v_0.AuxInt
  3847  		v_1 := v.Args[1]
  3848  		if v_1.Op != OpConst32 {
  3849  			break
  3850  		}
  3851  		d := v_1.AuxInt
  3852  		v.reset(OpConstBool)
  3853  		v.AuxInt = b2i(uint32(c) < uint32(d))
  3854  		return true
  3855  	}
  3856  	return false
  3857  }
  3858  func rewriteValuegeneric_OpLess64(v *Value, config *Config) bool {
  3859  	b := v.Block
  3860  	_ = b
  3861  	// match: (Less64 (Const64 [c]) (Const64 [d]))
  3862  	// cond:
  3863  	// result: (ConstBool [b2i(c < d)])
  3864  	for {
  3865  		v_0 := v.Args[0]
  3866  		if v_0.Op != OpConst64 {
  3867  			break
  3868  		}
  3869  		c := v_0.AuxInt
  3870  		v_1 := v.Args[1]
  3871  		if v_1.Op != OpConst64 {
  3872  			break
  3873  		}
  3874  		d := v_1.AuxInt
  3875  		v.reset(OpConstBool)
  3876  		v.AuxInt = b2i(c < d)
  3877  		return true
  3878  	}
  3879  	return false
  3880  }
  3881  func rewriteValuegeneric_OpLess64U(v *Value, config *Config) bool {
  3882  	b := v.Block
  3883  	_ = b
  3884  	// match: (Less64U (Const64 [c]) (Const64 [d]))
  3885  	// cond:
  3886  	// result: (ConstBool [b2i(uint64(c) < uint64(d))])
  3887  	for {
  3888  		v_0 := v.Args[0]
  3889  		if v_0.Op != OpConst64 {
  3890  			break
  3891  		}
  3892  		c := v_0.AuxInt
  3893  		v_1 := v.Args[1]
  3894  		if v_1.Op != OpConst64 {
  3895  			break
  3896  		}
  3897  		d := v_1.AuxInt
  3898  		v.reset(OpConstBool)
  3899  		v.AuxInt = b2i(uint64(c) < uint64(d))
  3900  		return true
  3901  	}
  3902  	return false
  3903  }
  3904  func rewriteValuegeneric_OpLess8(v *Value, config *Config) bool {
  3905  	b := v.Block
  3906  	_ = b
  3907  	// match: (Less8  (Const8  [c]) (Const8  [d]))
  3908  	// cond:
  3909  	// result: (ConstBool [b2i(c < d)])
  3910  	for {
  3911  		v_0 := v.Args[0]
  3912  		if v_0.Op != OpConst8 {
  3913  			break
  3914  		}
  3915  		c := v_0.AuxInt
  3916  		v_1 := v.Args[1]
  3917  		if v_1.Op != OpConst8 {
  3918  			break
  3919  		}
  3920  		d := v_1.AuxInt
  3921  		v.reset(OpConstBool)
  3922  		v.AuxInt = b2i(c < d)
  3923  		return true
  3924  	}
  3925  	return false
  3926  }
  3927  func rewriteValuegeneric_OpLess8U(v *Value, config *Config) bool {
  3928  	b := v.Block
  3929  	_ = b
  3930  	// match: (Less8U  (Const8  [c]) (Const8  [d]))
  3931  	// cond:
  3932  	// result: (ConstBool [b2i(uint8(c)  < uint8(d))])
  3933  	for {
  3934  		v_0 := v.Args[0]
  3935  		if v_0.Op != OpConst8 {
  3936  			break
  3937  		}
  3938  		c := v_0.AuxInt
  3939  		v_1 := v.Args[1]
  3940  		if v_1.Op != OpConst8 {
  3941  			break
  3942  		}
  3943  		d := v_1.AuxInt
  3944  		v.reset(OpConstBool)
  3945  		v.AuxInt = b2i(uint8(c) < uint8(d))
  3946  		return true
  3947  	}
  3948  	return false
  3949  }
  3950  func rewriteValuegeneric_OpLoad(v *Value, config *Config) bool {
  3951  	b := v.Block
  3952  	_ = b
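        	// Load rules.  The first forwards a stored value straight to a Load of
        	// the same address when the types and the store width agree, skipping
        	// the memory round trip.  The rest decompose a load of a small SSA-able
        	// struct into per-field loads at OffPtr offsets so the fields can live
        	// in registers.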
  3953  	// match: (Load <t1> p1 (Store [w] p2 x _))
  3954  	// cond: isSamePtr(p1,p2) && t1.Compare(x.Type)==CMPeq && w == t1.Size()
  3955  	// result: x
  3956  	for {
  3957  		t1 := v.Type
  3958  		p1 := v.Args[0]
  3959  		v_1 := v.Args[1]
  3960  		if v_1.Op != OpStore {
  3961  			break
  3962  		}
  3963  		w := v_1.AuxInt
  3964  		p2 := v_1.Args[0]
  3965  		x := v_1.Args[1]
  3966  		if !(isSamePtr(p1, p2) && t1.Compare(x.Type) == CMPeq && w == t1.Size()) {
  3967  			break
  3968  		}
  3969  		v.reset(OpCopy)
  3970  		v.Type = x.Type
  3971  		v.AddArg(x)
  3972  		return true
  3973  	}
  3974  	// match: (Load <t> _ _)
  3975  	// cond: t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t)
  3976  	// result: (StructMake0)
  3977  	for {
  3978  		t := v.Type
  3979  		if !(t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t)) {
  3980  			break
  3981  		}
  3982  		v.reset(OpStructMake0)
  3983  		return true
  3984  	}
  3985  	// match: (Load <t> ptr mem)
  3986  	// cond: t.IsStruct() && t.NumFields() == 1 && config.fe.CanSSA(t)
  3987  	// result: (StructMake1     (Load <t.FieldType(0)> ptr mem))
  3988  	for {
  3989  		t := v.Type
  3990  		ptr := v.Args[0]
  3991  		mem := v.Args[1]
  3992  		if !(t.IsStruct() && t.NumFields() == 1 && config.fe.CanSSA(t)) {
  3993  			break
  3994  		}
  3995  		v.reset(OpStructMake1)
  3996  		v0 := b.NewValue0(v.Pos, OpLoad, t.FieldType(0))
  3997  		v0.AddArg(ptr)
  3998  		v0.AddArg(mem)
  3999  		v.AddArg(v0)
  4000  		return true
  4001  	}
  4002  	// match: (Load <t> ptr mem)
  4003  	// cond: t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t)
  4004  	// result: (StructMake2     (Load <t.FieldType(0)> ptr mem)     (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem))
  4005  	for {
  4006  		t := v.Type
  4007  		ptr := v.Args[0]
  4008  		mem := v.Args[1]
  4009  		if !(t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t)) {
  4010  			break
  4011  		}
  4012  		v.reset(OpStructMake2)
  4013  		v0 := b.NewValue0(v.Pos, OpLoad, t.FieldType(0))
  4014  		v0.AddArg(ptr)
  4015  		v0.AddArg(mem)
  4016  		v.AddArg(v0)
  4017  		v1 := b.NewValue0(v.Pos, OpLoad, t.FieldType(1))
  4018  		v2 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo())
  4019  		v2.AuxInt = t.FieldOff(1)
  4020  		v2.AddArg(ptr)
  4021  		v1.AddArg(v2)
  4022  		v1.AddArg(mem)
  4023  		v.AddArg(v1)
  4024  		return true
  4025  	}
  4026  	// match: (Load <t> ptr mem)
  4027  	// cond: t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t)
  4028  	// result: (StructMake3     (Load <t.FieldType(0)> ptr mem)     (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem)     (Load <t.FieldType(2)> (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] ptr) mem))
  4029  	for {
  4030  		t := v.Type
  4031  		ptr := v.Args[0]
  4032  		mem := v.Args[1]
  4033  		if !(t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t)) {
  4034  			break
  4035  		}
  4036  		v.reset(OpStructMake3)
  4037  		v0 := b.NewValue0(v.Pos, OpLoad, t.FieldType(0))
  4038  		v0.AddArg(ptr)
  4039  		v0.AddArg(mem)
  4040  		v.AddArg(v0)
  4041  		v1 := b.NewValue0(v.Pos, OpLoad, t.FieldType(1))
  4042  		v2 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo())
  4043  		v2.AuxInt = t.FieldOff(1)
  4044  		v2.AddArg(ptr)
  4045  		v1.AddArg(v2)
  4046  		v1.AddArg(mem)
  4047  		v.AddArg(v1)
  4048  		v3 := b.NewValue0(v.Pos, OpLoad, t.FieldType(2))
  4049  		v4 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(2).PtrTo())
  4050  		v4.AuxInt = t.FieldOff(2)
  4051  		v4.AddArg(ptr)
  4052  		v3.AddArg(v4)
  4053  		v3.AddArg(mem)
  4054  		v.AddArg(v3)
  4055  		return true
  4056  	}
  4057  	// match: (Load <t> ptr mem)
  4058  	// cond: t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t)
  4059  	// result: (StructMake4     (Load <t.FieldType(0)> ptr mem)     (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem)     (Load <t.FieldType(2)> (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] ptr) mem)     (Load <t.FieldType(3)> (OffPtr <t.FieldType(3).PtrTo()> [t.FieldOff(3)] ptr) mem))
  4060  	for {
  4061  		t := v.Type
  4062  		ptr := v.Args[0]
  4063  		mem := v.Args[1]
  4064  		if !(t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t)) {
  4065  			break
  4066  		}
  4067  		v.reset(OpStructMake4)
  4068  		v0 := b.NewValue0(v.Pos, OpLoad, t.FieldType(0))
  4069  		v0.AddArg(ptr)
  4070  		v0.AddArg(mem)
  4071  		v.AddArg(v0)
  4072  		v1 := b.NewValue0(v.Pos, OpLoad, t.FieldType(1))
  4073  		v2 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo())
  4074  		v2.AuxInt = t.FieldOff(1)
  4075  		v2.AddArg(ptr)
  4076  		v1.AddArg(v2)
  4077  		v1.AddArg(mem)
  4078  		v.AddArg(v1)
  4079  		v3 := b.NewValue0(v.Pos, OpLoad, t.FieldType(2))
  4080  		v4 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(2).PtrTo())
  4081  		v4.AuxInt = t.FieldOff(2)
  4082  		v4.AddArg(ptr)
  4083  		v3.AddArg(v4)
  4084  		v3.AddArg(mem)
  4085  		v.AddArg(v3)
  4086  		v5 := b.NewValue0(v.Pos, OpLoad, t.FieldType(3))
  4087  		v6 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(3).PtrTo())
  4088  		v6.AuxInt = t.FieldOff(3)
  4089  		v6.AddArg(ptr)
  4090  		v5.AddArg(v6)
  4091  		v5.AddArg(mem)
  4092  		v.AddArg(v5)
  4093  		return true
  4094  	}
  4095  	// match: (Load <t> _ _)
  4096  	// cond: t.IsArray() && t.NumElem() == 0
  4097  	// result: (ArrayMake0)
  4098  	for {
  4099  		t := v.Type
  4100  		if !(t.IsArray() && t.NumElem() == 0) {
  4101  			break
  4102  		}
  4103  		v.reset(OpArrayMake0)
  4104  		return true
  4105  	}
  4106  	// match: (Load <t> ptr mem)
  4107  	// cond: t.IsArray() && t.NumElem() == 1 && config.fe.CanSSA(t)
  4108  	// result: (ArrayMake1 (Load <t.ElemType()> ptr mem))
  4109  	for {
  4110  		t := v.Type
  4111  		ptr := v.Args[0]
  4112  		mem := v.Args[1]
  4113  		if !(t.IsArray() && t.NumElem() == 1 && config.fe.CanSSA(t)) {
  4114  			break
  4115  		}
  4116  		v.reset(OpArrayMake1)
  4117  		v0 := b.NewValue0(v.Pos, OpLoad, t.ElemType())
  4118  		v0.AddArg(ptr)
  4119  		v0.AddArg(mem)
  4120  		v.AddArg(v0)
  4121  		return true
  4122  	}
  4123  	return false
  4124  }
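// The left-shift rules that follow share one pattern per operand width:
// shift counts carried in 8/16/32-bit constants are widened to a Const64, so
// only the x64 variants need further rules. Those variants constant-fold a
// shift of a constant, drop a shift by zero, propagate a zero first operand,
// turn shift counts >= the operand width into zero, merge nested left shifts
// when the counts don't overflow (uaddOvf), and collapse the mask-like
// Lsh(Rsh(Lsh x c1) c2) c3 pattern into a single shift by c1-c2+c3.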
  4125  func rewriteValuegeneric_OpLsh16x16(v *Value, config *Config) bool {
  4126  	b := v.Block
  4127  	_ = b
  4128  	// match: (Lsh16x16  <t> x (Const16 [c]))
  4129  	// cond:
  4130  	// result: (Lsh16x64  x (Const64 <t> [int64(uint16(c))]))
  4131  	for {
  4132  		t := v.Type
  4133  		x := v.Args[0]
  4134  		v_1 := v.Args[1]
  4135  		if v_1.Op != OpConst16 {
  4136  			break
  4137  		}
  4138  		c := v_1.AuxInt
  4139  		v.reset(OpLsh16x64)
  4140  		v.AddArg(x)
  4141  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  4142  		v0.AuxInt = int64(uint16(c))
  4143  		v.AddArg(v0)
  4144  		return true
  4145  	}
  4146  	// match: (Lsh16x16  (Const16 [0]) _)
  4147  	// cond:
  4148  	// result: (Const16 [0])
  4149  	for {
  4150  		v_0 := v.Args[0]
  4151  		if v_0.Op != OpConst16 {
  4152  			break
  4153  		}
  4154  		if v_0.AuxInt != 0 {
  4155  			break
  4156  		}
  4157  		v.reset(OpConst16)
  4158  		v.AuxInt = 0
  4159  		return true
  4160  	}
  4161  	return false
  4162  }
  4163  func rewriteValuegeneric_OpLsh16x32(v *Value, config *Config) bool {
  4164  	b := v.Block
  4165  	_ = b
  4166  	// match: (Lsh16x32  <t> x (Const32 [c]))
  4167  	// cond:
  4168  	// result: (Lsh16x64  x (Const64 <t> [int64(uint32(c))]))
  4169  	for {
  4170  		t := v.Type
  4171  		x := v.Args[0]
  4172  		v_1 := v.Args[1]
  4173  		if v_1.Op != OpConst32 {
  4174  			break
  4175  		}
  4176  		c := v_1.AuxInt
  4177  		v.reset(OpLsh16x64)
  4178  		v.AddArg(x)
  4179  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  4180  		v0.AuxInt = int64(uint32(c))
  4181  		v.AddArg(v0)
  4182  		return true
  4183  	}
  4184  	// match: (Lsh16x32  (Const16 [0]) _)
  4185  	// cond:
  4186  	// result: (Const16 [0])
  4187  	for {
  4188  		v_0 := v.Args[0]
  4189  		if v_0.Op != OpConst16 {
  4190  			break
  4191  		}
  4192  		if v_0.AuxInt != 0 {
  4193  			break
  4194  		}
  4195  		v.reset(OpConst16)
  4196  		v.AuxInt = 0
  4197  		return true
  4198  	}
  4199  	return false
  4200  }
  4201  func rewriteValuegeneric_OpLsh16x64(v *Value, config *Config) bool {
  4202  	b := v.Block
  4203  	_ = b
  4204  	// match: (Lsh16x64  (Const16 [c]) (Const64 [d]))
  4205  	// cond:
  4206  	// result: (Const16 [int64(int16(c) << uint64(d))])
  4207  	for {
  4208  		v_0 := v.Args[0]
  4209  		if v_0.Op != OpConst16 {
  4210  			break
  4211  		}
  4212  		c := v_0.AuxInt
  4213  		v_1 := v.Args[1]
  4214  		if v_1.Op != OpConst64 {
  4215  			break
  4216  		}
  4217  		d := v_1.AuxInt
  4218  		v.reset(OpConst16)
  4219  		v.AuxInt = int64(int16(c) << uint64(d))
  4220  		return true
  4221  	}
  4222  	// match: (Lsh16x64  x (Const64 [0]))
  4223  	// cond:
  4224  	// result: x
  4225  	for {
  4226  		x := v.Args[0]
  4227  		v_1 := v.Args[1]
  4228  		if v_1.Op != OpConst64 {
  4229  			break
  4230  		}
  4231  		if v_1.AuxInt != 0 {
  4232  			break
  4233  		}
  4234  		v.reset(OpCopy)
  4235  		v.Type = x.Type
  4236  		v.AddArg(x)
  4237  		return true
  4238  	}
  4239  	// match: (Lsh16x64  (Const16 [0]) _)
  4240  	// cond:
  4241  	// result: (Const16 [0])
  4242  	for {
  4243  		v_0 := v.Args[0]
  4244  		if v_0.Op != OpConst16 {
  4245  			break
  4246  		}
  4247  		if v_0.AuxInt != 0 {
  4248  			break
  4249  		}
  4250  		v.reset(OpConst16)
  4251  		v.AuxInt = 0
  4252  		return true
  4253  	}
  4254  	// match: (Lsh16x64  _ (Const64 [c]))
  4255  	// cond: uint64(c) >= 16
  4256  	// result: (Const16 [0])
  4257  	for {
  4258  		v_1 := v.Args[1]
  4259  		if v_1.Op != OpConst64 {
  4260  			break
  4261  		}
  4262  		c := v_1.AuxInt
  4263  		if !(uint64(c) >= 16) {
  4264  			break
  4265  		}
  4266  		v.reset(OpConst16)
  4267  		v.AuxInt = 0
  4268  		return true
  4269  	}
  4270  	// match: (Lsh16x64 <t> (Lsh16x64 x (Const64 [c])) (Const64 [d]))
  4271  	// cond: !uaddOvf(c,d)
  4272  	// result: (Lsh16x64 x (Const64 <t> [c+d]))
  4273  	for {
  4274  		t := v.Type
  4275  		v_0 := v.Args[0]
  4276  		if v_0.Op != OpLsh16x64 {
  4277  			break
  4278  		}
  4279  		x := v_0.Args[0]
  4280  		v_0_1 := v_0.Args[1]
  4281  		if v_0_1.Op != OpConst64 {
  4282  			break
  4283  		}
  4284  		c := v_0_1.AuxInt
  4285  		v_1 := v.Args[1]
  4286  		if v_1.Op != OpConst64 {
  4287  			break
  4288  		}
  4289  		d := v_1.AuxInt
  4290  		if !(!uaddOvf(c, d)) {
  4291  			break
  4292  		}
  4293  		v.reset(OpLsh16x64)
  4294  		v.AddArg(x)
  4295  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  4296  		v0.AuxInt = c + d
  4297  		v.AddArg(v0)
  4298  		return true
  4299  	}
  4300  	// match: (Lsh16x64 (Rsh16Ux64 (Lsh16x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
  4301  	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
  4302  	// result: (Lsh16x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
  4303  	for {
  4304  		v_0 := v.Args[0]
  4305  		if v_0.Op != OpRsh16Ux64 {
  4306  			break
  4307  		}
  4308  		v_0_0 := v_0.Args[0]
  4309  		if v_0_0.Op != OpLsh16x64 {
  4310  			break
  4311  		}
  4312  		x := v_0_0.Args[0]
  4313  		v_0_0_1 := v_0_0.Args[1]
  4314  		if v_0_0_1.Op != OpConst64 {
  4315  			break
  4316  		}
  4317  		c1 := v_0_0_1.AuxInt
  4318  		v_0_1 := v_0.Args[1]
  4319  		if v_0_1.Op != OpConst64 {
  4320  			break
  4321  		}
  4322  		c2 := v_0_1.AuxInt
  4323  		v_1 := v.Args[1]
  4324  		if v_1.Op != OpConst64 {
  4325  			break
  4326  		}
  4327  		c3 := v_1.AuxInt
  4328  		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
  4329  			break
  4330  		}
  4331  		v.reset(OpLsh16x64)
  4332  		v.AddArg(x)
  4333  		v0 := b.NewValue0(v.Pos, OpConst64, config.fe.TypeUInt64())
  4334  		v0.AuxInt = c1 - c2 + c3
  4335  		v.AddArg(v0)
  4336  		return true
  4337  	}
  4338  	return false
  4339  }
  4340  func rewriteValuegeneric_OpLsh16x8(v *Value, config *Config) bool {
  4341  	b := v.Block
  4342  	_ = b
  4343  	// match: (Lsh16x8   <t> x (Const8  [c]))
  4344  	// cond:
  4345  	// result: (Lsh16x64  x (Const64 <t> [int64(uint8(c))]))
  4346  	for {
  4347  		t := v.Type
  4348  		x := v.Args[0]
  4349  		v_1 := v.Args[1]
  4350  		if v_1.Op != OpConst8 {
  4351  			break
  4352  		}
  4353  		c := v_1.AuxInt
  4354  		v.reset(OpLsh16x64)
  4355  		v.AddArg(x)
  4356  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  4357  		v0.AuxInt = int64(uint8(c))
  4358  		v.AddArg(v0)
  4359  		return true
  4360  	}
  4361  	// match: (Lsh16x8  (Const16 [0]) _)
  4362  	// cond:
  4363  	// result: (Const16 [0])
  4364  	for {
  4365  		v_0 := v.Args[0]
  4366  		if v_0.Op != OpConst16 {
  4367  			break
  4368  		}
  4369  		if v_0.AuxInt != 0 {
  4370  			break
  4371  		}
  4372  		v.reset(OpConst16)
  4373  		v.AuxInt = 0
  4374  		return true
  4375  	}
  4376  	return false
  4377  }
  4378  func rewriteValuegeneric_OpLsh32x16(v *Value, config *Config) bool {
  4379  	b := v.Block
  4380  	_ = b
  4381  	// match: (Lsh32x16  <t> x (Const16 [c]))
  4382  	// cond:
  4383  	// result: (Lsh32x64  x (Const64 <t> [int64(uint16(c))]))
  4384  	for {
  4385  		t := v.Type
  4386  		x := v.Args[0]
  4387  		v_1 := v.Args[1]
  4388  		if v_1.Op != OpConst16 {
  4389  			break
  4390  		}
  4391  		c := v_1.AuxInt
  4392  		v.reset(OpLsh32x64)
  4393  		v.AddArg(x)
  4394  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  4395  		v0.AuxInt = int64(uint16(c))
  4396  		v.AddArg(v0)
  4397  		return true
  4398  	}
  4399  	// match: (Lsh32x16  (Const32 [0]) _)
  4400  	// cond:
  4401  	// result: (Const32 [0])
  4402  	for {
  4403  		v_0 := v.Args[0]
  4404  		if v_0.Op != OpConst32 {
  4405  			break
  4406  		}
  4407  		if v_0.AuxInt != 0 {
  4408  			break
  4409  		}
  4410  		v.reset(OpConst32)
  4411  		v.AuxInt = 0
  4412  		return true
  4413  	}
  4414  	return false
  4415  }
  4416  func rewriteValuegeneric_OpLsh32x32(v *Value, config *Config) bool {
  4417  	b := v.Block
  4418  	_ = b
  4419  	// match: (Lsh32x32  <t> x (Const32 [c]))
  4420  	// cond:
  4421  	// result: (Lsh32x64  x (Const64 <t> [int64(uint32(c))]))
  4422  	for {
  4423  		t := v.Type
  4424  		x := v.Args[0]
  4425  		v_1 := v.Args[1]
  4426  		if v_1.Op != OpConst32 {
  4427  			break
  4428  		}
  4429  		c := v_1.AuxInt
  4430  		v.reset(OpLsh32x64)
  4431  		v.AddArg(x)
  4432  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  4433  		v0.AuxInt = int64(uint32(c))
  4434  		v.AddArg(v0)
  4435  		return true
  4436  	}
  4437  	// match: (Lsh32x32  (Const32 [0]) _)
  4438  	// cond:
  4439  	// result: (Const32 [0])
  4440  	for {
  4441  		v_0 := v.Args[0]
  4442  		if v_0.Op != OpConst32 {
  4443  			break
  4444  		}
  4445  		if v_0.AuxInt != 0 {
  4446  			break
  4447  		}
  4448  		v.reset(OpConst32)
  4449  		v.AuxInt = 0
  4450  		return true
  4451  	}
  4452  	return false
  4453  }
  4454  func rewriteValuegeneric_OpLsh32x64(v *Value, config *Config) bool {
  4455  	b := v.Block
  4456  	_ = b
  4457  	// match: (Lsh32x64  (Const32 [c]) (Const64 [d]))
  4458  	// cond:
  4459  	// result: (Const32 [int64(int32(c) << uint64(d))])
  4460  	for {
  4461  		v_0 := v.Args[0]
  4462  		if v_0.Op != OpConst32 {
  4463  			break
  4464  		}
  4465  		c := v_0.AuxInt
  4466  		v_1 := v.Args[1]
  4467  		if v_1.Op != OpConst64 {
  4468  			break
  4469  		}
  4470  		d := v_1.AuxInt
  4471  		v.reset(OpConst32)
  4472  		v.AuxInt = int64(int32(c) << uint64(d))
  4473  		return true
  4474  	}
  4475  	// match: (Lsh32x64  x (Const64 [0]))
  4476  	// cond:
  4477  	// result: x
  4478  	for {
  4479  		x := v.Args[0]
  4480  		v_1 := v.Args[1]
  4481  		if v_1.Op != OpConst64 {
  4482  			break
  4483  		}
  4484  		if v_1.AuxInt != 0 {
  4485  			break
  4486  		}
  4487  		v.reset(OpCopy)
  4488  		v.Type = x.Type
  4489  		v.AddArg(x)
  4490  		return true
  4491  	}
  4492  	// match: (Lsh32x64  (Const32 [0]) _)
  4493  	// cond:
  4494  	// result: (Const32 [0])
  4495  	for {
  4496  		v_0 := v.Args[0]
  4497  		if v_0.Op != OpConst32 {
  4498  			break
  4499  		}
  4500  		if v_0.AuxInt != 0 {
  4501  			break
  4502  		}
  4503  		v.reset(OpConst32)
  4504  		v.AuxInt = 0
  4505  		return true
  4506  	}
  4507  	// match: (Lsh32x64  _ (Const64 [c]))
  4508  	// cond: uint64(c) >= 32
  4509  	// result: (Const32 [0])
  4510  	for {
  4511  		v_1 := v.Args[1]
  4512  		if v_1.Op != OpConst64 {
  4513  			break
  4514  		}
  4515  		c := v_1.AuxInt
  4516  		if !(uint64(c) >= 32) {
  4517  			break
  4518  		}
  4519  		v.reset(OpConst32)
  4520  		v.AuxInt = 0
  4521  		return true
  4522  	}
  4523  	// match: (Lsh32x64 <t> (Lsh32x64 x (Const64 [c])) (Const64 [d]))
  4524  	// cond: !uaddOvf(c,d)
  4525  	// result: (Lsh32x64 x (Const64 <t> [c+d]))
  4526  	for {
  4527  		t := v.Type
  4528  		v_0 := v.Args[0]
  4529  		if v_0.Op != OpLsh32x64 {
  4530  			break
  4531  		}
  4532  		x := v_0.Args[0]
  4533  		v_0_1 := v_0.Args[1]
  4534  		if v_0_1.Op != OpConst64 {
  4535  			break
  4536  		}
  4537  		c := v_0_1.AuxInt
  4538  		v_1 := v.Args[1]
  4539  		if v_1.Op != OpConst64 {
  4540  			break
  4541  		}
  4542  		d := v_1.AuxInt
  4543  		if !(!uaddOvf(c, d)) {
  4544  			break
  4545  		}
  4546  		v.reset(OpLsh32x64)
  4547  		v.AddArg(x)
  4548  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  4549  		v0.AuxInt = c + d
  4550  		v.AddArg(v0)
  4551  		return true
  4552  	}
  4553  	// match: (Lsh32x64 (Rsh32Ux64 (Lsh32x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
  4554  	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
  4555  	// result: (Lsh32x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
  4556  	for {
  4557  		v_0 := v.Args[0]
  4558  		if v_0.Op != OpRsh32Ux64 {
  4559  			break
  4560  		}
  4561  		v_0_0 := v_0.Args[0]
  4562  		if v_0_0.Op != OpLsh32x64 {
  4563  			break
  4564  		}
  4565  		x := v_0_0.Args[0]
  4566  		v_0_0_1 := v_0_0.Args[1]
  4567  		if v_0_0_1.Op != OpConst64 {
  4568  			break
  4569  		}
  4570  		c1 := v_0_0_1.AuxInt
  4571  		v_0_1 := v_0.Args[1]
  4572  		if v_0_1.Op != OpConst64 {
  4573  			break
  4574  		}
  4575  		c2 := v_0_1.AuxInt
  4576  		v_1 := v.Args[1]
  4577  		if v_1.Op != OpConst64 {
  4578  			break
  4579  		}
  4580  		c3 := v_1.AuxInt
  4581  		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
  4582  			break
  4583  		}
  4584  		v.reset(OpLsh32x64)
  4585  		v.AddArg(x)
  4586  		v0 := b.NewValue0(v.Pos, OpConst64, config.fe.TypeUInt64())
  4587  		v0.AuxInt = c1 - c2 + c3
  4588  		v.AddArg(v0)
  4589  		return true
  4590  	}
  4591  	return false
  4592  }
  4593  func rewriteValuegeneric_OpLsh32x8(v *Value, config *Config) bool {
  4594  	b := v.Block
  4595  	_ = b
  4596  	// match: (Lsh32x8   <t> x (Const8  [c]))
  4597  	// cond:
  4598  	// result: (Lsh32x64  x (Const64 <t> [int64(uint8(c))]))
  4599  	for {
  4600  		t := v.Type
  4601  		x := v.Args[0]
  4602  		v_1 := v.Args[1]
  4603  		if v_1.Op != OpConst8 {
  4604  			break
  4605  		}
  4606  		c := v_1.AuxInt
  4607  		v.reset(OpLsh32x64)
  4608  		v.AddArg(x)
  4609  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  4610  		v0.AuxInt = int64(uint8(c))
  4611  		v.AddArg(v0)
  4612  		return true
  4613  	}
  4614  	// match: (Lsh32x8  (Const32 [0]) _)
  4615  	// cond:
  4616  	// result: (Const32 [0])
  4617  	for {
  4618  		v_0 := v.Args[0]
  4619  		if v_0.Op != OpConst32 {
  4620  			break
  4621  		}
  4622  		if v_0.AuxInt != 0 {
  4623  			break
  4624  		}
  4625  		v.reset(OpConst32)
  4626  		v.AuxInt = 0
  4627  		return true
  4628  	}
  4629  	return false
  4630  }
  4631  func rewriteValuegeneric_OpLsh64x16(v *Value, config *Config) bool {
  4632  	b := v.Block
  4633  	_ = b
  4634  	// match: (Lsh64x16  <t> x (Const16 [c]))
  4635  	// cond:
  4636  	// result: (Lsh64x64  x (Const64 <t> [int64(uint16(c))]))
  4637  	for {
  4638  		t := v.Type
  4639  		x := v.Args[0]
  4640  		v_1 := v.Args[1]
  4641  		if v_1.Op != OpConst16 {
  4642  			break
  4643  		}
  4644  		c := v_1.AuxInt
  4645  		v.reset(OpLsh64x64)
  4646  		v.AddArg(x)
  4647  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  4648  		v0.AuxInt = int64(uint16(c))
  4649  		v.AddArg(v0)
  4650  		return true
  4651  	}
  4652  	// match: (Lsh64x16  (Const64 [0]) _)
  4653  	// cond:
  4654  	// result: (Const64 [0])
  4655  	for {
  4656  		v_0 := v.Args[0]
  4657  		if v_0.Op != OpConst64 {
  4658  			break
  4659  		}
  4660  		if v_0.AuxInt != 0 {
  4661  			break
  4662  		}
  4663  		v.reset(OpConst64)
  4664  		v.AuxInt = 0
  4665  		return true
  4666  	}
  4667  	return false
  4668  }
  4669  func rewriteValuegeneric_OpLsh64x32(v *Value, config *Config) bool {
  4670  	b := v.Block
  4671  	_ = b
  4672  	// match: (Lsh64x32  <t> x (Const32 [c]))
  4673  	// cond:
  4674  	// result: (Lsh64x64  x (Const64 <t> [int64(uint32(c))]))
  4675  	for {
  4676  		t := v.Type
  4677  		x := v.Args[0]
  4678  		v_1 := v.Args[1]
  4679  		if v_1.Op != OpConst32 {
  4680  			break
  4681  		}
  4682  		c := v_1.AuxInt
  4683  		v.reset(OpLsh64x64)
  4684  		v.AddArg(x)
  4685  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  4686  		v0.AuxInt = int64(uint32(c))
  4687  		v.AddArg(v0)
  4688  		return true
  4689  	}
  4690  	// match: (Lsh64x32  (Const64 [0]) _)
  4691  	// cond:
  4692  	// result: (Const64 [0])
  4693  	for {
  4694  		v_0 := v.Args[0]
  4695  		if v_0.Op != OpConst64 {
  4696  			break
  4697  		}
  4698  		if v_0.AuxInt != 0 {
  4699  			break
  4700  		}
  4701  		v.reset(OpConst64)
  4702  		v.AuxInt = 0
  4703  		return true
  4704  	}
  4705  	return false
  4706  }
  4707  func rewriteValuegeneric_OpLsh64x64(v *Value, config *Config) bool {
  4708  	b := v.Block
  4709  	_ = b
  4710  	// match: (Lsh64x64  (Const64 [c]) (Const64 [d]))
  4711  	// cond:
  4712  	// result: (Const64 [c << uint64(d)])
  4713  	for {
  4714  		v_0 := v.Args[0]
  4715  		if v_0.Op != OpConst64 {
  4716  			break
  4717  		}
  4718  		c := v_0.AuxInt
  4719  		v_1 := v.Args[1]
  4720  		if v_1.Op != OpConst64 {
  4721  			break
  4722  		}
  4723  		d := v_1.AuxInt
  4724  		v.reset(OpConst64)
  4725  		v.AuxInt = c << uint64(d)
  4726  		return true
  4727  	}
  4728  	// match: (Lsh64x64  x (Const64 [0]))
  4729  	// cond:
  4730  	// result: x
  4731  	for {
  4732  		x := v.Args[0]
  4733  		v_1 := v.Args[1]
  4734  		if v_1.Op != OpConst64 {
  4735  			break
  4736  		}
  4737  		if v_1.AuxInt != 0 {
  4738  			break
  4739  		}
  4740  		v.reset(OpCopy)
  4741  		v.Type = x.Type
  4742  		v.AddArg(x)
  4743  		return true
  4744  	}
  4745  	// match: (Lsh64x64  (Const64 [0]) _)
  4746  	// cond:
  4747  	// result: (Const64 [0])
  4748  	for {
  4749  		v_0 := v.Args[0]
  4750  		if v_0.Op != OpConst64 {
  4751  			break
  4752  		}
  4753  		if v_0.AuxInt != 0 {
  4754  			break
  4755  		}
  4756  		v.reset(OpConst64)
  4757  		v.AuxInt = 0
  4758  		return true
  4759  	}
  4760  	// match: (Lsh64x64  _ (Const64 [c]))
  4761  	// cond: uint64(c) >= 64
  4762  	// result: (Const64 [0])
  4763  	for {
  4764  		v_1 := v.Args[1]
  4765  		if v_1.Op != OpConst64 {
  4766  			break
  4767  		}
  4768  		c := v_1.AuxInt
  4769  		if !(uint64(c) >= 64) {
  4770  			break
  4771  		}
  4772  		v.reset(OpConst64)
  4773  		v.AuxInt = 0
  4774  		return true
  4775  	}
  4776  	// match: (Lsh64x64 <t> (Lsh64x64 x (Const64 [c])) (Const64 [d]))
  4777  	// cond: !uaddOvf(c,d)
  4778  	// result: (Lsh64x64 x (Const64 <t> [c+d]))
  4779  	for {
  4780  		t := v.Type
  4781  		v_0 := v.Args[0]
  4782  		if v_0.Op != OpLsh64x64 {
  4783  			break
  4784  		}
  4785  		x := v_0.Args[0]
  4786  		v_0_1 := v_0.Args[1]
  4787  		if v_0_1.Op != OpConst64 {
  4788  			break
  4789  		}
  4790  		c := v_0_1.AuxInt
  4791  		v_1 := v.Args[1]
  4792  		if v_1.Op != OpConst64 {
  4793  			break
  4794  		}
  4795  		d := v_1.AuxInt
  4796  		if !(!uaddOvf(c, d)) {
  4797  			break
  4798  		}
  4799  		v.reset(OpLsh64x64)
  4800  		v.AddArg(x)
  4801  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  4802  		v0.AuxInt = c + d
  4803  		v.AddArg(v0)
  4804  		return true
  4805  	}
  4806  	// match: (Lsh64x64 (Rsh64Ux64 (Lsh64x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
  4807  	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
  4808  	// result: (Lsh64x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
  4809  	for {
  4810  		v_0 := v.Args[0]
  4811  		if v_0.Op != OpRsh64Ux64 {
  4812  			break
  4813  		}
  4814  		v_0_0 := v_0.Args[0]
  4815  		if v_0_0.Op != OpLsh64x64 {
  4816  			break
  4817  		}
  4818  		x := v_0_0.Args[0]
  4819  		v_0_0_1 := v_0_0.Args[1]
  4820  		if v_0_0_1.Op != OpConst64 {
  4821  			break
  4822  		}
  4823  		c1 := v_0_0_1.AuxInt
  4824  		v_0_1 := v_0.Args[1]
  4825  		if v_0_1.Op != OpConst64 {
  4826  			break
  4827  		}
  4828  		c2 := v_0_1.AuxInt
  4829  		v_1 := v.Args[1]
  4830  		if v_1.Op != OpConst64 {
  4831  			break
  4832  		}
  4833  		c3 := v_1.AuxInt
  4834  		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
  4835  			break
  4836  		}
  4837  		v.reset(OpLsh64x64)
  4838  		v.AddArg(x)
  4839  		v0 := b.NewValue0(v.Pos, OpConst64, config.fe.TypeUInt64())
  4840  		v0.AuxInt = c1 - c2 + c3
  4841  		v.AddArg(v0)
  4842  		return true
  4843  	}
  4844  	return false
  4845  }
  4846  func rewriteValuegeneric_OpLsh64x8(v *Value, config *Config) bool {
  4847  	b := v.Block
  4848  	_ = b
  4849  	// match: (Lsh64x8   <t> x (Const8  [c]))
  4850  	// cond:
  4851  	// result: (Lsh64x64  x (Const64 <t> [int64(uint8(c))]))
  4852  	for {
  4853  		t := v.Type
  4854  		x := v.Args[0]
  4855  		v_1 := v.Args[1]
  4856  		if v_1.Op != OpConst8 {
  4857  			break
  4858  		}
  4859  		c := v_1.AuxInt
  4860  		v.reset(OpLsh64x64)
  4861  		v.AddArg(x)
  4862  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  4863  		v0.AuxInt = int64(uint8(c))
  4864  		v.AddArg(v0)
  4865  		return true
  4866  	}
  4867  	// match: (Lsh64x8  (Const64 [0]) _)
  4868  	// cond:
  4869  	// result: (Const64 [0])
  4870  	for {
  4871  		v_0 := v.Args[0]
  4872  		if v_0.Op != OpConst64 {
  4873  			break
  4874  		}
  4875  		if v_0.AuxInt != 0 {
  4876  			break
  4877  		}
  4878  		v.reset(OpConst64)
  4879  		v.AuxInt = 0
  4880  		return true
  4881  	}
  4882  	return false
  4883  }
  4884  func rewriteValuegeneric_OpLsh8x16(v *Value, config *Config) bool {
  4885  	b := v.Block
  4886  	_ = b
  4887  	// match: (Lsh8x16  <t> x (Const16 [c]))
  4888  	// cond:
  4889  	// result: (Lsh8x64  x (Const64 <t> [int64(uint16(c))]))
  4890  	for {
  4891  		t := v.Type
  4892  		x := v.Args[0]
  4893  		v_1 := v.Args[1]
  4894  		if v_1.Op != OpConst16 {
  4895  			break
  4896  		}
  4897  		c := v_1.AuxInt
  4898  		v.reset(OpLsh8x64)
  4899  		v.AddArg(x)
  4900  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  4901  		v0.AuxInt = int64(uint16(c))
  4902  		v.AddArg(v0)
  4903  		return true
  4904  	}
  4905  	// match: (Lsh8x16   (Const8 [0]) _)
  4906  	// cond:
  4907  	// result: (Const8  [0])
  4908  	for {
  4909  		v_0 := v.Args[0]
  4910  		if v_0.Op != OpConst8 {
  4911  			break
  4912  		}
  4913  		if v_0.AuxInt != 0 {
  4914  			break
  4915  		}
  4916  		v.reset(OpConst8)
  4917  		v.AuxInt = 0
  4918  		return true
  4919  	}
  4920  	return false
  4921  }
  4922  func rewriteValuegeneric_OpLsh8x32(v *Value, config *Config) bool {
  4923  	b := v.Block
  4924  	_ = b
  4925  	// match: (Lsh8x32  <t> x (Const32 [c]))
  4926  	// cond:
  4927  	// result: (Lsh8x64  x (Const64 <t> [int64(uint32(c))]))
  4928  	for {
  4929  		t := v.Type
  4930  		x := v.Args[0]
  4931  		v_1 := v.Args[1]
  4932  		if v_1.Op != OpConst32 {
  4933  			break
  4934  		}
  4935  		c := v_1.AuxInt
  4936  		v.reset(OpLsh8x64)
  4937  		v.AddArg(x)
  4938  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  4939  		v0.AuxInt = int64(uint32(c))
  4940  		v.AddArg(v0)
  4941  		return true
  4942  	}
  4943  	// match: (Lsh8x32   (Const8 [0]) _)
  4944  	// cond:
  4945  	// result: (Const8  [0])
  4946  	for {
  4947  		v_0 := v.Args[0]
  4948  		if v_0.Op != OpConst8 {
  4949  			break
  4950  		}
  4951  		if v_0.AuxInt != 0 {
  4952  			break
  4953  		}
  4954  		v.reset(OpConst8)
  4955  		v.AuxInt = 0
  4956  		return true
  4957  	}
  4958  	return false
  4959  }
  4960  func rewriteValuegeneric_OpLsh8x64(v *Value, config *Config) bool {
  4961  	b := v.Block
  4962  	_ = b
  4963  	// match: (Lsh8x64   (Const8  [c]) (Const64 [d]))
  4964  	// cond:
  4965  	// result: (Const8  [int64(int8(c) << uint64(d))])
  4966  	for {
  4967  		v_0 := v.Args[0]
  4968  		if v_0.Op != OpConst8 {
  4969  			break
  4970  		}
  4971  		c := v_0.AuxInt
  4972  		v_1 := v.Args[1]
  4973  		if v_1.Op != OpConst64 {
  4974  			break
  4975  		}
  4976  		d := v_1.AuxInt
  4977  		v.reset(OpConst8)
  4978  		v.AuxInt = int64(int8(c) << uint64(d))
  4979  		return true
  4980  	}
  4981  	// match: (Lsh8x64   x (Const64 [0]))
  4982  	// cond:
  4983  	// result: x
  4984  	for {
  4985  		x := v.Args[0]
  4986  		v_1 := v.Args[1]
  4987  		if v_1.Op != OpConst64 {
  4988  			break
  4989  		}
  4990  		if v_1.AuxInt != 0 {
  4991  			break
  4992  		}
  4993  		v.reset(OpCopy)
  4994  		v.Type = x.Type
  4995  		v.AddArg(x)
  4996  		return true
  4997  	}
  4998  	// match: (Lsh8x64   (Const8 [0]) _)
  4999  	// cond:
  5000  	// result: (Const8  [0])
  5001  	for {
  5002  		v_0 := v.Args[0]
  5003  		if v_0.Op != OpConst8 {
  5004  			break
  5005  		}
  5006  		if v_0.AuxInt != 0 {
  5007  			break
  5008  		}
  5009  		v.reset(OpConst8)
  5010  		v.AuxInt = 0
  5011  		return true
  5012  	}
  5013  	// match: (Lsh8x64   _ (Const64 [c]))
  5014  	// cond: uint64(c) >= 8
  5015  	// result: (Const8  [0])
  5016  	for {
  5017  		v_1 := v.Args[1]
  5018  		if v_1.Op != OpConst64 {
  5019  			break
  5020  		}
  5021  		c := v_1.AuxInt
  5022  		if !(uint64(c) >= 8) {
  5023  			break
  5024  		}
  5025  		v.reset(OpConst8)
  5026  		v.AuxInt = 0
  5027  		return true
  5028  	}
  5029  	// match: (Lsh8x64  <t> (Lsh8x64  x (Const64 [c])) (Const64 [d]))
  5030  	// cond: !uaddOvf(c,d)
  5031  	// result: (Lsh8x64  x (Const64 <t> [c+d]))
  5032  	for {
  5033  		t := v.Type
  5034  		v_0 := v.Args[0]
  5035  		if v_0.Op != OpLsh8x64 {
  5036  			break
  5037  		}
  5038  		x := v_0.Args[0]
  5039  		v_0_1 := v_0.Args[1]
  5040  		if v_0_1.Op != OpConst64 {
  5041  			break
  5042  		}
  5043  		c := v_0_1.AuxInt
  5044  		v_1 := v.Args[1]
  5045  		if v_1.Op != OpConst64 {
  5046  			break
  5047  		}
  5048  		d := v_1.AuxInt
  5049  		if !(!uaddOvf(c, d)) {
  5050  			break
  5051  		}
  5052  		v.reset(OpLsh8x64)
  5053  		v.AddArg(x)
  5054  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  5055  		v0.AuxInt = c + d
  5056  		v.AddArg(v0)
  5057  		return true
  5058  	}
  5059  	// match: (Lsh8x64 (Rsh8Ux64 (Lsh8x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
  5060  	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
  5061  	// result: (Lsh8x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
  5062  	for {
  5063  		v_0 := v.Args[0]
  5064  		if v_0.Op != OpRsh8Ux64 {
  5065  			break
  5066  		}
  5067  		v_0_0 := v_0.Args[0]
  5068  		if v_0_0.Op != OpLsh8x64 {
  5069  			break
  5070  		}
  5071  		x := v_0_0.Args[0]
  5072  		v_0_0_1 := v_0_0.Args[1]
  5073  		if v_0_0_1.Op != OpConst64 {
  5074  			break
  5075  		}
  5076  		c1 := v_0_0_1.AuxInt
  5077  		v_0_1 := v_0.Args[1]
  5078  		if v_0_1.Op != OpConst64 {
  5079  			break
  5080  		}
  5081  		c2 := v_0_1.AuxInt
  5082  		v_1 := v.Args[1]
  5083  		if v_1.Op != OpConst64 {
  5084  			break
  5085  		}
  5086  		c3 := v_1.AuxInt
  5087  		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
  5088  			break
  5089  		}
  5090  		v.reset(OpLsh8x64)
  5091  		v.AddArg(x)
  5092  		v0 := b.NewValue0(v.Pos, OpConst64, config.fe.TypeUInt64())
  5093  		v0.AuxInt = c1 - c2 + c3
  5094  		v.AddArg(v0)
  5095  		return true
  5096  	}
  5097  	return false
  5098  }
  5099  func rewriteValuegeneric_OpLsh8x8(v *Value, config *Config) bool {
  5100  	b := v.Block
  5101  	_ = b
  5102  	// match: (Lsh8x8   <t> x (Const8  [c]))
  5103  	// cond:
  5104  	// result: (Lsh8x64  x (Const64 <t> [int64(uint8(c))]))
  5105  	for {
  5106  		t := v.Type
  5107  		x := v.Args[0]
  5108  		v_1 := v.Args[1]
  5109  		if v_1.Op != OpConst8 {
  5110  			break
  5111  		}
  5112  		c := v_1.AuxInt
  5113  		v.reset(OpLsh8x64)
  5114  		v.AddArg(x)
  5115  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  5116  		v0.AuxInt = int64(uint8(c))
  5117  		v.AddArg(v0)
  5118  		return true
  5119  	}
  5120  	// match: (Lsh8x8   (Const8 [0]) _)
  5121  	// cond:
  5122  	// result: (Const8  [0])
  5123  	for {
  5124  		v_0 := v.Args[0]
  5125  		if v_0.Op != OpConst8 {
  5126  			break
  5127  		}
  5128  		if v_0.AuxInt != 0 {
  5129  			break
  5130  		}
  5131  		v.reset(OpConst8)
  5132  		v.AuxInt = 0
  5133  		return true
  5134  	}
  5135  	return false
  5136  }
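// The Mod rules fold the remainder of two constants, guarded by d != 0 so the
// compiler never divides by zero while folding. Mod64u by a power of two
// becomes a mask with c-1, and Mod64/Mod64u by other suitable constants are
// rewritten as x - (x/c)*c so the division rules' magic-number strategies
// (smagic64ok/umagic64ok) can take over.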
  5137  func rewriteValuegeneric_OpMod16(v *Value, config *Config) bool {
  5138  	b := v.Block
  5139  	_ = b
  5140  	// match: (Mod16 (Const16 [c]) (Const16 [d]))
  5141  	// cond: d != 0
  5142  	// result: (Const16 [int64(int16(c % d))])
  5143  	for {
  5144  		v_0 := v.Args[0]
  5145  		if v_0.Op != OpConst16 {
  5146  			break
  5147  		}
  5148  		c := v_0.AuxInt
  5149  		v_1 := v.Args[1]
  5150  		if v_1.Op != OpConst16 {
  5151  			break
  5152  		}
  5153  		d := v_1.AuxInt
  5154  		if !(d != 0) {
  5155  			break
  5156  		}
  5157  		v.reset(OpConst16)
  5158  		v.AuxInt = int64(int16(c % d))
  5159  		return true
  5160  	}
  5161  	return false
  5162  }
  5163  func rewriteValuegeneric_OpMod16u(v *Value, config *Config) bool {
  5164  	b := v.Block
  5165  	_ = b
  5166  	// match: (Mod16u (Const16 [c]) (Const16 [d]))
  5167  	// cond: d != 0
  5168  	// result: (Const16 [int64(uint16(c) % uint16(d))])
  5169  	for {
  5170  		v_0 := v.Args[0]
  5171  		if v_0.Op != OpConst16 {
  5172  			break
  5173  		}
  5174  		c := v_0.AuxInt
  5175  		v_1 := v.Args[1]
  5176  		if v_1.Op != OpConst16 {
  5177  			break
  5178  		}
  5179  		d := v_1.AuxInt
  5180  		if !(d != 0) {
  5181  			break
  5182  		}
  5183  		v.reset(OpConst16)
  5184  		v.AuxInt = int64(uint16(c) % uint16(d))
  5185  		return true
  5186  	}
  5187  	return false
  5188  }
  5189  func rewriteValuegeneric_OpMod32(v *Value, config *Config) bool {
  5190  	b := v.Block
  5191  	_ = b
  5192  	// match: (Mod32 (Const32 [c]) (Const32 [d]))
  5193  	// cond: d != 0
  5194  	// result: (Const32 [int64(int32(c % d))])
  5195  	for {
  5196  		v_0 := v.Args[0]
  5197  		if v_0.Op != OpConst32 {
  5198  			break
  5199  		}
  5200  		c := v_0.AuxInt
  5201  		v_1 := v.Args[1]
  5202  		if v_1.Op != OpConst32 {
  5203  			break
  5204  		}
  5205  		d := v_1.AuxInt
  5206  		if !(d != 0) {
  5207  			break
  5208  		}
  5209  		v.reset(OpConst32)
  5210  		v.AuxInt = int64(int32(c % d))
  5211  		return true
  5212  	}
  5213  	return false
  5214  }
  5215  func rewriteValuegeneric_OpMod32u(v *Value, config *Config) bool {
  5216  	b := v.Block
  5217  	_ = b
  5218  	// match: (Mod32u (Const32 [c]) (Const32 [d]))
  5219  	// cond: d != 0
  5220  	// result: (Const32 [int64(uint32(c) % uint32(d))])
  5221  	for {
  5222  		v_0 := v.Args[0]
  5223  		if v_0.Op != OpConst32 {
  5224  			break
  5225  		}
  5226  		c := v_0.AuxInt
  5227  		v_1 := v.Args[1]
  5228  		if v_1.Op != OpConst32 {
  5229  			break
  5230  		}
  5231  		d := v_1.AuxInt
  5232  		if !(d != 0) {
  5233  			break
  5234  		}
  5235  		v.reset(OpConst32)
  5236  		v.AuxInt = int64(uint32(c) % uint32(d))
  5237  		return true
  5238  	}
  5239  	return false
  5240  }
  5241  func rewriteValuegeneric_OpMod64(v *Value, config *Config) bool {
  5242  	b := v.Block
  5243  	_ = b
  5244  	// match: (Mod64 (Const64 [c]) (Const64 [d]))
  5245  	// cond: d != 0
  5246  	// result: (Const64 [c % d])
  5247  	for {
  5248  		v_0 := v.Args[0]
  5249  		if v_0.Op != OpConst64 {
  5250  			break
  5251  		}
  5252  		c := v_0.AuxInt
  5253  		v_1 := v.Args[1]
  5254  		if v_1.Op != OpConst64 {
  5255  			break
  5256  		}
  5257  		d := v_1.AuxInt
  5258  		if !(d != 0) {
  5259  			break
  5260  		}
  5261  		v.reset(OpConst64)
  5262  		v.AuxInt = c % d
  5263  		return true
  5264  	}
  5265  	// match: (Mod64  <t> x (Const64 [c]))
  5266  	// cond: x.Op != OpConst64 && smagic64ok(c)
  5267  	// result: (Sub64 x (Mul64 <t> (Div64  <t> x (Const64 <t> [c])) (Const64 <t> [c])))
  5268  	for {
  5269  		t := v.Type
  5270  		x := v.Args[0]
  5271  		v_1 := v.Args[1]
  5272  		if v_1.Op != OpConst64 {
  5273  			break
  5274  		}
  5275  		c := v_1.AuxInt
  5276  		if !(x.Op != OpConst64 && smagic64ok(c)) {
  5277  			break
  5278  		}
  5279  		v.reset(OpSub64)
  5280  		v.AddArg(x)
  5281  		v0 := b.NewValue0(v.Pos, OpMul64, t)
  5282  		v1 := b.NewValue0(v.Pos, OpDiv64, t)
  5283  		v1.AddArg(x)
  5284  		v2 := b.NewValue0(v.Pos, OpConst64, t)
  5285  		v2.AuxInt = c
  5286  		v1.AddArg(v2)
  5287  		v0.AddArg(v1)
  5288  		v3 := b.NewValue0(v.Pos, OpConst64, t)
  5289  		v3.AuxInt = c
  5290  		v0.AddArg(v3)
  5291  		v.AddArg(v0)
  5292  		return true
  5293  	}
  5294  	return false
  5295  }
  5296  func rewriteValuegeneric_OpMod64u(v *Value, config *Config) bool {
  5297  	b := v.Block
  5298  	_ = b
  5299  	// match: (Mod64u (Const64 [c]) (Const64 [d]))
  5300  	// cond: d != 0
  5301  	// result: (Const64 [int64(uint64(c) % uint64(d))])
  5302  	for {
  5303  		v_0 := v.Args[0]
  5304  		if v_0.Op != OpConst64 {
  5305  			break
  5306  		}
  5307  		c := v_0.AuxInt
  5308  		v_1 := v.Args[1]
  5309  		if v_1.Op != OpConst64 {
  5310  			break
  5311  		}
  5312  		d := v_1.AuxInt
  5313  		if !(d != 0) {
  5314  			break
  5315  		}
  5316  		v.reset(OpConst64)
  5317  		v.AuxInt = int64(uint64(c) % uint64(d))
  5318  		return true
  5319  	}
  5320  	// match: (Mod64u <t> n (Const64 [c]))
  5321  	// cond: isPowerOfTwo(c)
  5322  	// result: (And64 n (Const64 <t> [c-1]))
  5323  	for {
  5324  		t := v.Type
  5325  		n := v.Args[0]
  5326  		v_1 := v.Args[1]
  5327  		if v_1.Op != OpConst64 {
  5328  			break
  5329  		}
  5330  		c := v_1.AuxInt
  5331  		if !(isPowerOfTwo(c)) {
  5332  			break
  5333  		}
  5334  		v.reset(OpAnd64)
  5335  		v.AddArg(n)
  5336  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  5337  		v0.AuxInt = c - 1
  5338  		v.AddArg(v0)
  5339  		return true
  5340  	}
  5341  	// match: (Mod64u <t> x (Const64 [c]))
  5342  	// cond: x.Op != OpConst64 && umagic64ok(c)
  5343  	// result: (Sub64 x (Mul64 <t> (Div64u <t> x (Const64 <t> [c])) (Const64 <t> [c])))
  5344  	for {
  5345  		t := v.Type
  5346  		x := v.Args[0]
  5347  		v_1 := v.Args[1]
  5348  		if v_1.Op != OpConst64 {
  5349  			break
  5350  		}
  5351  		c := v_1.AuxInt
  5352  		if !(x.Op != OpConst64 && umagic64ok(c)) {
  5353  			break
  5354  		}
  5355  		v.reset(OpSub64)
  5356  		v.AddArg(x)
  5357  		v0 := b.NewValue0(v.Pos, OpMul64, t)
  5358  		v1 := b.NewValue0(v.Pos, OpDiv64u, t)
  5359  		v1.AddArg(x)
  5360  		v2 := b.NewValue0(v.Pos, OpConst64, t)
  5361  		v2.AuxInt = c
  5362  		v1.AddArg(v2)
  5363  		v0.AddArg(v1)
  5364  		v3 := b.NewValue0(v.Pos, OpConst64, t)
  5365  		v3.AuxInt = c
  5366  		v0.AddArg(v3)
  5367  		v.AddArg(v0)
  5368  		return true
  5369  	}
  5370  	return false
  5371  }
  5372  func rewriteValuegeneric_OpMod8(v *Value, config *Config) bool {
  5373  	b := v.Block
  5374  	_ = b
  5375  	// match: (Mod8  (Const8  [c]) (Const8  [d]))
  5376  	// cond: d != 0
  5377  	// result: (Const8  [int64(int8(c % d))])
  5378  	for {
  5379  		v_0 := v.Args[0]
  5380  		if v_0.Op != OpConst8 {
  5381  			break
  5382  		}
  5383  		c := v_0.AuxInt
  5384  		v_1 := v.Args[1]
  5385  		if v_1.Op != OpConst8 {
  5386  			break
  5387  		}
  5388  		d := v_1.AuxInt
  5389  		if !(d != 0) {
  5390  			break
  5391  		}
  5392  		v.reset(OpConst8)
  5393  		v.AuxInt = int64(int8(c % d))
  5394  		return true
  5395  	}
  5396  	return false
  5397  }
  5398  func rewriteValuegeneric_OpMod8u(v *Value, config *Config) bool {
  5399  	b := v.Block
  5400  	_ = b
  5401  	// match: (Mod8u  (Const8 [c])  (Const8  [d]))
  5402  	// cond: d != 0
  5403  	// result: (Const8  [int64(uint8(c) % uint8(d))])
  5404  	for {
  5405  		v_0 := v.Args[0]
  5406  		if v_0.Op != OpConst8 {
  5407  			break
  5408  		}
  5409  		c := v_0.AuxInt
  5410  		v_1 := v.Args[1]
  5411  		if v_1.Op != OpConst8 {
  5412  			break
  5413  		}
  5414  		d := v_1.AuxInt
  5415  		if !(d != 0) {
  5416  			break
  5417  		}
  5418  		v.reset(OpConst8)
  5419  		v.AuxInt = int64(uint8(c) % uint8(d))
  5420  		return true
  5421  	}
  5422  	return false
  5423  }
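// The Mul rules constant-fold products, rewrite multiplication by -1 as a
// negation and by a power of two as a left shift (using the negated-shift
// form for negative powers of two on signed types), and move a constant
// operand into the first argument so other rules only have to match that
// canonical shape. A zero constant operand collapses the product to zero,
// and Mul32/Mul64 additionally distribute a constant factor over an addition
// that itself has a constant term.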
  5424  func rewriteValuegeneric_OpMul16(v *Value, config *Config) bool {
  5425  	b := v.Block
  5426  	_ = b
  5427  	// match: (Mul16  (Const16 [c])  (Const16 [d]))
  5428  	// cond:
  5429  	// result: (Const16 [int64(int16(c*d))])
  5430  	for {
  5431  		v_0 := v.Args[0]
  5432  		if v_0.Op != OpConst16 {
  5433  			break
  5434  		}
  5435  		c := v_0.AuxInt
  5436  		v_1 := v.Args[1]
  5437  		if v_1.Op != OpConst16 {
  5438  			break
  5439  		}
  5440  		d := v_1.AuxInt
  5441  		v.reset(OpConst16)
  5442  		v.AuxInt = int64(int16(c * d))
  5443  		return true
  5444  	}
  5445  	// match: (Mul16 (Const16 [-1]) x)
  5446  	// cond:
  5447  	// result: (Neg16 x)
  5448  	for {
  5449  		v_0 := v.Args[0]
  5450  		if v_0.Op != OpConst16 {
  5451  			break
  5452  		}
  5453  		if v_0.AuxInt != -1 {
  5454  			break
  5455  		}
  5456  		x := v.Args[1]
  5457  		v.reset(OpNeg16)
  5458  		v.AddArg(x)
  5459  		return true
  5460  	}
  5461  	// match: (Mul16 <t> n (Const16 [c]))
  5462  	// cond: isPowerOfTwo(c)
  5463  	// result: (Lsh16x64 <t> n (Const64 <config.fe.TypeUInt64()> [log2(c)]))
  5464  	for {
  5465  		t := v.Type
  5466  		n := v.Args[0]
  5467  		v_1 := v.Args[1]
  5468  		if v_1.Op != OpConst16 {
  5469  			break
  5470  		}
  5471  		c := v_1.AuxInt
  5472  		if !(isPowerOfTwo(c)) {
  5473  			break
  5474  		}
  5475  		v.reset(OpLsh16x64)
  5476  		v.Type = t
  5477  		v.AddArg(n)
  5478  		v0 := b.NewValue0(v.Pos, OpConst64, config.fe.TypeUInt64())
  5479  		v0.AuxInt = log2(c)
  5480  		v.AddArg(v0)
  5481  		return true
  5482  	}
  5483  	// match: (Mul16 <t> n (Const16 [c]))
  5484  	// cond: t.IsSigned() && isPowerOfTwo(-c)
  5485  	// result: (Neg16 (Lsh16x64 <t> n (Const64 <config.fe.TypeUInt64()> [log2(-c)])))
  5486  	for {
  5487  		t := v.Type
  5488  		n := v.Args[0]
  5489  		v_1 := v.Args[1]
  5490  		if v_1.Op != OpConst16 {
  5491  			break
  5492  		}
  5493  		c := v_1.AuxInt
  5494  		if !(t.IsSigned() && isPowerOfTwo(-c)) {
  5495  			break
  5496  		}
  5497  		v.reset(OpNeg16)
  5498  		v0 := b.NewValue0(v.Pos, OpLsh16x64, t)
  5499  		v0.AddArg(n)
  5500  		v1 := b.NewValue0(v.Pos, OpConst64, config.fe.TypeUInt64())
  5501  		v1.AuxInt = log2(-c)
  5502  		v0.AddArg(v1)
  5503  		v.AddArg(v0)
  5504  		return true
  5505  	}
  5506  	// match: (Mul16 x (Const16 <t> [c]))
  5507  	// cond: x.Op != OpConst16
  5508  	// result: (Mul16 (Const16 <t> [c]) x)
  5509  	for {
  5510  		x := v.Args[0]
  5511  		v_1 := v.Args[1]
  5512  		if v_1.Op != OpConst16 {
  5513  			break
  5514  		}
  5515  		t := v_1.Type
  5516  		c := v_1.AuxInt
  5517  		if !(x.Op != OpConst16) {
  5518  			break
  5519  		}
  5520  		v.reset(OpMul16)
  5521  		v0 := b.NewValue0(v.Pos, OpConst16, t)
  5522  		v0.AuxInt = c
  5523  		v.AddArg(v0)
  5524  		v.AddArg(x)
  5525  		return true
  5526  	}
  5527  	// match: (Mul16 (Const16 [0]) _)
  5528  	// cond:
  5529  	// result: (Const16 [0])
  5530  	for {
  5531  		v_0 := v.Args[0]
  5532  		if v_0.Op != OpConst16 {
  5533  			break
  5534  		}
  5535  		if v_0.AuxInt != 0 {
  5536  			break
  5537  		}
  5538  		v.reset(OpConst16)
  5539  		v.AuxInt = 0
  5540  		return true
  5541  	}
  5542  	return false
  5543  }
  5544  func rewriteValuegeneric_OpMul32(v *Value, config *Config) bool {
  5545  	b := v.Block
  5546  	_ = b
  5547  	// match: (Mul32  (Const32 [c])  (Const32 [d]))
  5548  	// cond:
  5549  	// result: (Const32 [int64(int32(c*d))])
  5550  	for {
  5551  		v_0 := v.Args[0]
  5552  		if v_0.Op != OpConst32 {
  5553  			break
  5554  		}
  5555  		c := v_0.AuxInt
  5556  		v_1 := v.Args[1]
  5557  		if v_1.Op != OpConst32 {
  5558  			break
  5559  		}
  5560  		d := v_1.AuxInt
  5561  		v.reset(OpConst32)
  5562  		v.AuxInt = int64(int32(c * d))
  5563  		return true
  5564  	}
  5565  	// match: (Mul32 (Const32 [-1]) x)
  5566  	// cond:
  5567  	// result: (Neg32 x)
  5568  	for {
  5569  		v_0 := v.Args[0]
  5570  		if v_0.Op != OpConst32 {
  5571  			break
  5572  		}
  5573  		if v_0.AuxInt != -1 {
  5574  			break
  5575  		}
  5576  		x := v.Args[1]
  5577  		v.reset(OpNeg32)
  5578  		v.AddArg(x)
  5579  		return true
  5580  	}
  5581  	// match: (Mul32 <t> n (Const32 [c]))
  5582  	// cond: isPowerOfTwo(c)
  5583  	// result: (Lsh32x64 <t> n (Const64 <config.fe.TypeUInt64()> [log2(c)]))
  5584  	for {
  5585  		t := v.Type
  5586  		n := v.Args[0]
  5587  		v_1 := v.Args[1]
  5588  		if v_1.Op != OpConst32 {
  5589  			break
  5590  		}
  5591  		c := v_1.AuxInt
  5592  		if !(isPowerOfTwo(c)) {
  5593  			break
  5594  		}
  5595  		v.reset(OpLsh32x64)
  5596  		v.Type = t
  5597  		v.AddArg(n)
  5598  		v0 := b.NewValue0(v.Pos, OpConst64, config.fe.TypeUInt64())
  5599  		v0.AuxInt = log2(c)
  5600  		v.AddArg(v0)
  5601  		return true
  5602  	}
  5603  	// match: (Mul32 <t> n (Const32 [c]))
  5604  	// cond: t.IsSigned() && isPowerOfTwo(-c)
  5605  	// result: (Neg32 (Lsh32x64 <t> n (Const64 <config.fe.TypeUInt64()> [log2(-c)])))
  5606  	for {
  5607  		t := v.Type
  5608  		n := v.Args[0]
  5609  		v_1 := v.Args[1]
  5610  		if v_1.Op != OpConst32 {
  5611  			break
  5612  		}
  5613  		c := v_1.AuxInt
  5614  		if !(t.IsSigned() && isPowerOfTwo(-c)) {
  5615  			break
  5616  		}
  5617  		v.reset(OpNeg32)
  5618  		v0 := b.NewValue0(v.Pos, OpLsh32x64, t)
  5619  		v0.AddArg(n)
  5620  		v1 := b.NewValue0(v.Pos, OpConst64, config.fe.TypeUInt64())
  5621  		v1.AuxInt = log2(-c)
  5622  		v0.AddArg(v1)
  5623  		v.AddArg(v0)
  5624  		return true
  5625  	}
  5626  	// match: (Mul32 x (Const32 <t> [c]))
  5627  	// cond: x.Op != OpConst32
  5628  	// result: (Mul32 (Const32 <t> [c]) x)
  5629  	for {
  5630  		x := v.Args[0]
  5631  		v_1 := v.Args[1]
  5632  		if v_1.Op != OpConst32 {
  5633  			break
  5634  		}
  5635  		t := v_1.Type
  5636  		c := v_1.AuxInt
  5637  		if !(x.Op != OpConst32) {
  5638  			break
  5639  		}
  5640  		v.reset(OpMul32)
  5641  		v0 := b.NewValue0(v.Pos, OpConst32, t)
  5642  		v0.AuxInt = c
  5643  		v.AddArg(v0)
  5644  		v.AddArg(x)
  5645  		return true
  5646  	}
  5647  	// match: (Mul32 (Const32 <t> [c]) (Add32 <t> (Const32 <t> [d]) x))
  5648  	// cond:
  5649  	// result: (Add32 (Const32 <t> [int64(int32(c*d))]) (Mul32 <t> (Const32 <t> [c]) x))
  5650  	for {
  5651  		v_0 := v.Args[0]
  5652  		if v_0.Op != OpConst32 {
  5653  			break
  5654  		}
  5655  		t := v_0.Type
  5656  		c := v_0.AuxInt
  5657  		v_1 := v.Args[1]
  5658  		if v_1.Op != OpAdd32 {
  5659  			break
  5660  		}
  5661  		if v_1.Type != t {
  5662  			break
  5663  		}
  5664  		v_1_0 := v_1.Args[0]
  5665  		if v_1_0.Op != OpConst32 {
  5666  			break
  5667  		}
  5668  		if v_1_0.Type != t {
  5669  			break
  5670  		}
  5671  		d := v_1_0.AuxInt
  5672  		x := v_1.Args[1]
  5673  		v.reset(OpAdd32)
  5674  		v0 := b.NewValue0(v.Pos, OpConst32, t)
  5675  		v0.AuxInt = int64(int32(c * d))
  5676  		v.AddArg(v0)
  5677  		v1 := b.NewValue0(v.Pos, OpMul32, t)
  5678  		v2 := b.NewValue0(v.Pos, OpConst32, t)
  5679  		v2.AuxInt = c
  5680  		v1.AddArg(v2)
  5681  		v1.AddArg(x)
  5682  		v.AddArg(v1)
  5683  		return true
  5684  	}
  5685  	// match: (Mul32 (Const32 [0]) _)
  5686  	// cond:
  5687  	// result: (Const32 [0])
  5688  	for {
  5689  		v_0 := v.Args[0]
  5690  		if v_0.Op != OpConst32 {
  5691  			break
  5692  		}
  5693  		if v_0.AuxInt != 0 {
  5694  			break
  5695  		}
  5696  		v.reset(OpConst32)
  5697  		v.AuxInt = 0
  5698  		return true
  5699  	}
  5700  	return false
  5701  }
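// Floating-point constants are stored in AuxInt as an int64 bit pattern, so
// the Mul32F/Mul64F rules fold a product of two constants by round-tripping
// through i2f/i2f32 and f2i, and they recognize multiplication by +1
// (identity) and by -1 (negation) by comparing AuxInt against f2i(1) and
// f2i(-1).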
  5702  func rewriteValuegeneric_OpMul32F(v *Value, config *Config) bool {
  5703  	b := v.Block
  5704  	_ = b
  5705  	// match: (Mul32F (Const32F [c]) (Const32F [d]))
  5706  	// cond:
  5707  	// result: (Const32F [f2i(float64(i2f32(c) * i2f32(d)))])
  5708  	for {
  5709  		v_0 := v.Args[0]
  5710  		if v_0.Op != OpConst32F {
  5711  			break
  5712  		}
  5713  		c := v_0.AuxInt
  5714  		v_1 := v.Args[1]
  5715  		if v_1.Op != OpConst32F {
  5716  			break
  5717  		}
  5718  		d := v_1.AuxInt
  5719  		v.reset(OpConst32F)
  5720  		v.AuxInt = f2i(float64(i2f32(c) * i2f32(d)))
  5721  		return true
  5722  	}
  5723  	// match: (Mul32F x (Const32F [f2i(1)]))
  5724  	// cond:
  5725  	// result: x
  5726  	for {
  5727  		x := v.Args[0]
  5728  		v_1 := v.Args[1]
  5729  		if v_1.Op != OpConst32F {
  5730  			break
  5731  		}
  5732  		if v_1.AuxInt != f2i(1) {
  5733  			break
  5734  		}
  5735  		v.reset(OpCopy)
  5736  		v.Type = x.Type
  5737  		v.AddArg(x)
  5738  		return true
  5739  	}
  5740  	// match: (Mul32F (Const32F [f2i(1)]) x)
  5741  	// cond:
  5742  	// result: x
  5743  	for {
  5744  		v_0 := v.Args[0]
  5745  		if v_0.Op != OpConst32F {
  5746  			break
  5747  		}
  5748  		if v_0.AuxInt != f2i(1) {
  5749  			break
  5750  		}
  5751  		x := v.Args[1]
  5752  		v.reset(OpCopy)
  5753  		v.Type = x.Type
  5754  		v.AddArg(x)
  5755  		return true
  5756  	}
  5757  	// match: (Mul32F x (Const32F [f2i(-1)]))
  5758  	// cond:
  5759  	// result: (Neg32F x)
  5760  	for {
  5761  		x := v.Args[0]
  5762  		v_1 := v.Args[1]
  5763  		if v_1.Op != OpConst32F {
  5764  			break
  5765  		}
  5766  		if v_1.AuxInt != f2i(-1) {
  5767  			break
  5768  		}
  5769  		v.reset(OpNeg32F)
  5770  		v.AddArg(x)
  5771  		return true
  5772  	}
  5773  	// match: (Mul32F (Const32F [f2i(-1)]) x)
  5774  	// cond:
  5775  	// result: (Neg32F x)
  5776  	for {
  5777  		v_0 := v.Args[0]
  5778  		if v_0.Op != OpConst32F {
  5779  			break
  5780  		}
  5781  		if v_0.AuxInt != f2i(-1) {
  5782  			break
  5783  		}
  5784  		x := v.Args[1]
  5785  		v.reset(OpNeg32F)
  5786  		v.AddArg(x)
  5787  		return true
  5788  	}
  5789  	return false
  5790  }
  5791  func rewriteValuegeneric_OpMul64(v *Value, config *Config) bool {
  5792  	b := v.Block
  5793  	_ = b
  5794  	// match: (Mul64  (Const64 [c])  (Const64 [d]))
  5795  	// cond:
  5796  	// result: (Const64 [c*d])
  5797  	for {
  5798  		v_0 := v.Args[0]
  5799  		if v_0.Op != OpConst64 {
  5800  			break
  5801  		}
  5802  		c := v_0.AuxInt
  5803  		v_1 := v.Args[1]
  5804  		if v_1.Op != OpConst64 {
  5805  			break
  5806  		}
  5807  		d := v_1.AuxInt
  5808  		v.reset(OpConst64)
  5809  		v.AuxInt = c * d
  5810  		return true
  5811  	}
  5812  	// match: (Mul64 (Const64 [-1]) x)
  5813  	// cond:
  5814  	// result: (Neg64 x)
  5815  	for {
  5816  		v_0 := v.Args[0]
  5817  		if v_0.Op != OpConst64 {
  5818  			break
  5819  		}
  5820  		if v_0.AuxInt != -1 {
  5821  			break
  5822  		}
  5823  		x := v.Args[1]
  5824  		v.reset(OpNeg64)
  5825  		v.AddArg(x)
  5826  		return true
  5827  	}
  5828  	// match: (Mul64 <t> n (Const64 [c]))
  5829  	// cond: isPowerOfTwo(c)
  5830  	// result: (Lsh64x64 <t> n (Const64 <config.fe.TypeUInt64()> [log2(c)]))
  5831  	for {
  5832  		t := v.Type
  5833  		n := v.Args[0]
  5834  		v_1 := v.Args[1]
  5835  		if v_1.Op != OpConst64 {
  5836  			break
  5837  		}
  5838  		c := v_1.AuxInt
  5839  		if !(isPowerOfTwo(c)) {
  5840  			break
  5841  		}
  5842  		v.reset(OpLsh64x64)
  5843  		v.Type = t
  5844  		v.AddArg(n)
  5845  		v0 := b.NewValue0(v.Pos, OpConst64, config.fe.TypeUInt64())
  5846  		v0.AuxInt = log2(c)
  5847  		v.AddArg(v0)
  5848  		return true
  5849  	}
  5850  	// match: (Mul64 <t> n (Const64 [c]))
  5851  	// cond: t.IsSigned() && isPowerOfTwo(-c)
  5852  	// result: (Neg64 (Lsh64x64 <t> n (Const64 <config.fe.TypeUInt64()> [log2(-c)])))
  5853  	for {
  5854  		t := v.Type
  5855  		n := v.Args[0]
  5856  		v_1 := v.Args[1]
  5857  		if v_1.Op != OpConst64 {
  5858  			break
  5859  		}
  5860  		c := v_1.AuxInt
  5861  		if !(t.IsSigned() && isPowerOfTwo(-c)) {
  5862  			break
  5863  		}
  5864  		v.reset(OpNeg64)
  5865  		v0 := b.NewValue0(v.Pos, OpLsh64x64, t)
  5866  		v0.AddArg(n)
  5867  		v1 := b.NewValue0(v.Pos, OpConst64, config.fe.TypeUInt64())
  5868  		v1.AuxInt = log2(-c)
  5869  		v0.AddArg(v1)
  5870  		v.AddArg(v0)
  5871  		return true
  5872  	}
  5873  	// match: (Mul64 x (Const64 <t> [c]))
  5874  	// cond: x.Op != OpConst64
  5875  	// result: (Mul64 (Const64 <t> [c]) x)
  5876  	for {
  5877  		x := v.Args[0]
  5878  		v_1 := v.Args[1]
  5879  		if v_1.Op != OpConst64 {
  5880  			break
  5881  		}
  5882  		t := v_1.Type
  5883  		c := v_1.AuxInt
  5884  		if !(x.Op != OpConst64) {
  5885  			break
  5886  		}
  5887  		v.reset(OpMul64)
  5888  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  5889  		v0.AuxInt = c
  5890  		v.AddArg(v0)
  5891  		v.AddArg(x)
  5892  		return true
  5893  	}
  5894  	// match: (Mul64 (Const64 <t> [c]) (Add64 <t> (Const64 <t> [d]) x))
  5895  	// cond:
  5896  	// result: (Add64 (Const64 <t> [c*d]) (Mul64 <t> (Const64 <t> [c]) x))
  5897  	for {
  5898  		v_0 := v.Args[0]
  5899  		if v_0.Op != OpConst64 {
  5900  			break
  5901  		}
  5902  		t := v_0.Type
  5903  		c := v_0.AuxInt
  5904  		v_1 := v.Args[1]
  5905  		if v_1.Op != OpAdd64 {
  5906  			break
  5907  		}
  5908  		if v_1.Type != t {
  5909  			break
  5910  		}
  5911  		v_1_0 := v_1.Args[0]
  5912  		if v_1_0.Op != OpConst64 {
  5913  			break
  5914  		}
  5915  		if v_1_0.Type != t {
  5916  			break
  5917  		}
  5918  		d := v_1_0.AuxInt
  5919  		x := v_1.Args[1]
  5920  		v.reset(OpAdd64)
  5921  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  5922  		v0.AuxInt = c * d
  5923  		v.AddArg(v0)
  5924  		v1 := b.NewValue0(v.Pos, OpMul64, t)
  5925  		v2 := b.NewValue0(v.Pos, OpConst64, t)
  5926  		v2.AuxInt = c
  5927  		v1.AddArg(v2)
  5928  		v1.AddArg(x)
  5929  		v.AddArg(v1)
  5930  		return true
  5931  	}
  5932  	// match: (Mul64 (Const64 [0]) _)
  5933  	// cond:
  5934  	// result: (Const64 [0])
  5935  	for {
  5936  		v_0 := v.Args[0]
  5937  		if v_0.Op != OpConst64 {
  5938  			break
  5939  		}
  5940  		if v_0.AuxInt != 0 {
  5941  			break
  5942  		}
  5943  		v.reset(OpConst64)
  5944  		v.AuxInt = 0
  5945  		return true
  5946  	}
  5947  	return false
  5948  }
  5949  func rewriteValuegeneric_OpMul64F(v *Value, config *Config) bool {
  5950  	b := v.Block
  5951  	_ = b
  5952  	// match: (Mul64F (Const64F [c]) (Const64F [d]))
  5953  	// cond:
  5954  	// result: (Const64F [f2i(i2f(c) * i2f(d))])
  5955  	for {
  5956  		v_0 := v.Args[0]
  5957  		if v_0.Op != OpConst64F {
  5958  			break
  5959  		}
  5960  		c := v_0.AuxInt
  5961  		v_1 := v.Args[1]
  5962  		if v_1.Op != OpConst64F {
  5963  			break
  5964  		}
  5965  		d := v_1.AuxInt
  5966  		v.reset(OpConst64F)
  5967  		v.AuxInt = f2i(i2f(c) * i2f(d))
  5968  		return true
  5969  	}
  5970  	// match: (Mul64F x (Const64F [f2i(1)]))
  5971  	// cond:
  5972  	// result: x
  5973  	for {
  5974  		x := v.Args[0]
  5975  		v_1 := v.Args[1]
  5976  		if v_1.Op != OpConst64F {
  5977  			break
  5978  		}
  5979  		if v_1.AuxInt != f2i(1) {
  5980  			break
  5981  		}
  5982  		v.reset(OpCopy)
  5983  		v.Type = x.Type
  5984  		v.AddArg(x)
  5985  		return true
  5986  	}
  5987  	// match: (Mul64F (Const64F [f2i(1)]) x)
  5988  	// cond:
  5989  	// result: x
  5990  	for {
  5991  		v_0 := v.Args[0]
  5992  		if v_0.Op != OpConst64F {
  5993  			break
  5994  		}
  5995  		if v_0.AuxInt != f2i(1) {
  5996  			break
  5997  		}
  5998  		x := v.Args[1]
  5999  		v.reset(OpCopy)
  6000  		v.Type = x.Type
  6001  		v.AddArg(x)
  6002  		return true
  6003  	}
  6004  	// match: (Mul64F x (Const64F [f2i(-1)]))
  6005  	// cond:
  6006  	// result: (Neg64F x)
  6007  	for {
  6008  		x := v.Args[0]
  6009  		v_1 := v.Args[1]
  6010  		if v_1.Op != OpConst64F {
  6011  			break
  6012  		}
  6013  		if v_1.AuxInt != f2i(-1) {
  6014  			break
  6015  		}
  6016  		v.reset(OpNeg64F)
  6017  		v.AddArg(x)
  6018  		return true
  6019  	}
  6020  	// match: (Mul64F (Const64F [f2i(-1)]) x)
  6021  	// cond:
  6022  	// result: (Neg64F x)
  6023  	for {
  6024  		v_0 := v.Args[0]
  6025  		if v_0.Op != OpConst64F {
  6026  			break
  6027  		}
  6028  		if v_0.AuxInt != f2i(-1) {
  6029  			break
  6030  		}
  6031  		x := v.Args[1]
  6032  		v.reset(OpNeg64F)
  6033  		v.AddArg(x)
  6034  		return true
  6035  	}
  6036  	return false
  6037  }
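// Mul64F folds constants on the raw bit patterns stored in AuxInt: i2f
// reinterprets the int64 as a float64, the product is computed in float64, and
// f2i stores the result's bits back. A minimal sketch of that round-trip,
// assuming i2f/f2i are thin wrappers over math.Float64frombits/Float64bits
// (exampleMul64F is purely illustrative, not part of the generated rules):
//
//	func exampleMul64F(c, d int64) int64 {
//		prod := math.Float64frombits(uint64(c)) * math.Float64frombits(uint64(d))
//		return int64(math.Float64bits(prod)) // f2i(i2f(c) * i2f(d))
//	}
//
// Only multiplication by ±1 is simplified symbolically; note there is no
// x*0 -> 0 rule here, as that identity fails for NaN, ±Inf and -0.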
  6038  func rewriteValuegeneric_OpMul8(v *Value, config *Config) bool {
  6039  	b := v.Block
  6040  	_ = b
  6041  	// match: (Mul8   (Const8 [c])   (Const8 [d]))
  6042  	// cond:
  6043  	// result: (Const8  [int64(int8(c*d))])
  6044  	for {
  6045  		v_0 := v.Args[0]
  6046  		if v_0.Op != OpConst8 {
  6047  			break
  6048  		}
  6049  		c := v_0.AuxInt
  6050  		v_1 := v.Args[1]
  6051  		if v_1.Op != OpConst8 {
  6052  			break
  6053  		}
  6054  		d := v_1.AuxInt
  6055  		v.reset(OpConst8)
  6056  		v.AuxInt = int64(int8(c * d))
  6057  		return true
  6058  	}
  6059  	// match: (Mul8  (Const8  [-1]) x)
  6060  	// cond:
  6061  	// result: (Neg8  x)
  6062  	for {
  6063  		v_0 := v.Args[0]
  6064  		if v_0.Op != OpConst8 {
  6065  			break
  6066  		}
  6067  		if v_0.AuxInt != -1 {
  6068  			break
  6069  		}
  6070  		x := v.Args[1]
  6071  		v.reset(OpNeg8)
  6072  		v.AddArg(x)
  6073  		return true
  6074  	}
  6075  	// match: (Mul8  <t> n (Const8  [c]))
  6076  	// cond: isPowerOfTwo(c)
  6077  	// result: (Lsh8x64  <t> n (Const64 <config.fe.TypeUInt64()> [log2(c)]))
  6078  	for {
  6079  		t := v.Type
  6080  		n := v.Args[0]
  6081  		v_1 := v.Args[1]
  6082  		if v_1.Op != OpConst8 {
  6083  			break
  6084  		}
  6085  		c := v_1.AuxInt
  6086  		if !(isPowerOfTwo(c)) {
  6087  			break
  6088  		}
  6089  		v.reset(OpLsh8x64)
  6090  		v.Type = t
  6091  		v.AddArg(n)
  6092  		v0 := b.NewValue0(v.Pos, OpConst64, config.fe.TypeUInt64())
  6093  		v0.AuxInt = log2(c)
  6094  		v.AddArg(v0)
  6095  		return true
  6096  	}
  6097  	// match: (Mul8  <t> n (Const8  [c]))
  6098  	// cond: t.IsSigned() && isPowerOfTwo(-c)
  6099  	// result: (Neg8  (Lsh8x64  <t> n (Const64 <config.fe.TypeUInt64()> [log2(-c)])))
  6100  	for {
  6101  		t := v.Type
  6102  		n := v.Args[0]
  6103  		v_1 := v.Args[1]
  6104  		if v_1.Op != OpConst8 {
  6105  			break
  6106  		}
  6107  		c := v_1.AuxInt
  6108  		if !(t.IsSigned() && isPowerOfTwo(-c)) {
  6109  			break
  6110  		}
  6111  		v.reset(OpNeg8)
  6112  		v0 := b.NewValue0(v.Pos, OpLsh8x64, t)
  6113  		v0.AddArg(n)
  6114  		v1 := b.NewValue0(v.Pos, OpConst64, config.fe.TypeUInt64())
  6115  		v1.AuxInt = log2(-c)
  6116  		v0.AddArg(v1)
  6117  		v.AddArg(v0)
  6118  		return true
  6119  	}
  6120  	// match: (Mul8  x (Const8  <t> [c]))
  6121  	// cond: x.Op != OpConst8
  6122  	// result: (Mul8  (Const8  <t> [c]) x)
  6123  	for {
  6124  		x := v.Args[0]
  6125  		v_1 := v.Args[1]
  6126  		if v_1.Op != OpConst8 {
  6127  			break
  6128  		}
  6129  		t := v_1.Type
  6130  		c := v_1.AuxInt
  6131  		if !(x.Op != OpConst8) {
  6132  			break
  6133  		}
  6134  		v.reset(OpMul8)
  6135  		v0 := b.NewValue0(v.Pos, OpConst8, t)
  6136  		v0.AuxInt = c
  6137  		v.AddArg(v0)
  6138  		v.AddArg(x)
  6139  		return true
  6140  	}
  6141  	// match: (Mul8  (Const8  [0]) _)
  6142  	// cond:
  6143  	// result: (Const8  [0])
  6144  	for {
  6145  		v_0 := v.Args[0]
  6146  		if v_0.Op != OpConst8 {
  6147  			break
  6148  		}
  6149  		if v_0.AuxInt != 0 {
  6150  			break
  6151  		}
  6152  		v.reset(OpConst8)
  6153  		v.AuxInt = 0
  6154  		return true
  6155  	}
  6156  	return false
  6157  }
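// The two power-of-two rules in Mul8 above strength-reduce a multiplication to a
// shift: for c == 1<<k the product becomes n<<k, and for signed types a negative
// power of two becomes a negated shift, with the count computed by log2. For
// example:
//
//	(Mul8 <t> n (Const8 [8]))  -> (Lsh8x64 <t> n (Const64 [3]))        // n*8 == n<<3
//	(Mul8 <t> n (Const8 [-4])) -> (Neg8 (Lsh8x64 <t> n (Const64 [2]))) // n*-4 == -(n<<2), signed t only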
  6158  func rewriteValuegeneric_OpNeg16(v *Value, config *Config) bool {
  6159  	b := v.Block
  6160  	_ = b
  6161  	// match: (Neg16 (Sub16 x y))
  6162  	// cond:
  6163  	// result: (Sub16 y x)
  6164  	for {
  6165  		v_0 := v.Args[0]
  6166  		if v_0.Op != OpSub16 {
  6167  			break
  6168  		}
  6169  		x := v_0.Args[0]
  6170  		y := v_0.Args[1]
  6171  		v.reset(OpSub16)
  6172  		v.AddArg(y)
  6173  		v.AddArg(x)
  6174  		return true
  6175  	}
  6176  	return false
  6177  }
  6178  func rewriteValuegeneric_OpNeg32(v *Value, config *Config) bool {
  6179  	b := v.Block
  6180  	_ = b
  6181  	// match: (Neg32 (Sub32 x y))
  6182  	// cond:
  6183  	// result: (Sub32 y x)
  6184  	for {
  6185  		v_0 := v.Args[0]
  6186  		if v_0.Op != OpSub32 {
  6187  			break
  6188  		}
  6189  		x := v_0.Args[0]
  6190  		y := v_0.Args[1]
  6191  		v.reset(OpSub32)
  6192  		v.AddArg(y)
  6193  		v.AddArg(x)
  6194  		return true
  6195  	}
  6196  	return false
  6197  }
  6198  func rewriteValuegeneric_OpNeg64(v *Value, config *Config) bool {
  6199  	b := v.Block
  6200  	_ = b
  6201  	// match: (Neg64 (Sub64 x y))
  6202  	// cond:
  6203  	// result: (Sub64 y x)
  6204  	for {
  6205  		v_0 := v.Args[0]
  6206  		if v_0.Op != OpSub64 {
  6207  			break
  6208  		}
  6209  		x := v_0.Args[0]
  6210  		y := v_0.Args[1]
  6211  		v.reset(OpSub64)
  6212  		v.AddArg(y)
  6213  		v.AddArg(x)
  6214  		return true
  6215  	}
  6216  	return false
  6217  }
  6218  func rewriteValuegeneric_OpNeg8(v *Value, config *Config) bool {
  6219  	b := v.Block
  6220  	_ = b
  6221  	// match: (Neg8  (Sub8  x y))
  6222  	// cond:
  6223  	// result: (Sub8  y x)
  6224  	for {
  6225  		v_0 := v.Args[0]
  6226  		if v_0.Op != OpSub8 {
  6227  			break
  6228  		}
  6229  		x := v_0.Args[0]
  6230  		y := v_0.Args[1]
  6231  		v.reset(OpSub8)
  6232  		v.AddArg(y)
  6233  		v.AddArg(x)
  6234  		return true
  6235  	}
  6236  	return false
  6237  }
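// The Neg16/Neg32/Neg64/Neg8 rules above all encode the same algebraic identity,
// -(x - y) == y - x, at each width: the rewrite simply swaps the operands of the
// inner subtraction, e.g. (Neg32 (Sub32 x y)) -> (Sub32 y x).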
  6238  func rewriteValuegeneric_OpNeq16(v *Value, config *Config) bool {
  6239  	b := v.Block
  6240  	_ = b
  6241  	// match: (Neq16 x x)
  6242  	// cond:
  6243  	// result: (ConstBool [0])
  6244  	for {
  6245  		x := v.Args[0]
  6246  		if x != v.Args[1] {
  6247  			break
  6248  		}
  6249  		v.reset(OpConstBool)
  6250  		v.AuxInt = 0
  6251  		return true
  6252  	}
  6253  	// match: (Neq16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x))
  6254  	// cond:
  6255  	// result: (Neq16 (Const16 <t> [int64(int16(c-d))]) x)
  6256  	for {
  6257  		v_0 := v.Args[0]
  6258  		if v_0.Op != OpConst16 {
  6259  			break
  6260  		}
  6261  		t := v_0.Type
  6262  		c := v_0.AuxInt
  6263  		v_1 := v.Args[1]
  6264  		if v_1.Op != OpAdd16 {
  6265  			break
  6266  		}
  6267  		v_1_0 := v_1.Args[0]
  6268  		if v_1_0.Op != OpConst16 {
  6269  			break
  6270  		}
  6271  		if v_1_0.Type != t {
  6272  			break
  6273  		}
  6274  		d := v_1_0.AuxInt
  6275  		x := v_1.Args[1]
  6276  		v.reset(OpNeq16)
  6277  		v0 := b.NewValue0(v.Pos, OpConst16, t)
  6278  		v0.AuxInt = int64(int16(c - d))
  6279  		v.AddArg(v0)
  6280  		v.AddArg(x)
  6281  		return true
  6282  	}
  6283  	// match: (Neq16 x (Const16 <t> [c]))
  6284  	// cond: x.Op != OpConst16
  6285  	// result: (Neq16 (Const16 <t> [c]) x)
  6286  	for {
  6287  		x := v.Args[0]
  6288  		v_1 := v.Args[1]
  6289  		if v_1.Op != OpConst16 {
  6290  			break
  6291  		}
  6292  		t := v_1.Type
  6293  		c := v_1.AuxInt
  6294  		if !(x.Op != OpConst16) {
  6295  			break
  6296  		}
  6297  		v.reset(OpNeq16)
  6298  		v0 := b.NewValue0(v.Pos, OpConst16, t)
  6299  		v0.AuxInt = c
  6300  		v.AddArg(v0)
  6301  		v.AddArg(x)
  6302  		return true
  6303  	}
  6304  	// match: (Neq16 (Const16 [c]) (Const16 [d]))
  6305  	// cond:
  6306  	// result: (ConstBool [b2i(c != d)])
  6307  	for {
  6308  		v_0 := v.Args[0]
  6309  		if v_0.Op != OpConst16 {
  6310  			break
  6311  		}
  6312  		c := v_0.AuxInt
  6313  		v_1 := v.Args[1]
  6314  		if v_1.Op != OpConst16 {
  6315  			break
  6316  		}
  6317  		d := v_1.AuxInt
  6318  		v.reset(OpConstBool)
  6319  		v.AuxInt = b2i(c != d)
  6320  		return true
  6321  	}
  6322  	return false
  6323  }
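// The Neq16 rules above form the pattern repeated (modulo width) for Neq32, Neq64
// and Neq8 below: x != x is constant false; a constant added on the other side is
// folded across the comparison, so c != d+x becomes c-d != x with the difference
// truncated to the operand width; the constant operand is canonicalized to the
// left; and a comparison of two constants is evaluated with b2i. For example:
//
//	(Neq16 (Const16 <t> [10]) (Add16 (Const16 <t> [3]) x)) -> (Neq16 (Const16 <t> [7]) x)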
  6324  func rewriteValuegeneric_OpNeq32(v *Value, config *Config) bool {
  6325  	b := v.Block
  6326  	_ = b
  6327  	// match: (Neq32 x x)
  6328  	// cond:
  6329  	// result: (ConstBool [0])
  6330  	for {
  6331  		x := v.Args[0]
  6332  		if x != v.Args[1] {
  6333  			break
  6334  		}
  6335  		v.reset(OpConstBool)
  6336  		v.AuxInt = 0
  6337  		return true
  6338  	}
  6339  	// match: (Neq32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x))
  6340  	// cond:
  6341  	// result: (Neq32 (Const32 <t> [int64(int32(c-d))]) x)
  6342  	for {
  6343  		v_0 := v.Args[0]
  6344  		if v_0.Op != OpConst32 {
  6345  			break
  6346  		}
  6347  		t := v_0.Type
  6348  		c := v_0.AuxInt
  6349  		v_1 := v.Args[1]
  6350  		if v_1.Op != OpAdd32 {
  6351  			break
  6352  		}
  6353  		v_1_0 := v_1.Args[0]
  6354  		if v_1_0.Op != OpConst32 {
  6355  			break
  6356  		}
  6357  		if v_1_0.Type != t {
  6358  			break
  6359  		}
  6360  		d := v_1_0.AuxInt
  6361  		x := v_1.Args[1]
  6362  		v.reset(OpNeq32)
  6363  		v0 := b.NewValue0(v.Pos, OpConst32, t)
  6364  		v0.AuxInt = int64(int32(c - d))
  6365  		v.AddArg(v0)
  6366  		v.AddArg(x)
  6367  		return true
  6368  	}
  6369  	// match: (Neq32 x (Const32 <t> [c]))
  6370  	// cond: x.Op != OpConst32
  6371  	// result: (Neq32 (Const32 <t> [c]) x)
  6372  	for {
  6373  		x := v.Args[0]
  6374  		v_1 := v.Args[1]
  6375  		if v_1.Op != OpConst32 {
  6376  			break
  6377  		}
  6378  		t := v_1.Type
  6379  		c := v_1.AuxInt
  6380  		if !(x.Op != OpConst32) {
  6381  			break
  6382  		}
  6383  		v.reset(OpNeq32)
  6384  		v0 := b.NewValue0(v.Pos, OpConst32, t)
  6385  		v0.AuxInt = c
  6386  		v.AddArg(v0)
  6387  		v.AddArg(x)
  6388  		return true
  6389  	}
  6390  	// match: (Neq32 (Const32 [c]) (Const32 [d]))
  6391  	// cond:
  6392  	// result: (ConstBool [b2i(c != d)])
  6393  	for {
  6394  		v_0 := v.Args[0]
  6395  		if v_0.Op != OpConst32 {
  6396  			break
  6397  		}
  6398  		c := v_0.AuxInt
  6399  		v_1 := v.Args[1]
  6400  		if v_1.Op != OpConst32 {
  6401  			break
  6402  		}
  6403  		d := v_1.AuxInt
  6404  		v.reset(OpConstBool)
  6405  		v.AuxInt = b2i(c != d)
  6406  		return true
  6407  	}
  6408  	return false
  6409  }
  6410  func rewriteValuegeneric_OpNeq64(v *Value, config *Config) bool {
  6411  	b := v.Block
  6412  	_ = b
  6413  	// match: (Neq64 x x)
  6414  	// cond:
  6415  	// result: (ConstBool [0])
  6416  	for {
  6417  		x := v.Args[0]
  6418  		if x != v.Args[1] {
  6419  			break
  6420  		}
  6421  		v.reset(OpConstBool)
  6422  		v.AuxInt = 0
  6423  		return true
  6424  	}
  6425  	// match: (Neq64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x))
  6426  	// cond:
  6427  	// result: (Neq64 (Const64 <t> [c-d]) x)
  6428  	for {
  6429  		v_0 := v.Args[0]
  6430  		if v_0.Op != OpConst64 {
  6431  			break
  6432  		}
  6433  		t := v_0.Type
  6434  		c := v_0.AuxInt
  6435  		v_1 := v.Args[1]
  6436  		if v_1.Op != OpAdd64 {
  6437  			break
  6438  		}
  6439  		v_1_0 := v_1.Args[0]
  6440  		if v_1_0.Op != OpConst64 {
  6441  			break
  6442  		}
  6443  		if v_1_0.Type != t {
  6444  			break
  6445  		}
  6446  		d := v_1_0.AuxInt
  6447  		x := v_1.Args[1]
  6448  		v.reset(OpNeq64)
  6449  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  6450  		v0.AuxInt = c - d
  6451  		v.AddArg(v0)
  6452  		v.AddArg(x)
  6453  		return true
  6454  	}
  6455  	// match: (Neq64 x (Const64 <t> [c]))
  6456  	// cond: x.Op != OpConst64
  6457  	// result: (Neq64 (Const64 <t> [c]) x)
  6458  	for {
  6459  		x := v.Args[0]
  6460  		v_1 := v.Args[1]
  6461  		if v_1.Op != OpConst64 {
  6462  			break
  6463  		}
  6464  		t := v_1.Type
  6465  		c := v_1.AuxInt
  6466  		if !(x.Op != OpConst64) {
  6467  			break
  6468  		}
  6469  		v.reset(OpNeq64)
  6470  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  6471  		v0.AuxInt = c
  6472  		v.AddArg(v0)
  6473  		v.AddArg(x)
  6474  		return true
  6475  	}
  6476  	// match: (Neq64 (Const64 [c]) (Const64 [d]))
  6477  	// cond:
  6478  	// result: (ConstBool [b2i(c != d)])
  6479  	for {
  6480  		v_0 := v.Args[0]
  6481  		if v_0.Op != OpConst64 {
  6482  			break
  6483  		}
  6484  		c := v_0.AuxInt
  6485  		v_1 := v.Args[1]
  6486  		if v_1.Op != OpConst64 {
  6487  			break
  6488  		}
  6489  		d := v_1.AuxInt
  6490  		v.reset(OpConstBool)
  6491  		v.AuxInt = b2i(c != d)
  6492  		return true
  6493  	}
  6494  	return false
  6495  }
  6496  func rewriteValuegeneric_OpNeq8(v *Value, config *Config) bool {
  6497  	b := v.Block
  6498  	_ = b
  6499  	// match: (Neq8  x x)
  6500  	// cond:
  6501  	// result: (ConstBool [0])
  6502  	for {
  6503  		x := v.Args[0]
  6504  		if x != v.Args[1] {
  6505  			break
  6506  		}
  6507  		v.reset(OpConstBool)
  6508  		v.AuxInt = 0
  6509  		return true
  6510  	}
  6511  	// match: (Neq8  (Const8  <t> [c]) (Add8  (Const8  <t> [d]) x))
  6512  	// cond:
  6513  	// result: (Neq8 (Const8 <t> [int64(int8(c-d))]) x)
  6514  	for {
  6515  		v_0 := v.Args[0]
  6516  		if v_0.Op != OpConst8 {
  6517  			break
  6518  		}
  6519  		t := v_0.Type
  6520  		c := v_0.AuxInt
  6521  		v_1 := v.Args[1]
  6522  		if v_1.Op != OpAdd8 {
  6523  			break
  6524  		}
  6525  		v_1_0 := v_1.Args[0]
  6526  		if v_1_0.Op != OpConst8 {
  6527  			break
  6528  		}
  6529  		if v_1_0.Type != t {
  6530  			break
  6531  		}
  6532  		d := v_1_0.AuxInt
  6533  		x := v_1.Args[1]
  6534  		v.reset(OpNeq8)
  6535  		v0 := b.NewValue0(v.Pos, OpConst8, t)
  6536  		v0.AuxInt = int64(int8(c - d))
  6537  		v.AddArg(v0)
  6538  		v.AddArg(x)
  6539  		return true
  6540  	}
  6541  	// match: (Neq8  x (Const8 <t>  [c]))
  6542  	// cond: x.Op != OpConst8
  6543  	// result: (Neq8  (Const8  <t> [c]) x)
  6544  	for {
  6545  		x := v.Args[0]
  6546  		v_1 := v.Args[1]
  6547  		if v_1.Op != OpConst8 {
  6548  			break
  6549  		}
  6550  		t := v_1.Type
  6551  		c := v_1.AuxInt
  6552  		if !(x.Op != OpConst8) {
  6553  			break
  6554  		}
  6555  		v.reset(OpNeq8)
  6556  		v0 := b.NewValue0(v.Pos, OpConst8, t)
  6557  		v0.AuxInt = c
  6558  		v.AddArg(v0)
  6559  		v.AddArg(x)
  6560  		return true
  6561  	}
  6562  	// match: (Neq8  (Const8  [c]) (Const8  [d]))
  6563  	// cond:
  6564  	// result: (ConstBool [b2i(c != d)])
  6565  	for {
  6566  		v_0 := v.Args[0]
  6567  		if v_0.Op != OpConst8 {
  6568  			break
  6569  		}
  6570  		c := v_0.AuxInt
  6571  		v_1 := v.Args[1]
  6572  		if v_1.Op != OpConst8 {
  6573  			break
  6574  		}
  6575  		d := v_1.AuxInt
  6576  		v.reset(OpConstBool)
  6577  		v.AuxInt = b2i(c != d)
  6578  		return true
  6579  	}
  6580  	return false
  6581  }
  6582  func rewriteValuegeneric_OpNeqB(v *Value, config *Config) bool {
  6583  	b := v.Block
  6584  	_ = b
  6585  	// match: (NeqB (ConstBool [c]) (ConstBool [d]))
  6586  	// cond:
  6587  	// result: (ConstBool [b2i(c != d)])
  6588  	for {
  6589  		v_0 := v.Args[0]
  6590  		if v_0.Op != OpConstBool {
  6591  			break
  6592  		}
  6593  		c := v_0.AuxInt
  6594  		v_1 := v.Args[1]
  6595  		if v_1.Op != OpConstBool {
  6596  			break
  6597  		}
  6598  		d := v_1.AuxInt
  6599  		v.reset(OpConstBool)
  6600  		v.AuxInt = b2i(c != d)
  6601  		return true
  6602  	}
  6603  	// match: (NeqB (ConstBool [0]) x)
  6604  	// cond:
  6605  	// result: x
  6606  	for {
  6607  		v_0 := v.Args[0]
  6608  		if v_0.Op != OpConstBool {
  6609  			break
  6610  		}
  6611  		if v_0.AuxInt != 0 {
  6612  			break
  6613  		}
  6614  		x := v.Args[1]
  6615  		v.reset(OpCopy)
  6616  		v.Type = x.Type
  6617  		v.AddArg(x)
  6618  		return true
  6619  	}
  6620  	// match: (NeqB (ConstBool [1]) x)
  6621  	// cond:
  6622  	// result: (Not x)
  6623  	for {
  6624  		v_0 := v.Args[0]
  6625  		if v_0.Op != OpConstBool {
  6626  			break
  6627  		}
  6628  		if v_0.AuxInt != 1 {
  6629  			break
  6630  		}
  6631  		x := v.Args[1]
  6632  		v.reset(OpNot)
  6633  		v.AddArg(x)
  6634  		return true
  6635  	}
  6636  	return false
  6637  }
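// For booleans, inequality with a constant reduces to the operand or its negation:
// x != false is x and x != true is !x, which is what the ConstBool [0] -> x and
// ConstBool [1] -> (Not x) rules above implement; two constants fold via b2i.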
  6638  func rewriteValuegeneric_OpNeqInter(v *Value, config *Config) bool {
  6639  	b := v.Block
  6640  	_ = b
  6641  	// match: (NeqInter x y)
  6642  	// cond:
  6643  	// result: (NeqPtr (ITab x) (ITab y))
  6644  	for {
  6645  		x := v.Args[0]
  6646  		y := v.Args[1]
  6647  		v.reset(OpNeqPtr)
  6648  		v0 := b.NewValue0(v.Pos, OpITab, config.fe.TypeBytePtr())
  6649  		v0.AddArg(x)
  6650  		v.AddArg(v0)
  6651  		v1 := b.NewValue0(v.Pos, OpITab, config.fe.TypeBytePtr())
  6652  		v1.AddArg(y)
  6653  		v.AddArg(v1)
  6654  		return true
  6655  	}
  6656  }
  6657  func rewriteValuegeneric_OpNeqPtr(v *Value, config *Config) bool {
  6658  	b := v.Block
  6659  	_ = b
  6660  	// match: (NeqPtr p (ConstNil))
  6661  	// cond:
  6662  	// result: (IsNonNil p)
  6663  	for {
  6664  		p := v.Args[0]
  6665  		v_1 := v.Args[1]
  6666  		if v_1.Op != OpConstNil {
  6667  			break
  6668  		}
  6669  		v.reset(OpIsNonNil)
  6670  		v.AddArg(p)
  6671  		return true
  6672  	}
  6673  	// match: (NeqPtr (ConstNil) p)
  6674  	// cond:
  6675  	// result: (IsNonNil p)
  6676  	for {
  6677  		v_0 := v.Args[0]
  6678  		if v_0.Op != OpConstNil {
  6679  			break
  6680  		}
  6681  		p := v.Args[1]
  6682  		v.reset(OpIsNonNil)
  6683  		v.AddArg(p)
  6684  		return true
  6685  	}
  6686  	return false
  6687  }
  6688  func rewriteValuegeneric_OpNeqSlice(v *Value, config *Config) bool {
  6689  	b := v.Block
  6690  	_ = b
  6691  	// match: (NeqSlice x y)
  6692  	// cond:
  6693  	// result: (NeqPtr (SlicePtr x) (SlicePtr y))
  6694  	for {
  6695  		x := v.Args[0]
  6696  		y := v.Args[1]
  6697  		v.reset(OpNeqPtr)
  6698  		v0 := b.NewValue0(v.Pos, OpSlicePtr, config.fe.TypeBytePtr())
  6699  		v0.AddArg(x)
  6700  		v.AddArg(v0)
  6701  		v1 := b.NewValue0(v.Pos, OpSlicePtr, config.fe.TypeBytePtr())
  6702  		v1.AddArg(y)
  6703  		v.AddArg(v1)
  6704  		return true
  6705  	}
  6706  }
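// NeqInter, NeqPtr and NeqSlice above are lowerings rather than simplifications:
// interface inequality is rewritten to a pointer comparison of the two ITabs,
// slice inequality to a comparison of the two SlicePtrs (in Go, slices compare
// only against nil, so the data pointer is enough), and a pointer compared
// against ConstNil becomes a direct IsNonNil test.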
  6707  func rewriteValuegeneric_OpNilCheck(v *Value, config *Config) bool {
  6708  	b := v.Block
  6709  	_ = b
  6710  	// match: (NilCheck (GetG mem) mem)
  6711  	// cond:
  6712  	// result: mem
  6713  	for {
  6714  		v_0 := v.Args[0]
  6715  		if v_0.Op != OpGetG {
  6716  			break
  6717  		}
  6718  		mem := v_0.Args[0]
  6719  		if mem != v.Args[1] {
  6720  			break
  6721  		}
  6722  		v.reset(OpCopy)
  6723  		v.Type = mem.Type
  6724  		v.AddArg(mem)
  6725  		return true
  6726  	}
  6727  	// match: (NilCheck (Load (OffPtr [c] (SP)) mem) mem)
  6728  	// cond: mem.Op == OpStaticCall && isSameSym(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize() + config.RegSize && warnRule(config.Debug_checknil() && v.Pos.Line() > 1, v, "removed nil check")
  6729  	// result: (Invalid)
  6730  	for {
  6731  		v_0 := v.Args[0]
  6732  		if v_0.Op != OpLoad {
  6733  			break
  6734  		}
  6735  		v_0_0 := v_0.Args[0]
  6736  		if v_0_0.Op != OpOffPtr {
  6737  			break
  6738  		}
  6739  		c := v_0_0.AuxInt
  6740  		v_0_0_0 := v_0_0.Args[0]
  6741  		if v_0_0_0.Op != OpSP {
  6742  			break
  6743  		}
  6744  		mem := v_0.Args[1]
  6745  		if mem != v.Args[1] {
  6746  			break
  6747  		}
  6748  		if !(mem.Op == OpStaticCall && isSameSym(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize()+config.RegSize && warnRule(config.Debug_checknil() && v.Pos.Line() > 1, v, "removed nil check")) {
  6749  			break
  6750  		}
  6751  		v.reset(OpInvalid)
  6752  		return true
  6753  	}
  6754  	// match: (NilCheck (OffPtr (Load (OffPtr [c] (SP)) mem)) mem)
  6755  	// cond: mem.Op == OpStaticCall && isSameSym(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize() + config.RegSize && warnRule(config.Debug_checknil() && v.Pos.Line() > 1, v, "removed nil check")
  6756  	// result: (Invalid)
  6757  	for {
  6758  		v_0 := v.Args[0]
  6759  		if v_0.Op != OpOffPtr {
  6760  			break
  6761  		}
  6762  		v_0_0 := v_0.Args[0]
  6763  		if v_0_0.Op != OpLoad {
  6764  			break
  6765  		}
  6766  		v_0_0_0 := v_0_0.Args[0]
  6767  		if v_0_0_0.Op != OpOffPtr {
  6768  			break
  6769  		}
  6770  		c := v_0_0_0.AuxInt
  6771  		v_0_0_0_0 := v_0_0_0.Args[0]
  6772  		if v_0_0_0_0.Op != OpSP {
  6773  			break
  6774  		}
  6775  		mem := v_0_0.Args[1]
  6776  		if mem != v.Args[1] {
  6777  			break
  6778  		}
  6779  		if !(mem.Op == OpStaticCall && isSameSym(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize()+config.RegSize && warnRule(config.Debug_checknil() && v.Pos.Line() > 1, v, "removed nil check")) {
  6780  			break
  6781  		}
  6782  		v.reset(OpInvalid)
  6783  		return true
  6784  	}
  6785  	return false
  6786  }
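// The last two NilCheck rules above remove nil checks on a pointer that was just
// loaded from the result slot of a call to runtime.newobject (and, in the second
// variant, on an OffPtr into that freshly allocated object): the memory argument
// must be that StaticCall and the load's offset from SP must equal
// config.ctxt.FixedFrameSize()+config.RegSize, which is where the call's result is
// expected to live. Since runtime.newobject never returns nil, the check is
// rewritten to (Invalid) and discarded; warnRule emits the "removed nil check"
// diagnostic only when Debug_checknil is enabled but, as used here, always allows
// the rewrite to proceed.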
  6787  func rewriteValuegeneric_OpNot(v *Value, config *Config) bool {
  6788  	b := v.Block
  6789  	_ = b
  6790  	// match: (Not (Eq64 x y))
  6791  	// cond:
  6792  	// result: (Neq64 x y)
  6793  	for {
  6794  		v_0 := v.Args[0]
  6795  		if v_0.Op != OpEq64 {
  6796  			break
  6797  		}
  6798  		x := v_0.Args[0]
  6799  		y := v_0.Args[1]
  6800  		v.reset(OpNeq64)
  6801  		v.AddArg(x)
  6802  		v.AddArg(y)
  6803  		return true
  6804  	}
  6805  	// match: (Not (Eq32 x y))
  6806  	// cond:
  6807  	// result: (Neq32 x y)
  6808  	for {
  6809  		v_0 := v.Args[0]
  6810  		if v_0.Op != OpEq32 {
  6811  			break
  6812  		}
  6813  		x := v_0.Args[0]
  6814  		y := v_0.Args[1]
  6815  		v.reset(OpNeq32)
  6816  		v.AddArg(x)
  6817  		v.AddArg(y)
  6818  		return true
  6819  	}
  6820  	// match: (Not (Eq16 x y))
  6821  	// cond:
  6822  	// result: (Neq16 x y)
  6823  	for {
  6824  		v_0 := v.Args[0]
  6825  		if v_0.Op != OpEq16 {
  6826  			break
  6827  		}
  6828  		x := v_0.Args[0]
  6829  		y := v_0.Args[1]
  6830  		v.reset(OpNeq16)
  6831  		v.AddArg(x)
  6832  		v.AddArg(y)
  6833  		return true
  6834  	}
  6835  	// match: (Not (Eq8  x y))
  6836  	// cond:
  6837  	// result: (Neq8  x y)
  6838  	for {
  6839  		v_0 := v.Args[0]
  6840  		if v_0.Op != OpEq8 {
  6841  			break
  6842  		}
  6843  		x := v_0.Args[0]
  6844  		y := v_0.Args[1]
  6845  		v.reset(OpNeq8)
  6846  		v.AddArg(x)
  6847  		v.AddArg(y)
  6848  		return true
  6849  	}
  6850  	// match: (Not (EqB  x y))
  6851  	// cond:
  6852  	// result: (NeqB  x y)
  6853  	for {
  6854  		v_0 := v.Args[0]
  6855  		if v_0.Op != OpEqB {
  6856  			break
  6857  		}
  6858  		x := v_0.Args[0]
  6859  		y := v_0.Args[1]
  6860  		v.reset(OpNeqB)
  6861  		v.AddArg(x)
  6862  		v.AddArg(y)
  6863  		return true
  6864  	}
  6865  	// match: (Not (Neq64 x y))
  6866  	// cond:
  6867  	// result: (Eq64 x y)
  6868  	for {
  6869  		v_0 := v.Args[0]
  6870  		if v_0.Op != OpNeq64 {
  6871  			break
  6872  		}
  6873  		x := v_0.Args[0]
  6874  		y := v_0.Args[1]
  6875  		v.reset(OpEq64)
  6876  		v.AddArg(x)
  6877  		v.AddArg(y)
  6878  		return true
  6879  	}
  6880  	// match: (Not (Neq32 x y))
  6881  	// cond:
  6882  	// result: (Eq32 x y)
  6883  	for {
  6884  		v_0 := v.Args[0]
  6885  		if v_0.Op != OpNeq32 {
  6886  			break
  6887  		}
  6888  		x := v_0.Args[0]
  6889  		y := v_0.Args[1]
  6890  		v.reset(OpEq32)
  6891  		v.AddArg(x)
  6892  		v.AddArg(y)
  6893  		return true
  6894  	}
  6895  	// match: (Not (Neq16 x y))
  6896  	// cond:
  6897  	// result: (Eq16 x y)
  6898  	for {
  6899  		v_0 := v.Args[0]
  6900  		if v_0.Op != OpNeq16 {
  6901  			break
  6902  		}
  6903  		x := v_0.Args[0]
  6904  		y := v_0.Args[1]
  6905  		v.reset(OpEq16)
  6906  		v.AddArg(x)
  6907  		v.AddArg(y)
  6908  		return true
  6909  	}
  6910  	// match: (Not (Neq8  x y))
  6911  	// cond:
  6912  	// result: (Eq8  x y)
  6913  	for {
  6914  		v_0 := v.Args[0]
  6915  		if v_0.Op != OpNeq8 {
  6916  			break
  6917  		}
  6918  		x := v_0.Args[0]
  6919  		y := v_0.Args[1]
  6920  		v.reset(OpEq8)
  6921  		v.AddArg(x)
  6922  		v.AddArg(y)
  6923  		return true
  6924  	}
  6925  	// match: (Not (NeqB  x y))
  6926  	// cond:
  6927  	// result: (EqB  x y)
  6928  	for {
  6929  		v_0 := v.Args[0]
  6930  		if v_0.Op != OpNeqB {
  6931  			break
  6932  		}
  6933  		x := v_0.Args[0]
  6934  		y := v_0.Args[1]
  6935  		v.reset(OpEqB)
  6936  		v.AddArg(x)
  6937  		v.AddArg(y)
  6938  		return true
  6939  	}
  6940  	// match: (Not (Greater64 x y))
  6941  	// cond:
  6942  	// result: (Leq64 x y)
  6943  	for {
  6944  		v_0 := v.Args[0]
  6945  		if v_0.Op != OpGreater64 {
  6946  			break
  6947  		}
  6948  		x := v_0.Args[0]
  6949  		y := v_0.Args[1]
  6950  		v.reset(OpLeq64)
  6951  		v.AddArg(x)
  6952  		v.AddArg(y)
  6953  		return true
  6954  	}
  6955  	// match: (Not (Greater32 x y))
  6956  	// cond:
  6957  	// result: (Leq32 x y)
  6958  	for {
  6959  		v_0 := v.Args[0]
  6960  		if v_0.Op != OpGreater32 {
  6961  			break
  6962  		}
  6963  		x := v_0.Args[0]
  6964  		y := v_0.Args[1]
  6965  		v.reset(OpLeq32)
  6966  		v.AddArg(x)
  6967  		v.AddArg(y)
  6968  		return true
  6969  	}
  6970  	// match: (Not (Greater16 x y))
  6971  	// cond:
  6972  	// result: (Leq16 x y)
  6973  	for {
  6974  		v_0 := v.Args[0]
  6975  		if v_0.Op != OpGreater16 {
  6976  			break
  6977  		}
  6978  		x := v_0.Args[0]
  6979  		y := v_0.Args[1]
  6980  		v.reset(OpLeq16)
  6981  		v.AddArg(x)
  6982  		v.AddArg(y)
  6983  		return true
  6984  	}
  6985  	// match: (Not (Greater8  x y))
  6986  	// cond:
  6987  	// result: (Leq8  x y)
  6988  	for {
  6989  		v_0 := v.Args[0]
  6990  		if v_0.Op != OpGreater8 {
  6991  			break
  6992  		}
  6993  		x := v_0.Args[0]
  6994  		y := v_0.Args[1]
  6995  		v.reset(OpLeq8)
  6996  		v.AddArg(x)
  6997  		v.AddArg(y)
  6998  		return true
  6999  	}
  7000  	// match: (Not (Greater64U x y))
  7001  	// cond:
  7002  	// result: (Leq64U x y)
  7003  	for {
  7004  		v_0 := v.Args[0]
  7005  		if v_0.Op != OpGreater64U {
  7006  			break
  7007  		}
  7008  		x := v_0.Args[0]
  7009  		y := v_0.Args[1]
  7010  		v.reset(OpLeq64U)
  7011  		v.AddArg(x)
  7012  		v.AddArg(y)
  7013  		return true
  7014  	}
  7015  	// match: (Not (Greater32U x y))
  7016  	// cond:
  7017  	// result: (Leq32U x y)
  7018  	for {
  7019  		v_0 := v.Args[0]
  7020  		if v_0.Op != OpGreater32U {
  7021  			break
  7022  		}
  7023  		x := v_0.Args[0]
  7024  		y := v_0.Args[1]
  7025  		v.reset(OpLeq32U)
  7026  		v.AddArg(x)
  7027  		v.AddArg(y)
  7028  		return true
  7029  	}
  7030  	// match: (Not (Greater16U x y))
  7031  	// cond:
  7032  	// result: (Leq16U x y)
  7033  	for {
  7034  		v_0 := v.Args[0]
  7035  		if v_0.Op != OpGreater16U {
  7036  			break
  7037  		}
  7038  		x := v_0.Args[0]
  7039  		y := v_0.Args[1]
  7040  		v.reset(OpLeq16U)
  7041  		v.AddArg(x)
  7042  		v.AddArg(y)
  7043  		return true
  7044  	}
  7045  	// match: (Not (Greater8U  x y))
  7046  	// cond:
  7047  	// result: (Leq8U  x y)
  7048  	for {
  7049  		v_0 := v.Args[0]
  7050  		if v_0.Op != OpGreater8U {
  7051  			break
  7052  		}
  7053  		x := v_0.Args[0]
  7054  		y := v_0.Args[1]
  7055  		v.reset(OpLeq8U)
  7056  		v.AddArg(x)
  7057  		v.AddArg(y)
  7058  		return true
  7059  	}
  7060  	// match: (Not (Geq64 x y))
  7061  	// cond:
  7062  	// result: (Less64 x y)
  7063  	for {
  7064  		v_0 := v.Args[0]
  7065  		if v_0.Op != OpGeq64 {
  7066  			break
  7067  		}
  7068  		x := v_0.Args[0]
  7069  		y := v_0.Args[1]
  7070  		v.reset(OpLess64)
  7071  		v.AddArg(x)
  7072  		v.AddArg(y)
  7073  		return true
  7074  	}
  7075  	// match: (Not (Geq32 x y))
  7076  	// cond:
  7077  	// result: (Less32 x y)
  7078  	for {
  7079  		v_0 := v.Args[0]
  7080  		if v_0.Op != OpGeq32 {
  7081  			break
  7082  		}
  7083  		x := v_0.Args[0]
  7084  		y := v_0.Args[1]
  7085  		v.reset(OpLess32)
  7086  		v.AddArg(x)
  7087  		v.AddArg(y)
  7088  		return true
  7089  	}
  7090  	// match: (Not (Geq16 x y))
  7091  	// cond:
  7092  	// result: (Less16 x y)
  7093  	for {
  7094  		v_0 := v.Args[0]
  7095  		if v_0.Op != OpGeq16 {
  7096  			break
  7097  		}
  7098  		x := v_0.Args[0]
  7099  		y := v_0.Args[1]
  7100  		v.reset(OpLess16)
  7101  		v.AddArg(x)
  7102  		v.AddArg(y)
  7103  		return true
  7104  	}
  7105  	// match: (Not (Geq8  x y))
  7106  	// cond:
  7107  	// result: (Less8  x y)
  7108  	for {
  7109  		v_0 := v.Args[0]
  7110  		if v_0.Op != OpGeq8 {
  7111  			break
  7112  		}
  7113  		x := v_0.Args[0]
  7114  		y := v_0.Args[1]
  7115  		v.reset(OpLess8)
  7116  		v.AddArg(x)
  7117  		v.AddArg(y)
  7118  		return true
  7119  	}
  7120  	// match: (Not (Geq64U x y))
  7121  	// cond:
  7122  	// result: (Less64U x y)
  7123  	for {
  7124  		v_0 := v.Args[0]
  7125  		if v_0.Op != OpGeq64U {
  7126  			break
  7127  		}
  7128  		x := v_0.Args[0]
  7129  		y := v_0.Args[1]
  7130  		v.reset(OpLess64U)
  7131  		v.AddArg(x)
  7132  		v.AddArg(y)
  7133  		return true
  7134  	}
  7135  	// match: (Not (Geq32U x y))
  7136  	// cond:
  7137  	// result: (Less32U x y)
  7138  	for {
  7139  		v_0 := v.Args[0]
  7140  		if v_0.Op != OpGeq32U {
  7141  			break
  7142  		}
  7143  		x := v_0.Args[0]
  7144  		y := v_0.Args[1]
  7145  		v.reset(OpLess32U)
  7146  		v.AddArg(x)
  7147  		v.AddArg(y)
  7148  		return true
  7149  	}
  7150  	// match: (Not (Geq16U x y))
  7151  	// cond:
  7152  	// result: (Less16U x y)
  7153  	for {
  7154  		v_0 := v.Args[0]
  7155  		if v_0.Op != OpGeq16U {
  7156  			break
  7157  		}
  7158  		x := v_0.Args[0]
  7159  		y := v_0.Args[1]
  7160  		v.reset(OpLess16U)
  7161  		v.AddArg(x)
  7162  		v.AddArg(y)
  7163  		return true
  7164  	}
  7165  	// match: (Not (Geq8U  x y))
  7166  	// cond:
  7167  	// result: (Less8U  x y)
  7168  	for {
  7169  		v_0 := v.Args[0]
  7170  		if v_0.Op != OpGeq8U {
  7171  			break
  7172  		}
  7173  		x := v_0.Args[0]
  7174  		y := v_0.Args[1]
  7175  		v.reset(OpLess8U)
  7176  		v.AddArg(x)
  7177  		v.AddArg(y)
  7178  		return true
  7179  	}
  7180  	// match: (Not (Less64 x y))
  7181  	// cond:
  7182  	// result: (Geq64 x y)
  7183  	for {
  7184  		v_0 := v.Args[0]
  7185  		if v_0.Op != OpLess64 {
  7186  			break
  7187  		}
  7188  		x := v_0.Args[0]
  7189  		y := v_0.Args[1]
  7190  		v.reset(OpGeq64)
  7191  		v.AddArg(x)
  7192  		v.AddArg(y)
  7193  		return true
  7194  	}
  7195  	// match: (Not (Less32 x y))
  7196  	// cond:
  7197  	// result: (Geq32 x y)
  7198  	for {
  7199  		v_0 := v.Args[0]
  7200  		if v_0.Op != OpLess32 {
  7201  			break
  7202  		}
  7203  		x := v_0.Args[0]
  7204  		y := v_0.Args[1]
  7205  		v.reset(OpGeq32)
  7206  		v.AddArg(x)
  7207  		v.AddArg(y)
  7208  		return true
  7209  	}
  7210  	// match: (Not (Less16 x y))
  7211  	// cond:
  7212  	// result: (Geq16 x y)
  7213  	for {
  7214  		v_0 := v.Args[0]
  7215  		if v_0.Op != OpLess16 {
  7216  			break
  7217  		}
  7218  		x := v_0.Args[0]
  7219  		y := v_0.Args[1]
  7220  		v.reset(OpGeq16)
  7221  		v.AddArg(x)
  7222  		v.AddArg(y)
  7223  		return true
  7224  	}
  7225  	// match: (Not (Less8  x y))
  7226  	// cond:
  7227  	// result: (Geq8  x y)
  7228  	for {
  7229  		v_0 := v.Args[0]
  7230  		if v_0.Op != OpLess8 {
  7231  			break
  7232  		}
  7233  		x := v_0.Args[0]
  7234  		y := v_0.Args[1]
  7235  		v.reset(OpGeq8)
  7236  		v.AddArg(x)
  7237  		v.AddArg(y)
  7238  		return true
  7239  	}
  7240  	// match: (Not (Less64U x y))
  7241  	// cond:
  7242  	// result: (Geq64U x y)
  7243  	for {
  7244  		v_0 := v.Args[0]
  7245  		if v_0.Op != OpLess64U {
  7246  			break
  7247  		}
  7248  		x := v_0.Args[0]
  7249  		y := v_0.Args[1]
  7250  		v.reset(OpGeq64U)
  7251  		v.AddArg(x)
  7252  		v.AddArg(y)
  7253  		return true
  7254  	}
  7255  	// match: (Not (Less32U x y))
  7256  	// cond:
  7257  	// result: (Geq32U x y)
  7258  	for {
  7259  		v_0 := v.Args[0]
  7260  		if v_0.Op != OpLess32U {
  7261  			break
  7262  		}
  7263  		x := v_0.Args[0]
  7264  		y := v_0.Args[1]
  7265  		v.reset(OpGeq32U)
  7266  		v.AddArg(x)
  7267  		v.AddArg(y)
  7268  		return true
  7269  	}
  7270  	// match: (Not (Less16U x y))
  7271  	// cond:
  7272  	// result: (Geq16U x y)
  7273  	for {
  7274  		v_0 := v.Args[0]
  7275  		if v_0.Op != OpLess16U {
  7276  			break
  7277  		}
  7278  		x := v_0.Args[0]
  7279  		y := v_0.Args[1]
  7280  		v.reset(OpGeq16U)
  7281  		v.AddArg(x)
  7282  		v.AddArg(y)
  7283  		return true
  7284  	}
  7285  	// match: (Not (Less8U  x y))
  7286  	// cond:
  7287  	// result: (Geq8U  x y)
  7288  	for {
  7289  		v_0 := v.Args[0]
  7290  		if v_0.Op != OpLess8U {
  7291  			break
  7292  		}
  7293  		x := v_0.Args[0]
  7294  		y := v_0.Args[1]
  7295  		v.reset(OpGeq8U)
  7296  		v.AddArg(x)
  7297  		v.AddArg(y)
  7298  		return true
  7299  	}
  7300  	// match: (Not (Leq64 x y))
  7301  	// cond:
  7302  	// result: (Greater64 x y)
  7303  	for {
  7304  		v_0 := v.Args[0]
  7305  		if v_0.Op != OpLeq64 {
  7306  			break
  7307  		}
  7308  		x := v_0.Args[0]
  7309  		y := v_0.Args[1]
  7310  		v.reset(OpGreater64)
  7311  		v.AddArg(x)
  7312  		v.AddArg(y)
  7313  		return true
  7314  	}
  7315  	// match: (Not (Leq32 x y))
  7316  	// cond:
  7317  	// result: (Greater32 x y)
  7318  	for {
  7319  		v_0 := v.Args[0]
  7320  		if v_0.Op != OpLeq32 {
  7321  			break
  7322  		}
  7323  		x := v_0.Args[0]
  7324  		y := v_0.Args[1]
  7325  		v.reset(OpGreater32)
  7326  		v.AddArg(x)
  7327  		v.AddArg(y)
  7328  		return true
  7329  	}
  7330  	// match: (Not (Leq16 x y))
  7331  	// cond:
  7332  	// result: (Greater16 x y)
  7333  	for {
  7334  		v_0 := v.Args[0]
  7335  		if v_0.Op != OpLeq16 {
  7336  			break
  7337  		}
  7338  		x := v_0.Args[0]
  7339  		y := v_0.Args[1]
  7340  		v.reset(OpGreater16)
  7341  		v.AddArg(x)
  7342  		v.AddArg(y)
  7343  		return true
  7344  	}
  7345  	// match: (Not (Leq8  x y))
  7346  	// cond:
  7347  	// result: (Greater8 x y)
  7348  	for {
  7349  		v_0 := v.Args[0]
  7350  		if v_0.Op != OpLeq8 {
  7351  			break
  7352  		}
  7353  		x := v_0.Args[0]
  7354  		y := v_0.Args[1]
  7355  		v.reset(OpGreater8)
  7356  		v.AddArg(x)
  7357  		v.AddArg(y)
  7358  		return true
  7359  	}
  7360  	// match: (Not (Leq64U x y))
  7361  	// cond:
  7362  	// result: (Greater64U x y)
  7363  	for {
  7364  		v_0 := v.Args[0]
  7365  		if v_0.Op != OpLeq64U {
  7366  			break
  7367  		}
  7368  		x := v_0.Args[0]
  7369  		y := v_0.Args[1]
  7370  		v.reset(OpGreater64U)
  7371  		v.AddArg(x)
  7372  		v.AddArg(y)
  7373  		return true
  7374  	}
  7375  	// match: (Not (Leq32U x y))
  7376  	// cond:
  7377  	// result: (Greater32U x y)
  7378  	for {
  7379  		v_0 := v.Args[0]
  7380  		if v_0.Op != OpLeq32U {
  7381  			break
  7382  		}
  7383  		x := v_0.Args[0]
  7384  		y := v_0.Args[1]
  7385  		v.reset(OpGreater32U)
  7386  		v.AddArg(x)
  7387  		v.AddArg(y)
  7388  		return true
  7389  	}
  7390  	// match: (Not (Leq16U x y))
  7391  	// cond:
  7392  	// result: (Greater16U x y)
  7393  	for {
  7394  		v_0 := v.Args[0]
  7395  		if v_0.Op != OpLeq16U {
  7396  			break
  7397  		}
  7398  		x := v_0.Args[0]
  7399  		y := v_0.Args[1]
  7400  		v.reset(OpGreater16U)
  7401  		v.AddArg(x)
  7402  		v.AddArg(y)
  7403  		return true
  7404  	}
  7405  	// match: (Not (Leq8U  x y))
  7406  	// cond:
  7407  	// result: (Greater8U  x y)
  7408  	for {
  7409  		v_0 := v.Args[0]
  7410  		if v_0.Op != OpLeq8U {
  7411  			break
  7412  		}
  7413  		x := v_0.Args[0]
  7414  		y := v_0.Args[1]
  7415  		v.reset(OpGreater8U)
  7416  		v.AddArg(x)
  7417  		v.AddArg(y)
  7418  		return true
  7419  	}
  7420  	return false
  7421  }
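// Not is pushed through comparisons instead of surviving as a separate value:
// !(a == b) becomes a != b and vice versa, and every ordered integer comparison is
// replaced by its logical complement, for instance:
//
//	(Not (Less64 x y)) -> (Geq64 x y)      // !(x < y)  is  x >= y
//	(Not (Leq8U x y))  -> (Greater8U x y)  // !(x <= y) is  x > y  (unsigned)
//
// Note that no floating-point comparisons appear above; with NaN operands,
// !(x < y) is not the same as x >= y.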
  7422  func rewriteValuegeneric_OpOffPtr(v *Value, config *Config) bool {
  7423  	b := v.Block
  7424  	_ = b
  7425  	// match: (OffPtr (OffPtr p [b]) [a])
  7426  	// cond:
  7427  	// result: (OffPtr p [a+b])
  7428  	for {
  7429  		a := v.AuxInt
  7430  		v_0 := v.Args[0]
  7431  		if v_0.Op != OpOffPtr {
  7432  			break
  7433  		}
  7434  		b := v_0.AuxInt
  7435  		p := v_0.Args[0]
  7436  		v.reset(OpOffPtr)
  7437  		v.AuxInt = a + b
  7438  		v.AddArg(p)
  7439  		return true
  7440  	}
  7441  	// match: (OffPtr p [0])
  7442  	// cond: v.Type.Compare(p.Type) == CMPeq
  7443  	// result: p
  7444  	for {
  7445  		if v.AuxInt != 0 {
  7446  			break
  7447  		}
  7448  		p := v.Args[0]
  7449  		if !(v.Type.Compare(p.Type) == CMPeq) {
  7450  			break
  7451  		}
  7452  		v.reset(OpCopy)
  7453  		v.Type = p.Type
  7454  		v.AddArg(p)
  7455  		return true
  7456  	}
  7457  	return false
  7458  }
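// OffPtr arithmetic: nested offsets are summed into a single OffPtr, e.g.
// (OffPtr (OffPtr p [16]) [8]) collapses to (OffPtr p [24]), and a zero offset
// reduces to the underlying pointer only when the types compare equal (CMPeq), so
// a zero OffPtr that merely changes the pointer's type is left in place.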
  7459  func rewriteValuegeneric_OpOr16(v *Value, config *Config) bool {
  7460  	b := v.Block
  7461  	_ = b
  7462  	// match: (Or16 x (Const16 <t> [c]))
  7463  	// cond: x.Op != OpConst16
  7464  	// result: (Or16 (Const16 <t> [c]) x)
  7465  	for {
  7466  		x := v.Args[0]
  7467  		v_1 := v.Args[1]
  7468  		if v_1.Op != OpConst16 {
  7469  			break
  7470  		}
  7471  		t := v_1.Type
  7472  		c := v_1.AuxInt
  7473  		if !(x.Op != OpConst16) {
  7474  			break
  7475  		}
  7476  		v.reset(OpOr16)
  7477  		v0 := b.NewValue0(v.Pos, OpConst16, t)
  7478  		v0.AuxInt = c
  7479  		v.AddArg(v0)
  7480  		v.AddArg(x)
  7481  		return true
  7482  	}
  7483  	// match: (Or16 x x)
  7484  	// cond:
  7485  	// result: x
  7486  	for {
  7487  		x := v.Args[0]
  7488  		if x != v.Args[1] {
  7489  			break
  7490  		}
  7491  		v.reset(OpCopy)
  7492  		v.Type = x.Type
  7493  		v.AddArg(x)
  7494  		return true
  7495  	}
  7496  	// match: (Or16 (Const16 [0]) x)
  7497  	// cond:
  7498  	// result: x
  7499  	for {
  7500  		v_0 := v.Args[0]
  7501  		if v_0.Op != OpConst16 {
  7502  			break
  7503  		}
  7504  		if v_0.AuxInt != 0 {
  7505  			break
  7506  		}
  7507  		x := v.Args[1]
  7508  		v.reset(OpCopy)
  7509  		v.Type = x.Type
  7510  		v.AddArg(x)
  7511  		return true
  7512  	}
  7513  	// match: (Or16 (Const16 [-1]) _)
  7514  	// cond:
  7515  	// result: (Const16 [-1])
  7516  	for {
  7517  		v_0 := v.Args[0]
  7518  		if v_0.Op != OpConst16 {
  7519  			break
  7520  		}
  7521  		if v_0.AuxInt != -1 {
  7522  			break
  7523  		}
  7524  		v.reset(OpConst16)
  7525  		v.AuxInt = -1
  7526  		return true
  7527  	}
  7528  	// match: (Or16 x (Or16 x y))
  7529  	// cond:
  7530  	// result: (Or16 x y)
  7531  	for {
  7532  		x := v.Args[0]
  7533  		v_1 := v.Args[1]
  7534  		if v_1.Op != OpOr16 {
  7535  			break
  7536  		}
  7537  		if x != v_1.Args[0] {
  7538  			break
  7539  		}
  7540  		y := v_1.Args[1]
  7541  		v.reset(OpOr16)
  7542  		v.AddArg(x)
  7543  		v.AddArg(y)
  7544  		return true
  7545  	}
  7546  	// match: (Or16 x (Or16 y x))
  7547  	// cond:
  7548  	// result: (Or16 x y)
  7549  	for {
  7550  		x := v.Args[0]
  7551  		v_1 := v.Args[1]
  7552  		if v_1.Op != OpOr16 {
  7553  			break
  7554  		}
  7555  		y := v_1.Args[0]
  7556  		if x != v_1.Args[1] {
  7557  			break
  7558  		}
  7559  		v.reset(OpOr16)
  7560  		v.AddArg(x)
  7561  		v.AddArg(y)
  7562  		return true
  7563  	}
  7564  	// match: (Or16 (Or16 x y) x)
  7565  	// cond:
  7566  	// result: (Or16 x y)
  7567  	for {
  7568  		v_0 := v.Args[0]
  7569  		if v_0.Op != OpOr16 {
  7570  			break
  7571  		}
  7572  		x := v_0.Args[0]
  7573  		y := v_0.Args[1]
  7574  		if x != v.Args[1] {
  7575  			break
  7576  		}
  7577  		v.reset(OpOr16)
  7578  		v.AddArg(x)
  7579  		v.AddArg(y)
  7580  		return true
  7581  	}
  7582  	// match: (Or16 (Or16 x y) y)
  7583  	// cond:
  7584  	// result: (Or16 x y)
  7585  	for {
  7586  		v_0 := v.Args[0]
  7587  		if v_0.Op != OpOr16 {
  7588  			break
  7589  		}
  7590  		x := v_0.Args[0]
  7591  		y := v_0.Args[1]
  7592  		if y != v.Args[1] {
  7593  			break
  7594  		}
  7595  		v.reset(OpOr16)
  7596  		v.AddArg(x)
  7597  		v.AddArg(y)
  7598  		return true
  7599  	}
  7600  	return false
  7601  }
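// The Or16 rules above are the usual bitwise-or identities, repeated below for the
// 32-, 64- and 8-bit widths: canonicalize a constant operand to the left, x|x = x,
// 0|x = x, (-1)|x = -1 (an all-ones operand absorbs everything), and the four
// absorption shapes x|(x|y), x|(y|x), (x|y)|x and (x|y)|y, each reducing to x|y.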
  7602  func rewriteValuegeneric_OpOr32(v *Value, config *Config) bool {
  7603  	b := v.Block
  7604  	_ = b
  7605  	// match: (Or32 x (Const32 <t> [c]))
  7606  	// cond: x.Op != OpConst32
  7607  	// result: (Or32 (Const32 <t> [c]) x)
  7608  	for {
  7609  		x := v.Args[0]
  7610  		v_1 := v.Args[1]
  7611  		if v_1.Op != OpConst32 {
  7612  			break
  7613  		}
  7614  		t := v_1.Type
  7615  		c := v_1.AuxInt
  7616  		if !(x.Op != OpConst32) {
  7617  			break
  7618  		}
  7619  		v.reset(OpOr32)
  7620  		v0 := b.NewValue0(v.Pos, OpConst32, t)
  7621  		v0.AuxInt = c
  7622  		v.AddArg(v0)
  7623  		v.AddArg(x)
  7624  		return true
  7625  	}
  7626  	// match: (Or32 x x)
  7627  	// cond:
  7628  	// result: x
  7629  	for {
  7630  		x := v.Args[0]
  7631  		if x != v.Args[1] {
  7632  			break
  7633  		}
  7634  		v.reset(OpCopy)
  7635  		v.Type = x.Type
  7636  		v.AddArg(x)
  7637  		return true
  7638  	}
  7639  	// match: (Or32 (Const32 [0]) x)
  7640  	// cond:
  7641  	// result: x
  7642  	for {
  7643  		v_0 := v.Args[0]
  7644  		if v_0.Op != OpConst32 {
  7645  			break
  7646  		}
  7647  		if v_0.AuxInt != 0 {
  7648  			break
  7649  		}
  7650  		x := v.Args[1]
  7651  		v.reset(OpCopy)
  7652  		v.Type = x.Type
  7653  		v.AddArg(x)
  7654  		return true
  7655  	}
  7656  	// match: (Or32 (Const32 [-1]) _)
  7657  	// cond:
  7658  	// result: (Const32 [-1])
  7659  	for {
  7660  		v_0 := v.Args[0]
  7661  		if v_0.Op != OpConst32 {
  7662  			break
  7663  		}
  7664  		if v_0.AuxInt != -1 {
  7665  			break
  7666  		}
  7667  		v.reset(OpConst32)
  7668  		v.AuxInt = -1
  7669  		return true
  7670  	}
  7671  	// match: (Or32 x (Or32 x y))
  7672  	// cond:
  7673  	// result: (Or32 x y)
  7674  	for {
  7675  		x := v.Args[0]
  7676  		v_1 := v.Args[1]
  7677  		if v_1.Op != OpOr32 {
  7678  			break
  7679  		}
  7680  		if x != v_1.Args[0] {
  7681  			break
  7682  		}
  7683  		y := v_1.Args[1]
  7684  		v.reset(OpOr32)
  7685  		v.AddArg(x)
  7686  		v.AddArg(y)
  7687  		return true
  7688  	}
  7689  	// match: (Or32 x (Or32 y x))
  7690  	// cond:
  7691  	// result: (Or32 x y)
  7692  	for {
  7693  		x := v.Args[0]
  7694  		v_1 := v.Args[1]
  7695  		if v_1.Op != OpOr32 {
  7696  			break
  7697  		}
  7698  		y := v_1.Args[0]
  7699  		if x != v_1.Args[1] {
  7700  			break
  7701  		}
  7702  		v.reset(OpOr32)
  7703  		v.AddArg(x)
  7704  		v.AddArg(y)
  7705  		return true
  7706  	}
  7707  	// match: (Or32 (Or32 x y) x)
  7708  	// cond:
  7709  	// result: (Or32 x y)
  7710  	for {
  7711  		v_0 := v.Args[0]
  7712  		if v_0.Op != OpOr32 {
  7713  			break
  7714  		}
  7715  		x := v_0.Args[0]
  7716  		y := v_0.Args[1]
  7717  		if x != v.Args[1] {
  7718  			break
  7719  		}
  7720  		v.reset(OpOr32)
  7721  		v.AddArg(x)
  7722  		v.AddArg(y)
  7723  		return true
  7724  	}
  7725  	// match: (Or32 (Or32 x y) y)
  7726  	// cond:
  7727  	// result: (Or32 x y)
  7728  	for {
  7729  		v_0 := v.Args[0]
  7730  		if v_0.Op != OpOr32 {
  7731  			break
  7732  		}
  7733  		x := v_0.Args[0]
  7734  		y := v_0.Args[1]
  7735  		if y != v.Args[1] {
  7736  			break
  7737  		}
  7738  		v.reset(OpOr32)
  7739  		v.AddArg(x)
  7740  		v.AddArg(y)
  7741  		return true
  7742  	}
  7743  	return false
  7744  }
  7745  func rewriteValuegeneric_OpOr64(v *Value, config *Config) bool {
  7746  	b := v.Block
  7747  	_ = b
  7748  	// match: (Or64 x (Const64 <t> [c]))
  7749  	// cond: x.Op != OpConst64
  7750  	// result: (Or64 (Const64 <t> [c]) x)
  7751  	for {
  7752  		x := v.Args[0]
  7753  		v_1 := v.Args[1]
  7754  		if v_1.Op != OpConst64 {
  7755  			break
  7756  		}
  7757  		t := v_1.Type
  7758  		c := v_1.AuxInt
  7759  		if !(x.Op != OpConst64) {
  7760  			break
  7761  		}
  7762  		v.reset(OpOr64)
  7763  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  7764  		v0.AuxInt = c
  7765  		v.AddArg(v0)
  7766  		v.AddArg(x)
  7767  		return true
  7768  	}
  7769  	// match: (Or64 x x)
  7770  	// cond:
  7771  	// result: x
  7772  	for {
  7773  		x := v.Args[0]
  7774  		if x != v.Args[1] {
  7775  			break
  7776  		}
  7777  		v.reset(OpCopy)
  7778  		v.Type = x.Type
  7779  		v.AddArg(x)
  7780  		return true
  7781  	}
  7782  	// match: (Or64 (Const64 [0]) x)
  7783  	// cond:
  7784  	// result: x
  7785  	for {
  7786  		v_0 := v.Args[0]
  7787  		if v_0.Op != OpConst64 {
  7788  			break
  7789  		}
  7790  		if v_0.AuxInt != 0 {
  7791  			break
  7792  		}
  7793  		x := v.Args[1]
  7794  		v.reset(OpCopy)
  7795  		v.Type = x.Type
  7796  		v.AddArg(x)
  7797  		return true
  7798  	}
  7799  	// match: (Or64 (Const64 [-1]) _)
  7800  	// cond:
  7801  	// result: (Const64 [-1])
  7802  	for {
  7803  		v_0 := v.Args[0]
  7804  		if v_0.Op != OpConst64 {
  7805  			break
  7806  		}
  7807  		if v_0.AuxInt != -1 {
  7808  			break
  7809  		}
  7810  		v.reset(OpConst64)
  7811  		v.AuxInt = -1
  7812  		return true
  7813  	}
  7814  	// match: (Or64 x (Or64 x y))
  7815  	// cond:
  7816  	// result: (Or64 x y)
  7817  	for {
  7818  		x := v.Args[0]
  7819  		v_1 := v.Args[1]
  7820  		if v_1.Op != OpOr64 {
  7821  			break
  7822  		}
  7823  		if x != v_1.Args[0] {
  7824  			break
  7825  		}
  7826  		y := v_1.Args[1]
  7827  		v.reset(OpOr64)
  7828  		v.AddArg(x)
  7829  		v.AddArg(y)
  7830  		return true
  7831  	}
  7832  	// match: (Or64 x (Or64 y x))
  7833  	// cond:
  7834  	// result: (Or64 x y)
  7835  	for {
  7836  		x := v.Args[0]
  7837  		v_1 := v.Args[1]
  7838  		if v_1.Op != OpOr64 {
  7839  			break
  7840  		}
  7841  		y := v_1.Args[0]
  7842  		if x != v_1.Args[1] {
  7843  			break
  7844  		}
  7845  		v.reset(OpOr64)
  7846  		v.AddArg(x)
  7847  		v.AddArg(y)
  7848  		return true
  7849  	}
  7850  	// match: (Or64 (Or64 x y) x)
  7851  	// cond:
  7852  	// result: (Or64 x y)
  7853  	for {
  7854  		v_0 := v.Args[0]
  7855  		if v_0.Op != OpOr64 {
  7856  			break
  7857  		}
  7858  		x := v_0.Args[0]
  7859  		y := v_0.Args[1]
  7860  		if x != v.Args[1] {
  7861  			break
  7862  		}
  7863  		v.reset(OpOr64)
  7864  		v.AddArg(x)
  7865  		v.AddArg(y)
  7866  		return true
  7867  	}
  7868  	// match: (Or64 (Or64 x y) y)
  7869  	// cond:
  7870  	// result: (Or64 x y)
  7871  	for {
  7872  		v_0 := v.Args[0]
  7873  		if v_0.Op != OpOr64 {
  7874  			break
  7875  		}
  7876  		x := v_0.Args[0]
  7877  		y := v_0.Args[1]
  7878  		if y != v.Args[1] {
  7879  			break
  7880  		}
  7881  		v.reset(OpOr64)
  7882  		v.AddArg(x)
  7883  		v.AddArg(y)
  7884  		return true
  7885  	}
  7886  	return false
  7887  }
  7888  func rewriteValuegeneric_OpOr8(v *Value, config *Config) bool {
  7889  	b := v.Block
  7890  	_ = b
  7891  	// match: (Or8  x (Const8  <t> [c]))
  7892  	// cond: x.Op != OpConst8
  7893  	// result: (Or8  (Const8  <t> [c]) x)
  7894  	for {
  7895  		x := v.Args[0]
  7896  		v_1 := v.Args[1]
  7897  		if v_1.Op != OpConst8 {
  7898  			break
  7899  		}
  7900  		t := v_1.Type
  7901  		c := v_1.AuxInt
  7902  		if !(x.Op != OpConst8) {
  7903  			break
  7904  		}
  7905  		v.reset(OpOr8)
  7906  		v0 := b.NewValue0(v.Pos, OpConst8, t)
  7907  		v0.AuxInt = c
  7908  		v.AddArg(v0)
  7909  		v.AddArg(x)
  7910  		return true
  7911  	}
  7912  	// match: (Or8  x x)
  7913  	// cond:
  7914  	// result: x
  7915  	for {
  7916  		x := v.Args[0]
  7917  		if x != v.Args[1] {
  7918  			break
  7919  		}
  7920  		v.reset(OpCopy)
  7921  		v.Type = x.Type
  7922  		v.AddArg(x)
  7923  		return true
  7924  	}
  7925  	// match: (Or8  (Const8  [0]) x)
  7926  	// cond:
  7927  	// result: x
  7928  	for {
  7929  		v_0 := v.Args[0]
  7930  		if v_0.Op != OpConst8 {
  7931  			break
  7932  		}
  7933  		if v_0.AuxInt != 0 {
  7934  			break
  7935  		}
  7936  		x := v.Args[1]
  7937  		v.reset(OpCopy)
  7938  		v.Type = x.Type
  7939  		v.AddArg(x)
  7940  		return true
  7941  	}
  7942  	// match: (Or8  (Const8  [-1]) _)
  7943  	// cond:
  7944  	// result: (Const8  [-1])
  7945  	for {
  7946  		v_0 := v.Args[0]
  7947  		if v_0.Op != OpConst8 {
  7948  			break
  7949  		}
  7950  		if v_0.AuxInt != -1 {
  7951  			break
  7952  		}
  7953  		v.reset(OpConst8)
  7954  		v.AuxInt = -1
  7955  		return true
  7956  	}
  7957  	// match: (Or8  x (Or8  x y))
  7958  	// cond:
  7959  	// result: (Or8  x y)
  7960  	for {
  7961  		x := v.Args[0]
  7962  		v_1 := v.Args[1]
  7963  		if v_1.Op != OpOr8 {
  7964  			break
  7965  		}
  7966  		if x != v_1.Args[0] {
  7967  			break
  7968  		}
  7969  		y := v_1.Args[1]
  7970  		v.reset(OpOr8)
  7971  		v.AddArg(x)
  7972  		v.AddArg(y)
  7973  		return true
  7974  	}
  7975  	// match: (Or8  x (Or8  y x))
  7976  	// cond:
  7977  	// result: (Or8  x y)
  7978  	for {
  7979  		x := v.Args[0]
  7980  		v_1 := v.Args[1]
  7981  		if v_1.Op != OpOr8 {
  7982  			break
  7983  		}
  7984  		y := v_1.Args[0]
  7985  		if x != v_1.Args[1] {
  7986  			break
  7987  		}
  7988  		v.reset(OpOr8)
  7989  		v.AddArg(x)
  7990  		v.AddArg(y)
  7991  		return true
  7992  	}
  7993  	// match: (Or8  (Or8  x y) x)
  7994  	// cond:
  7995  	// result: (Or8  x y)
  7996  	for {
  7997  		v_0 := v.Args[0]
  7998  		if v_0.Op != OpOr8 {
  7999  			break
  8000  		}
  8001  		x := v_0.Args[0]
  8002  		y := v_0.Args[1]
  8003  		if x != v.Args[1] {
  8004  			break
  8005  		}
  8006  		v.reset(OpOr8)
  8007  		v.AddArg(x)
  8008  		v.AddArg(y)
  8009  		return true
  8010  	}
  8011  	// match: (Or8  (Or8  x y) y)
  8012  	// cond:
  8013  	// result: (Or8  x y)
  8014  	for {
  8015  		v_0 := v.Args[0]
  8016  		if v_0.Op != OpOr8 {
  8017  			break
  8018  		}
  8019  		x := v_0.Args[0]
  8020  		y := v_0.Args[1]
  8021  		if y != v.Args[1] {
  8022  			break
  8023  		}
  8024  		v.reset(OpOr8)
  8025  		v.AddArg(x)
  8026  		v.AddArg(y)
  8027  		return true
  8028  	}
  8029  	return false
  8030  }
  8031  func rewriteValuegeneric_OpPhi(v *Value, config *Config) bool {
  8032  	b := v.Block
  8033  	_ = b
  8034  	// match: (Phi (Const8  [c]) (Const8  [c]))
  8035  	// cond:
  8036  	// result: (Const8  [c])
  8037  	for {
  8038  		v_0 := v.Args[0]
  8039  		if v_0.Op != OpConst8 {
  8040  			break
  8041  		}
  8042  		c := v_0.AuxInt
  8043  		v_1 := v.Args[1]
  8044  		if v_1.Op != OpConst8 {
  8045  			break
  8046  		}
  8047  		if v_1.AuxInt != c {
  8048  			break
  8049  		}
  8050  		if len(v.Args) != 2 {
  8051  			break
  8052  		}
  8053  		v.reset(OpConst8)
  8054  		v.AuxInt = c
  8055  		return true
  8056  	}
  8057  	// match: (Phi (Const16 [c]) (Const16 [c]))
  8058  	// cond:
  8059  	// result: (Const16 [c])
  8060  	for {
  8061  		v_0 := v.Args[0]
  8062  		if v_0.Op != OpConst16 {
  8063  			break
  8064  		}
  8065  		c := v_0.AuxInt
  8066  		v_1 := v.Args[1]
  8067  		if v_1.Op != OpConst16 {
  8068  			break
  8069  		}
  8070  		if v_1.AuxInt != c {
  8071  			break
  8072  		}
  8073  		if len(v.Args) != 2 {
  8074  			break
  8075  		}
  8076  		v.reset(OpConst16)
  8077  		v.AuxInt = c
  8078  		return true
  8079  	}
  8080  	// match: (Phi (Const32 [c]) (Const32 [c]))
  8081  	// cond:
  8082  	// result: (Const32 [c])
  8083  	for {
  8084  		v_0 := v.Args[0]
  8085  		if v_0.Op != OpConst32 {
  8086  			break
  8087  		}
  8088  		c := v_0.AuxInt
  8089  		v_1 := v.Args[1]
  8090  		if v_1.Op != OpConst32 {
  8091  			break
  8092  		}
  8093  		if v_1.AuxInt != c {
  8094  			break
  8095  		}
  8096  		if len(v.Args) != 2 {
  8097  			break
  8098  		}
  8099  		v.reset(OpConst32)
  8100  		v.AuxInt = c
  8101  		return true
  8102  	}
  8103  	// match: (Phi (Const64 [c]) (Const64 [c]))
  8104  	// cond:
  8105  	// result: (Const64 [c])
  8106  	for {
  8107  		v_0 := v.Args[0]
  8108  		if v_0.Op != OpConst64 {
  8109  			break
  8110  		}
  8111  		c := v_0.AuxInt
  8112  		v_1 := v.Args[1]
  8113  		if v_1.Op != OpConst64 {
  8114  			break
  8115  		}
  8116  		if v_1.AuxInt != c {
  8117  			break
  8118  		}
  8119  		if len(v.Args) != 2 {
  8120  			break
  8121  		}
  8122  		v.reset(OpConst64)
  8123  		v.AuxInt = c
  8124  		return true
  8125  	}
  8126  	return false
  8127  }
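// A Phi whose arguments are all the same constant folds to that constant. The
// explicit len(v.Args) != 2 check limits each rule to two-predecessor merges,
// because the pattern only examined the first two arguments.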
  8128  func rewriteValuegeneric_OpPtrIndex(v *Value, config *Config) bool {
  8129  	b := v.Block
  8130  	_ = b
  8131  	// match: (PtrIndex <t> ptr idx)
  8132  	// cond: config.PtrSize == 4
  8133  	// result: (AddPtr ptr (Mul32 <config.fe.TypeInt()> idx (Const32 <config.fe.TypeInt()> [t.ElemType().Size()])))
  8134  	for {
  8135  		t := v.Type
  8136  		ptr := v.Args[0]
  8137  		idx := v.Args[1]
  8138  		if !(config.PtrSize == 4) {
  8139  			break
  8140  		}
  8141  		v.reset(OpAddPtr)
  8142  		v.AddArg(ptr)
  8143  		v0 := b.NewValue0(v.Pos, OpMul32, config.fe.TypeInt())
  8144  		v0.AddArg(idx)
  8145  		v1 := b.NewValue0(v.Pos, OpConst32, config.fe.TypeInt())
  8146  		v1.AuxInt = t.ElemType().Size()
  8147  		v0.AddArg(v1)
  8148  		v.AddArg(v0)
  8149  		return true
  8150  	}
  8151  	// match: (PtrIndex <t> ptr idx)
  8152  	// cond: config.PtrSize == 8
  8153  	// result: (AddPtr ptr (Mul64 <config.fe.TypeInt()> idx (Const64 <config.fe.TypeInt()> [t.ElemType().Size()])))
  8154  	for {
  8155  		t := v.Type
  8156  		ptr := v.Args[0]
  8157  		idx := v.Args[1]
  8158  		if !(config.PtrSize == 8) {
  8159  			break
  8160  		}
  8161  		v.reset(OpAddPtr)
  8162  		v.AddArg(ptr)
  8163  		v0 := b.NewValue0(v.Pos, OpMul64, config.fe.TypeInt())
  8164  		v0.AddArg(idx)
  8165  		v1 := b.NewValue0(v.Pos, OpConst64, config.fe.TypeInt())
  8166  		v1.AuxInt = t.ElemType().Size()
  8167  		v0.AddArg(v1)
  8168  		v.AddArg(v0)
  8169  		return true
  8170  	}
  8171  	return false
  8172  }
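// PtrIndex is lowered to explicit address arithmetic, ptr + idx*elemsize, with the
// multiply width chosen to match config.PtrSize (Mul32 on 4-byte-pointer targets,
// Mul64 on 8-byte ones). For 8-byte elements on a 64-bit target the rewrite is, in
// rule notation (result types elided):
//
//	(PtrIndex <t> ptr idx) -> (AddPtr ptr (Mul64 idx (Const64 [8])))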
  8173  func rewriteValuegeneric_OpRsh16Ux16(v *Value, config *Config) bool {
  8174  	b := v.Block
  8175  	_ = b
  8176  	// match: (Rsh16Ux16 <t> x (Const16 [c]))
  8177  	// cond:
  8178  	// result: (Rsh16Ux64 x (Const64 <t> [int64(uint16(c))]))
  8179  	for {
  8180  		t := v.Type
  8181  		x := v.Args[0]
  8182  		v_1 := v.Args[1]
  8183  		if v_1.Op != OpConst16 {
  8184  			break
  8185  		}
  8186  		c := v_1.AuxInt
  8187  		v.reset(OpRsh16Ux64)
  8188  		v.AddArg(x)
  8189  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  8190  		v0.AuxInt = int64(uint16(c))
  8191  		v.AddArg(v0)
  8192  		return true
  8193  	}
  8194  	// match: (Rsh16Ux16 (Const16 [0]) _)
  8195  	// cond:
  8196  	// result: (Const16 [0])
  8197  	for {
  8198  		v_0 := v.Args[0]
  8199  		if v_0.Op != OpConst16 {
  8200  			break
  8201  		}
  8202  		if v_0.AuxInt != 0 {
  8203  			break
  8204  		}
  8205  		v.reset(OpConst16)
  8206  		v.AuxInt = 0
  8207  		return true
  8208  	}
  8209  	return false
  8210  }
  8211  func rewriteValuegeneric_OpRsh16Ux32(v *Value, config *Config) bool {
  8212  	b := v.Block
  8213  	_ = b
  8214  	// match: (Rsh16Ux32 <t> x (Const32 [c]))
  8215  	// cond:
  8216  	// result: (Rsh16Ux64 x (Const64 <t> [int64(uint32(c))]))
  8217  	for {
  8218  		t := v.Type
  8219  		x := v.Args[0]
  8220  		v_1 := v.Args[1]
  8221  		if v_1.Op != OpConst32 {
  8222  			break
  8223  		}
  8224  		c := v_1.AuxInt
  8225  		v.reset(OpRsh16Ux64)
  8226  		v.AddArg(x)
  8227  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  8228  		v0.AuxInt = int64(uint32(c))
  8229  		v.AddArg(v0)
  8230  		return true
  8231  	}
  8232  	// match: (Rsh16Ux32 (Const16 [0]) _)
  8233  	// cond:
  8234  	// result: (Const16 [0])
  8235  	for {
  8236  		v_0 := v.Args[0]
  8237  		if v_0.Op != OpConst16 {
  8238  			break
  8239  		}
  8240  		if v_0.AuxInt != 0 {
  8241  			break
  8242  		}
  8243  		v.reset(OpConst16)
  8244  		v.AuxInt = 0
  8245  		return true
  8246  	}
  8247  	return false
  8248  }
  8249  func rewriteValuegeneric_OpRsh16Ux64(v *Value, config *Config) bool {
  8250  	b := v.Block
  8251  	_ = b
  8252  	// match: (Rsh16Ux64 (Const16 [c]) (Const64 [d]))
  8253  	// cond:
  8254  	// result: (Const16 [int64(int16(uint16(c) >> uint64(d)))])
  8255  	for {
  8256  		v_0 := v.Args[0]
  8257  		if v_0.Op != OpConst16 {
  8258  			break
  8259  		}
  8260  		c := v_0.AuxInt
  8261  		v_1 := v.Args[1]
  8262  		if v_1.Op != OpConst64 {
  8263  			break
  8264  		}
  8265  		d := v_1.AuxInt
  8266  		v.reset(OpConst16)
  8267  		v.AuxInt = int64(int16(uint16(c) >> uint64(d)))
  8268  		return true
  8269  	}
  8270  	// match: (Rsh16Ux64 x (Const64 [0]))
  8271  	// cond:
  8272  	// result: x
  8273  	for {
  8274  		x := v.Args[0]
  8275  		v_1 := v.Args[1]
  8276  		if v_1.Op != OpConst64 {
  8277  			break
  8278  		}
  8279  		if v_1.AuxInt != 0 {
  8280  			break
  8281  		}
  8282  		v.reset(OpCopy)
  8283  		v.Type = x.Type
  8284  		v.AddArg(x)
  8285  		return true
  8286  	}
  8287  	// match: (Rsh16Ux64 (Const16 [0]) _)
  8288  	// cond:
  8289  	// result: (Const16 [0])
  8290  	for {
  8291  		v_0 := v.Args[0]
  8292  		if v_0.Op != OpConst16 {
  8293  			break
  8294  		}
  8295  		if v_0.AuxInt != 0 {
  8296  			break
  8297  		}
  8298  		v.reset(OpConst16)
  8299  		v.AuxInt = 0
  8300  		return true
  8301  	}
  8302  	// match: (Rsh16Ux64 _ (Const64 [c]))
  8303  	// cond: uint64(c) >= 16
  8304  	// result: (Const16 [0])
  8305  	for {
  8306  		v_1 := v.Args[1]
  8307  		if v_1.Op != OpConst64 {
  8308  			break
  8309  		}
  8310  		c := v_1.AuxInt
  8311  		if !(uint64(c) >= 16) {
  8312  			break
  8313  		}
  8314  		v.reset(OpConst16)
  8315  		v.AuxInt = 0
  8316  		return true
  8317  	}
  8318  	// match: (Rsh16Ux64 <t> (Rsh16Ux64 x (Const64 [c])) (Const64 [d]))
  8319  	// cond: !uaddOvf(c,d)
  8320  	// result: (Rsh16Ux64 x (Const64 <t> [c+d]))
  8321  	for {
  8322  		t := v.Type
  8323  		v_0 := v.Args[0]
  8324  		if v_0.Op != OpRsh16Ux64 {
  8325  			break
  8326  		}
  8327  		x := v_0.Args[0]
  8328  		v_0_1 := v_0.Args[1]
  8329  		if v_0_1.Op != OpConst64 {
  8330  			break
  8331  		}
  8332  		c := v_0_1.AuxInt
  8333  		v_1 := v.Args[1]
  8334  		if v_1.Op != OpConst64 {
  8335  			break
  8336  		}
  8337  		d := v_1.AuxInt
  8338  		if !(!uaddOvf(c, d)) {
  8339  			break
  8340  		}
  8341  		v.reset(OpRsh16Ux64)
  8342  		v.AddArg(x)
  8343  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  8344  		v0.AuxInt = c + d
  8345  		v.AddArg(v0)
  8346  		return true
  8347  	}
  8348  	// match: (Rsh16Ux64 (Lsh16x64 (Rsh16Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
  8349  	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
  8350  	// result: (Rsh16Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
  8351  	for {
  8352  		v_0 := v.Args[0]
  8353  		if v_0.Op != OpLsh16x64 {
  8354  			break
  8355  		}
  8356  		v_0_0 := v_0.Args[0]
  8357  		if v_0_0.Op != OpRsh16Ux64 {
  8358  			break
  8359  		}
  8360  		x := v_0_0.Args[0]
  8361  		v_0_0_1 := v_0_0.Args[1]
  8362  		if v_0_0_1.Op != OpConst64 {
  8363  			break
  8364  		}
  8365  		c1 := v_0_0_1.AuxInt
  8366  		v_0_1 := v_0.Args[1]
  8367  		if v_0_1.Op != OpConst64 {
  8368  			break
  8369  		}
  8370  		c2 := v_0_1.AuxInt
  8371  		v_1 := v.Args[1]
  8372  		if v_1.Op != OpConst64 {
  8373  			break
  8374  		}
  8375  		c3 := v_1.AuxInt
  8376  		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
  8377  			break
  8378  		}
  8379  		v.reset(OpRsh16Ux64)
  8380  		v.AddArg(x)
  8381  		v0 := b.NewValue0(v.Pos, OpConst64, config.fe.TypeUInt64())
  8382  		v0.AuxInt = c1 - c2 + c3
  8383  		v.AddArg(v0)
  8384  		return true
  8385  	}
  8386  	return false
  8387  }
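// Illustrative note (not generated): the Rsh16Ux64 rules above fold a
// fully constant shift, turn any shift by 16 or more into zero, merge
// nested shifts when the summed amounts cannot overflow (!uaddOvf), and
// collapse a right/left/right shift chain into a single right shift when
// the amounts line up. A sketch of the folding arithmetic with a
// hypothetical helper; c and d are the two AuxInt values:
func exampleFoldRsh16Ux(c, d int64) int64 {
	if uint64(d) >= 16 {
		// Matches the rule that a shift by 16 or more produces (Const16 [0]).
		return 0
	}
	// Shift the zero-extended 16-bit pattern, then re-sign-extend the
	// 16-bit result back into the int64 AuxInt encoding.
	return int64(int16(uint16(c) >> uint64(d)))
}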
  8388  func rewriteValuegeneric_OpRsh16Ux8(v *Value, config *Config) bool {
  8389  	b := v.Block
  8390  	_ = b
  8391  	// match: (Rsh16Ux8  <t> x (Const8  [c]))
  8392  	// cond:
  8393  	// result: (Rsh16Ux64 x (Const64 <t> [int64(uint8(c))]))
  8394  	for {
  8395  		t := v.Type
  8396  		x := v.Args[0]
  8397  		v_1 := v.Args[1]
  8398  		if v_1.Op != OpConst8 {
  8399  			break
  8400  		}
  8401  		c := v_1.AuxInt
  8402  		v.reset(OpRsh16Ux64)
  8403  		v.AddArg(x)
  8404  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  8405  		v0.AuxInt = int64(uint8(c))
  8406  		v.AddArg(v0)
  8407  		return true
  8408  	}
  8409  	// match: (Rsh16Ux8 (Const16 [0]) _)
  8410  	// cond:
  8411  	// result: (Const16 [0])
  8412  	for {
  8413  		v_0 := v.Args[0]
  8414  		if v_0.Op != OpConst16 {
  8415  			break
  8416  		}
  8417  		if v_0.AuxInt != 0 {
  8418  			break
  8419  		}
  8420  		v.reset(OpConst16)
  8421  		v.AuxInt = 0
  8422  		return true
  8423  	}
  8424  	return false
  8425  }
  8426  func rewriteValuegeneric_OpRsh16x16(v *Value, config *Config) bool {
  8427  	b := v.Block
  8428  	_ = b
  8429  	// match: (Rsh16x16  <t> x (Const16 [c]))
  8430  	// cond:
  8431  	// result: (Rsh16x64  x (Const64 <t> [int64(uint16(c))]))
  8432  	for {
  8433  		t := v.Type
  8434  		x := v.Args[0]
  8435  		v_1 := v.Args[1]
  8436  		if v_1.Op != OpConst16 {
  8437  			break
  8438  		}
  8439  		c := v_1.AuxInt
  8440  		v.reset(OpRsh16x64)
  8441  		v.AddArg(x)
  8442  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  8443  		v0.AuxInt = int64(uint16(c))
  8444  		v.AddArg(v0)
  8445  		return true
  8446  	}
  8447  	// match: (Rsh16x16  (Const16 [0]) _)
  8448  	// cond:
  8449  	// result: (Const16 [0])
  8450  	for {
  8451  		v_0 := v.Args[0]
  8452  		if v_0.Op != OpConst16 {
  8453  			break
  8454  		}
  8455  		if v_0.AuxInt != 0 {
  8456  			break
  8457  		}
  8458  		v.reset(OpConst16)
  8459  		v.AuxInt = 0
  8460  		return true
  8461  	}
  8462  	return false
  8463  }
  8464  func rewriteValuegeneric_OpRsh16x32(v *Value, config *Config) bool {
  8465  	b := v.Block
  8466  	_ = b
  8467  	// match: (Rsh16x32  <t> x (Const32 [c]))
  8468  	// cond:
  8469  	// result: (Rsh16x64  x (Const64 <t> [int64(uint32(c))]))
  8470  	for {
  8471  		t := v.Type
  8472  		x := v.Args[0]
  8473  		v_1 := v.Args[1]
  8474  		if v_1.Op != OpConst32 {
  8475  			break
  8476  		}
  8477  		c := v_1.AuxInt
  8478  		v.reset(OpRsh16x64)
  8479  		v.AddArg(x)
  8480  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  8481  		v0.AuxInt = int64(uint32(c))
  8482  		v.AddArg(v0)
  8483  		return true
  8484  	}
  8485  	// match: (Rsh16x32  (Const16 [0]) _)
  8486  	// cond:
  8487  	// result: (Const16 [0])
  8488  	for {
  8489  		v_0 := v.Args[0]
  8490  		if v_0.Op != OpConst16 {
  8491  			break
  8492  		}
  8493  		if v_0.AuxInt != 0 {
  8494  			break
  8495  		}
  8496  		v.reset(OpConst16)
  8497  		v.AuxInt = 0
  8498  		return true
  8499  	}
  8500  	return false
  8501  }
  8502  func rewriteValuegeneric_OpRsh16x64(v *Value, config *Config) bool {
  8503  	b := v.Block
  8504  	_ = b
  8505  	// match: (Rsh16x64  (Const16 [c]) (Const64 [d]))
  8506  	// cond:
  8507  	// result: (Const16 [int64(int16(c) >> uint64(d))])
  8508  	for {
  8509  		v_0 := v.Args[0]
  8510  		if v_0.Op != OpConst16 {
  8511  			break
  8512  		}
  8513  		c := v_0.AuxInt
  8514  		v_1 := v.Args[1]
  8515  		if v_1.Op != OpConst64 {
  8516  			break
  8517  		}
  8518  		d := v_1.AuxInt
  8519  		v.reset(OpConst16)
  8520  		v.AuxInt = int64(int16(c) >> uint64(d))
  8521  		return true
  8522  	}
  8523  	// match: (Rsh16x64  x (Const64 [0]))
  8524  	// cond:
  8525  	// result: x
  8526  	for {
  8527  		x := v.Args[0]
  8528  		v_1 := v.Args[1]
  8529  		if v_1.Op != OpConst64 {
  8530  			break
  8531  		}
  8532  		if v_1.AuxInt != 0 {
  8533  			break
  8534  		}
  8535  		v.reset(OpCopy)
  8536  		v.Type = x.Type
  8537  		v.AddArg(x)
  8538  		return true
  8539  	}
  8540  	// match: (Rsh16x64  (Const16 [0]) _)
  8541  	// cond:
  8542  	// result: (Const16 [0])
  8543  	for {
  8544  		v_0 := v.Args[0]
  8545  		if v_0.Op != OpConst16 {
  8546  			break
  8547  		}
  8548  		if v_0.AuxInt != 0 {
  8549  			break
  8550  		}
  8551  		v.reset(OpConst16)
  8552  		v.AuxInt = 0
  8553  		return true
  8554  	}
  8555  	// match: (Rsh16x64 <t> (Rsh16x64 x (Const64 [c])) (Const64 [d]))
  8556  	// cond: !uaddOvf(c,d)
  8557  	// result: (Rsh16x64 x (Const64 <t> [c+d]))
  8558  	for {
  8559  		t := v.Type
  8560  		v_0 := v.Args[0]
  8561  		if v_0.Op != OpRsh16x64 {
  8562  			break
  8563  		}
  8564  		x := v_0.Args[0]
  8565  		v_0_1 := v_0.Args[1]
  8566  		if v_0_1.Op != OpConst64 {
  8567  			break
  8568  		}
  8569  		c := v_0_1.AuxInt
  8570  		v_1 := v.Args[1]
  8571  		if v_1.Op != OpConst64 {
  8572  			break
  8573  		}
  8574  		d := v_1.AuxInt
  8575  		if !(!uaddOvf(c, d)) {
  8576  			break
  8577  		}
  8578  		v.reset(OpRsh16x64)
  8579  		v.AddArg(x)
  8580  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  8581  		v0.AuxInt = c + d
  8582  		v.AddArg(v0)
  8583  		return true
  8584  	}
  8585  	return false
  8586  }
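// Illustrative note (not generated): the signed Rsh16x64 rules mirror the
// unsigned ones, but there is deliberately no "shift by 16 or more gives
// zero" rule here, because an arithmetic shift replicates the sign bit
// and so yields 0 or -1 depending on the input's sign. A sketch of the
// constant fold with a hypothetical helper; c and d are AuxInt values:
func exampleFoldRsh16x(c, d int64) int64 {
	// int16(c) recovers the signed 16-bit value from the AuxInt; the
	// arithmetic shift keeps the sign, e.g. c = -1 stays -1.
	return int64(int16(c) >> uint64(d))
}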
  8587  func rewriteValuegeneric_OpRsh16x8(v *Value, config *Config) bool {
  8588  	b := v.Block
  8589  	_ = b
  8590  	// match: (Rsh16x8   <t> x (Const8  [c]))
  8591  	// cond:
  8592  	// result: (Rsh16x64  x (Const64 <t> [int64(uint8(c))]))
  8593  	for {
  8594  		t := v.Type
  8595  		x := v.Args[0]
  8596  		v_1 := v.Args[1]
  8597  		if v_1.Op != OpConst8 {
  8598  			break
  8599  		}
  8600  		c := v_1.AuxInt
  8601  		v.reset(OpRsh16x64)
  8602  		v.AddArg(x)
  8603  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  8604  		v0.AuxInt = int64(uint8(c))
  8605  		v.AddArg(v0)
  8606  		return true
  8607  	}
  8608  	// match: (Rsh16x8  (Const16 [0]) _)
  8609  	// cond:
  8610  	// result: (Const16 [0])
  8611  	for {
  8612  		v_0 := v.Args[0]
  8613  		if v_0.Op != OpConst16 {
  8614  			break
  8615  		}
  8616  		if v_0.AuxInt != 0 {
  8617  			break
  8618  		}
  8619  		v.reset(OpConst16)
  8620  		v.AuxInt = 0
  8621  		return true
  8622  	}
  8623  	return false
  8624  }
  8625  func rewriteValuegeneric_OpRsh32Ux16(v *Value, config *Config) bool {
  8626  	b := v.Block
  8627  	_ = b
  8628  	// match: (Rsh32Ux16 <t> x (Const16 [c]))
  8629  	// cond:
  8630  	// result: (Rsh32Ux64 x (Const64 <t> [int64(uint16(c))]))
  8631  	for {
  8632  		t := v.Type
  8633  		x := v.Args[0]
  8634  		v_1 := v.Args[1]
  8635  		if v_1.Op != OpConst16 {
  8636  			break
  8637  		}
  8638  		c := v_1.AuxInt
  8639  		v.reset(OpRsh32Ux64)
  8640  		v.AddArg(x)
  8641  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  8642  		v0.AuxInt = int64(uint16(c))
  8643  		v.AddArg(v0)
  8644  		return true
  8645  	}
  8646  	// match: (Rsh32Ux16 (Const32 [0]) _)
  8647  	// cond:
  8648  	// result: (Const32 [0])
  8649  	for {
  8650  		v_0 := v.Args[0]
  8651  		if v_0.Op != OpConst32 {
  8652  			break
  8653  		}
  8654  		if v_0.AuxInt != 0 {
  8655  			break
  8656  		}
  8657  		v.reset(OpConst32)
  8658  		v.AuxInt = 0
  8659  		return true
  8660  	}
  8661  	return false
  8662  }
  8663  func rewriteValuegeneric_OpRsh32Ux32(v *Value, config *Config) bool {
  8664  	b := v.Block
  8665  	_ = b
  8666  	// match: (Rsh32Ux32 <t> x (Const32 [c]))
  8667  	// cond:
  8668  	// result: (Rsh32Ux64 x (Const64 <t> [int64(uint32(c))]))
  8669  	for {
  8670  		t := v.Type
  8671  		x := v.Args[0]
  8672  		v_1 := v.Args[1]
  8673  		if v_1.Op != OpConst32 {
  8674  			break
  8675  		}
  8676  		c := v_1.AuxInt
  8677  		v.reset(OpRsh32Ux64)
  8678  		v.AddArg(x)
  8679  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  8680  		v0.AuxInt = int64(uint32(c))
  8681  		v.AddArg(v0)
  8682  		return true
  8683  	}
  8684  	// match: (Rsh32Ux32 (Const32 [0]) _)
  8685  	// cond:
  8686  	// result: (Const32 [0])
  8687  	for {
  8688  		v_0 := v.Args[0]
  8689  		if v_0.Op != OpConst32 {
  8690  			break
  8691  		}
  8692  		if v_0.AuxInt != 0 {
  8693  			break
  8694  		}
  8695  		v.reset(OpConst32)
  8696  		v.AuxInt = 0
  8697  		return true
  8698  	}
  8699  	return false
  8700  }
  8701  func rewriteValuegeneric_OpRsh32Ux64(v *Value, config *Config) bool {
  8702  	b := v.Block
  8703  	_ = b
  8704  	// match: (Rsh32Ux64 (Const32 [c]) (Const64 [d]))
  8705  	// cond:
  8706  	// result: (Const32 [int64(int32(uint32(c) >> uint64(d)))])
  8707  	for {
  8708  		v_0 := v.Args[0]
  8709  		if v_0.Op != OpConst32 {
  8710  			break
  8711  		}
  8712  		c := v_0.AuxInt
  8713  		v_1 := v.Args[1]
  8714  		if v_1.Op != OpConst64 {
  8715  			break
  8716  		}
  8717  		d := v_1.AuxInt
  8718  		v.reset(OpConst32)
  8719  		v.AuxInt = int64(int32(uint32(c) >> uint64(d)))
  8720  		return true
  8721  	}
  8722  	// match: (Rsh32Ux64 x (Const64 [0]))
  8723  	// cond:
  8724  	// result: x
  8725  	for {
  8726  		x := v.Args[0]
  8727  		v_1 := v.Args[1]
  8728  		if v_1.Op != OpConst64 {
  8729  			break
  8730  		}
  8731  		if v_1.AuxInt != 0 {
  8732  			break
  8733  		}
  8734  		v.reset(OpCopy)
  8735  		v.Type = x.Type
  8736  		v.AddArg(x)
  8737  		return true
  8738  	}
  8739  	// match: (Rsh32Ux64 (Const32 [0]) _)
  8740  	// cond:
  8741  	// result: (Const32 [0])
  8742  	for {
  8743  		v_0 := v.Args[0]
  8744  		if v_0.Op != OpConst32 {
  8745  			break
  8746  		}
  8747  		if v_0.AuxInt != 0 {
  8748  			break
  8749  		}
  8750  		v.reset(OpConst32)
  8751  		v.AuxInt = 0
  8752  		return true
  8753  	}
  8754  	// match: (Rsh32Ux64 _ (Const64 [c]))
  8755  	// cond: uint64(c) >= 32
  8756  	// result: (Const32 [0])
  8757  	for {
  8758  		v_1 := v.Args[1]
  8759  		if v_1.Op != OpConst64 {
  8760  			break
  8761  		}
  8762  		c := v_1.AuxInt
  8763  		if !(uint64(c) >= 32) {
  8764  			break
  8765  		}
  8766  		v.reset(OpConst32)
  8767  		v.AuxInt = 0
  8768  		return true
  8769  	}
  8770  	// match: (Rsh32Ux64 <t> (Rsh32Ux64 x (Const64 [c])) (Const64 [d]))
  8771  	// cond: !uaddOvf(c,d)
  8772  	// result: (Rsh32Ux64 x (Const64 <t> [c+d]))
  8773  	for {
  8774  		t := v.Type
  8775  		v_0 := v.Args[0]
  8776  		if v_0.Op != OpRsh32Ux64 {
  8777  			break
  8778  		}
  8779  		x := v_0.Args[0]
  8780  		v_0_1 := v_0.Args[1]
  8781  		if v_0_1.Op != OpConst64 {
  8782  			break
  8783  		}
  8784  		c := v_0_1.AuxInt
  8785  		v_1 := v.Args[1]
  8786  		if v_1.Op != OpConst64 {
  8787  			break
  8788  		}
  8789  		d := v_1.AuxInt
  8790  		if !(!uaddOvf(c, d)) {
  8791  			break
  8792  		}
  8793  		v.reset(OpRsh32Ux64)
  8794  		v.AddArg(x)
  8795  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  8796  		v0.AuxInt = c + d
  8797  		v.AddArg(v0)
  8798  		return true
  8799  	}
  8800  	// match: (Rsh32Ux64 (Lsh32x64 (Rsh32Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
  8801  	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
  8802  	// result: (Rsh32Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
  8803  	for {
  8804  		v_0 := v.Args[0]
  8805  		if v_0.Op != OpLsh32x64 {
  8806  			break
  8807  		}
  8808  		v_0_0 := v_0.Args[0]
  8809  		if v_0_0.Op != OpRsh32Ux64 {
  8810  			break
  8811  		}
  8812  		x := v_0_0.Args[0]
  8813  		v_0_0_1 := v_0_0.Args[1]
  8814  		if v_0_0_1.Op != OpConst64 {
  8815  			break
  8816  		}
  8817  		c1 := v_0_0_1.AuxInt
  8818  		v_0_1 := v_0.Args[1]
  8819  		if v_0_1.Op != OpConst64 {
  8820  			break
  8821  		}
  8822  		c2 := v_0_1.AuxInt
  8823  		v_1 := v.Args[1]
  8824  		if v_1.Op != OpConst64 {
  8825  			break
  8826  		}
  8827  		c3 := v_1.AuxInt
  8828  		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
  8829  			break
  8830  		}
  8831  		v.reset(OpRsh32Ux64)
  8832  		v.AddArg(x)
  8833  		v0 := b.NewValue0(v.Pos, OpConst64, config.fe.TypeUInt64())
  8834  		v0.AuxInt = c1 - c2 + c3
  8835  		v.AddArg(v0)
  8836  		return true
  8837  	}
  8838  	return false
  8839  }
  8840  func rewriteValuegeneric_OpRsh32Ux8(v *Value, config *Config) bool {
  8841  	b := v.Block
  8842  	_ = b
  8843  	// match: (Rsh32Ux8  <t> x (Const8  [c]))
  8844  	// cond:
  8845  	// result: (Rsh32Ux64 x (Const64 <t> [int64(uint8(c))]))
  8846  	for {
  8847  		t := v.Type
  8848  		x := v.Args[0]
  8849  		v_1 := v.Args[1]
  8850  		if v_1.Op != OpConst8 {
  8851  			break
  8852  		}
  8853  		c := v_1.AuxInt
  8854  		v.reset(OpRsh32Ux64)
  8855  		v.AddArg(x)
  8856  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  8857  		v0.AuxInt = int64(uint8(c))
  8858  		v.AddArg(v0)
  8859  		return true
  8860  	}
  8861  	// match: (Rsh32Ux8 (Const32 [0]) _)
  8862  	// cond:
  8863  	// result: (Const32 [0])
  8864  	for {
  8865  		v_0 := v.Args[0]
  8866  		if v_0.Op != OpConst32 {
  8867  			break
  8868  		}
  8869  		if v_0.AuxInt != 0 {
  8870  			break
  8871  		}
  8872  		v.reset(OpConst32)
  8873  		v.AuxInt = 0
  8874  		return true
  8875  	}
  8876  	return false
  8877  }
  8878  func rewriteValuegeneric_OpRsh32x16(v *Value, config *Config) bool {
  8879  	b := v.Block
  8880  	_ = b
  8881  	// match: (Rsh32x16  <t> x (Const16 [c]))
  8882  	// cond:
  8883  	// result: (Rsh32x64  x (Const64 <t> [int64(uint16(c))]))
  8884  	for {
  8885  		t := v.Type
  8886  		x := v.Args[0]
  8887  		v_1 := v.Args[1]
  8888  		if v_1.Op != OpConst16 {
  8889  			break
  8890  		}
  8891  		c := v_1.AuxInt
  8892  		v.reset(OpRsh32x64)
  8893  		v.AddArg(x)
  8894  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  8895  		v0.AuxInt = int64(uint16(c))
  8896  		v.AddArg(v0)
  8897  		return true
  8898  	}
  8899  	// match: (Rsh32x16  (Const32 [0]) _)
  8900  	// cond:
  8901  	// result: (Const32 [0])
  8902  	for {
  8903  		v_0 := v.Args[0]
  8904  		if v_0.Op != OpConst32 {
  8905  			break
  8906  		}
  8907  		if v_0.AuxInt != 0 {
  8908  			break
  8909  		}
  8910  		v.reset(OpConst32)
  8911  		v.AuxInt = 0
  8912  		return true
  8913  	}
  8914  	return false
  8915  }
  8916  func rewriteValuegeneric_OpRsh32x32(v *Value, config *Config) bool {
  8917  	b := v.Block
  8918  	_ = b
  8919  	// match: (Rsh32x32  <t> x (Const32 [c]))
  8920  	// cond:
  8921  	// result: (Rsh32x64  x (Const64 <t> [int64(uint32(c))]))
  8922  	for {
  8923  		t := v.Type
  8924  		x := v.Args[0]
  8925  		v_1 := v.Args[1]
  8926  		if v_1.Op != OpConst32 {
  8927  			break
  8928  		}
  8929  		c := v_1.AuxInt
  8930  		v.reset(OpRsh32x64)
  8931  		v.AddArg(x)
  8932  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  8933  		v0.AuxInt = int64(uint32(c))
  8934  		v.AddArg(v0)
  8935  		return true
  8936  	}
  8937  	// match: (Rsh32x32  (Const32 [0]) _)
  8938  	// cond:
  8939  	// result: (Const32 [0])
  8940  	for {
  8941  		v_0 := v.Args[0]
  8942  		if v_0.Op != OpConst32 {
  8943  			break
  8944  		}
  8945  		if v_0.AuxInt != 0 {
  8946  			break
  8947  		}
  8948  		v.reset(OpConst32)
  8949  		v.AuxInt = 0
  8950  		return true
  8951  	}
  8952  	return false
  8953  }
  8954  func rewriteValuegeneric_OpRsh32x64(v *Value, config *Config) bool {
  8955  	b := v.Block
  8956  	_ = b
  8957  	// match: (Rsh32x64  (Const32 [c]) (Const64 [d]))
  8958  	// cond:
  8959  	// result: (Const32 [int64(int32(c) >> uint64(d))])
  8960  	for {
  8961  		v_0 := v.Args[0]
  8962  		if v_0.Op != OpConst32 {
  8963  			break
  8964  		}
  8965  		c := v_0.AuxInt
  8966  		v_1 := v.Args[1]
  8967  		if v_1.Op != OpConst64 {
  8968  			break
  8969  		}
  8970  		d := v_1.AuxInt
  8971  		v.reset(OpConst32)
  8972  		v.AuxInt = int64(int32(c) >> uint64(d))
  8973  		return true
  8974  	}
  8975  	// match: (Rsh32x64  x (Const64 [0]))
  8976  	// cond:
  8977  	// result: x
  8978  	for {
  8979  		x := v.Args[0]
  8980  		v_1 := v.Args[1]
  8981  		if v_1.Op != OpConst64 {
  8982  			break
  8983  		}
  8984  		if v_1.AuxInt != 0 {
  8985  			break
  8986  		}
  8987  		v.reset(OpCopy)
  8988  		v.Type = x.Type
  8989  		v.AddArg(x)
  8990  		return true
  8991  	}
  8992  	// match: (Rsh32x64  (Const32 [0]) _)
  8993  	// cond:
  8994  	// result: (Const32 [0])
  8995  	for {
  8996  		v_0 := v.Args[0]
  8997  		if v_0.Op != OpConst32 {
  8998  			break
  8999  		}
  9000  		if v_0.AuxInt != 0 {
  9001  			break
  9002  		}
  9003  		v.reset(OpConst32)
  9004  		v.AuxInt = 0
  9005  		return true
  9006  	}
  9007  	// match: (Rsh32x64 <t> (Rsh32x64 x (Const64 [c])) (Const64 [d]))
  9008  	// cond: !uaddOvf(c,d)
  9009  	// result: (Rsh32x64 x (Const64 <t> [c+d]))
  9010  	for {
  9011  		t := v.Type
  9012  		v_0 := v.Args[0]
  9013  		if v_0.Op != OpRsh32x64 {
  9014  			break
  9015  		}
  9016  		x := v_0.Args[0]
  9017  		v_0_1 := v_0.Args[1]
  9018  		if v_0_1.Op != OpConst64 {
  9019  			break
  9020  		}
  9021  		c := v_0_1.AuxInt
  9022  		v_1 := v.Args[1]
  9023  		if v_1.Op != OpConst64 {
  9024  			break
  9025  		}
  9026  		d := v_1.AuxInt
  9027  		if !(!uaddOvf(c, d)) {
  9028  			break
  9029  		}
  9030  		v.reset(OpRsh32x64)
  9031  		v.AddArg(x)
  9032  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  9033  		v0.AuxInt = c + d
  9034  		v.AddArg(v0)
  9035  		return true
  9036  	}
  9037  	return false
  9038  }
  9039  func rewriteValuegeneric_OpRsh32x8(v *Value, config *Config) bool {
  9040  	b := v.Block
  9041  	_ = b
  9042  	// match: (Rsh32x8   <t> x (Const8  [c]))
  9043  	// cond:
  9044  	// result: (Rsh32x64  x (Const64 <t> [int64(uint8(c))]))
  9045  	for {
  9046  		t := v.Type
  9047  		x := v.Args[0]
  9048  		v_1 := v.Args[1]
  9049  		if v_1.Op != OpConst8 {
  9050  			break
  9051  		}
  9052  		c := v_1.AuxInt
  9053  		v.reset(OpRsh32x64)
  9054  		v.AddArg(x)
  9055  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  9056  		v0.AuxInt = int64(uint8(c))
  9057  		v.AddArg(v0)
  9058  		return true
  9059  	}
  9060  	// match: (Rsh32x8  (Const32 [0]) _)
  9061  	// cond:
  9062  	// result: (Const32 [0])
  9063  	for {
  9064  		v_0 := v.Args[0]
  9065  		if v_0.Op != OpConst32 {
  9066  			break
  9067  		}
  9068  		if v_0.AuxInt != 0 {
  9069  			break
  9070  		}
  9071  		v.reset(OpConst32)
  9072  		v.AuxInt = 0
  9073  		return true
  9074  	}
  9075  	return false
  9076  }
  9077  func rewriteValuegeneric_OpRsh64Ux16(v *Value, config *Config) bool {
  9078  	b := v.Block
  9079  	_ = b
  9080  	// match: (Rsh64Ux16 <t> x (Const16 [c]))
  9081  	// cond:
  9082  	// result: (Rsh64Ux64 x (Const64 <t> [int64(uint16(c))]))
  9083  	for {
  9084  		t := v.Type
  9085  		x := v.Args[0]
  9086  		v_1 := v.Args[1]
  9087  		if v_1.Op != OpConst16 {
  9088  			break
  9089  		}
  9090  		c := v_1.AuxInt
  9091  		v.reset(OpRsh64Ux64)
  9092  		v.AddArg(x)
  9093  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  9094  		v0.AuxInt = int64(uint16(c))
  9095  		v.AddArg(v0)
  9096  		return true
  9097  	}
  9098  	// match: (Rsh64Ux16 (Const64 [0]) _)
  9099  	// cond:
  9100  	// result: (Const64 [0])
  9101  	for {
  9102  		v_0 := v.Args[0]
  9103  		if v_0.Op != OpConst64 {
  9104  			break
  9105  		}
  9106  		if v_0.AuxInt != 0 {
  9107  			break
  9108  		}
  9109  		v.reset(OpConst64)
  9110  		v.AuxInt = 0
  9111  		return true
  9112  	}
  9113  	return false
  9114  }
  9115  func rewriteValuegeneric_OpRsh64Ux32(v *Value, config *Config) bool {
  9116  	b := v.Block
  9117  	_ = b
  9118  	// match: (Rsh64Ux32 <t> x (Const32 [c]))
  9119  	// cond:
  9120  	// result: (Rsh64Ux64 x (Const64 <t> [int64(uint32(c))]))
  9121  	for {
  9122  		t := v.Type
  9123  		x := v.Args[0]
  9124  		v_1 := v.Args[1]
  9125  		if v_1.Op != OpConst32 {
  9126  			break
  9127  		}
  9128  		c := v_1.AuxInt
  9129  		v.reset(OpRsh64Ux64)
  9130  		v.AddArg(x)
  9131  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  9132  		v0.AuxInt = int64(uint32(c))
  9133  		v.AddArg(v0)
  9134  		return true
  9135  	}
  9136  	// match: (Rsh64Ux32 (Const64 [0]) _)
  9137  	// cond:
  9138  	// result: (Const64 [0])
  9139  	for {
  9140  		v_0 := v.Args[0]
  9141  		if v_0.Op != OpConst64 {
  9142  			break
  9143  		}
  9144  		if v_0.AuxInt != 0 {
  9145  			break
  9146  		}
  9147  		v.reset(OpConst64)
  9148  		v.AuxInt = 0
  9149  		return true
  9150  	}
  9151  	return false
  9152  }
  9153  func rewriteValuegeneric_OpRsh64Ux64(v *Value, config *Config) bool {
  9154  	b := v.Block
  9155  	_ = b
  9156  	// match: (Rsh64Ux64 (Const64 [c]) (Const64 [d]))
  9157  	// cond:
  9158  	// result: (Const64 [int64(uint64(c) >> uint64(d))])
  9159  	for {
  9160  		v_0 := v.Args[0]
  9161  		if v_0.Op != OpConst64 {
  9162  			break
  9163  		}
  9164  		c := v_0.AuxInt
  9165  		v_1 := v.Args[1]
  9166  		if v_1.Op != OpConst64 {
  9167  			break
  9168  		}
  9169  		d := v_1.AuxInt
  9170  		v.reset(OpConst64)
  9171  		v.AuxInt = int64(uint64(c) >> uint64(d))
  9172  		return true
  9173  	}
  9174  	// match: (Rsh64Ux64 x (Const64 [0]))
  9175  	// cond:
  9176  	// result: x
  9177  	for {
  9178  		x := v.Args[0]
  9179  		v_1 := v.Args[1]
  9180  		if v_1.Op != OpConst64 {
  9181  			break
  9182  		}
  9183  		if v_1.AuxInt != 0 {
  9184  			break
  9185  		}
  9186  		v.reset(OpCopy)
  9187  		v.Type = x.Type
  9188  		v.AddArg(x)
  9189  		return true
  9190  	}
  9191  	// match: (Rsh64Ux64 (Const64 [0]) _)
  9192  	// cond:
  9193  	// result: (Const64 [0])
  9194  	for {
  9195  		v_0 := v.Args[0]
  9196  		if v_0.Op != OpConst64 {
  9197  			break
  9198  		}
  9199  		if v_0.AuxInt != 0 {
  9200  			break
  9201  		}
  9202  		v.reset(OpConst64)
  9203  		v.AuxInt = 0
  9204  		return true
  9205  	}
  9206  	// match: (Rsh64Ux64 _ (Const64 [c]))
  9207  	// cond: uint64(c) >= 64
  9208  	// result: (Const64 [0])
  9209  	for {
  9210  		v_1 := v.Args[1]
  9211  		if v_1.Op != OpConst64 {
  9212  			break
  9213  		}
  9214  		c := v_1.AuxInt
  9215  		if !(uint64(c) >= 64) {
  9216  			break
  9217  		}
  9218  		v.reset(OpConst64)
  9219  		v.AuxInt = 0
  9220  		return true
  9221  	}
  9222  	// match: (Rsh64Ux64 <t> (Rsh64Ux64 x (Const64 [c])) (Const64 [d]))
  9223  	// cond: !uaddOvf(c,d)
  9224  	// result: (Rsh64Ux64 x (Const64 <t> [c+d]))
  9225  	for {
  9226  		t := v.Type
  9227  		v_0 := v.Args[0]
  9228  		if v_0.Op != OpRsh64Ux64 {
  9229  			break
  9230  		}
  9231  		x := v_0.Args[0]
  9232  		v_0_1 := v_0.Args[1]
  9233  		if v_0_1.Op != OpConst64 {
  9234  			break
  9235  		}
  9236  		c := v_0_1.AuxInt
  9237  		v_1 := v.Args[1]
  9238  		if v_1.Op != OpConst64 {
  9239  			break
  9240  		}
  9241  		d := v_1.AuxInt
  9242  		if !(!uaddOvf(c, d)) {
  9243  			break
  9244  		}
  9245  		v.reset(OpRsh64Ux64)
  9246  		v.AddArg(x)
  9247  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  9248  		v0.AuxInt = c + d
  9249  		v.AddArg(v0)
  9250  		return true
  9251  	}
  9252  	// match: (Rsh64Ux64 (Lsh64x64 (Rsh64Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
  9253  	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
  9254  	// result: (Rsh64Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
  9255  	for {
  9256  		v_0 := v.Args[0]
  9257  		if v_0.Op != OpLsh64x64 {
  9258  			break
  9259  		}
  9260  		v_0_0 := v_0.Args[0]
  9261  		if v_0_0.Op != OpRsh64Ux64 {
  9262  			break
  9263  		}
  9264  		x := v_0_0.Args[0]
  9265  		v_0_0_1 := v_0_0.Args[1]
  9266  		if v_0_0_1.Op != OpConst64 {
  9267  			break
  9268  		}
  9269  		c1 := v_0_0_1.AuxInt
  9270  		v_0_1 := v_0.Args[1]
  9271  		if v_0_1.Op != OpConst64 {
  9272  			break
  9273  		}
  9274  		c2 := v_0_1.AuxInt
  9275  		v_1 := v.Args[1]
  9276  		if v_1.Op != OpConst64 {
  9277  			break
  9278  		}
  9279  		c3 := v_1.AuxInt
  9280  		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
  9281  			break
  9282  		}
  9283  		v.reset(OpRsh64Ux64)
  9284  		v.AddArg(x)
  9285  		v0 := b.NewValue0(v.Pos, OpConst64, config.fe.TypeUInt64())
  9286  		v0.AuxInt = c1 - c2 + c3
  9287  		v.AddArg(v0)
  9288  		return true
  9289  	}
  9290  	return false
  9291  }
  9292  func rewriteValuegeneric_OpRsh64Ux8(v *Value, config *Config) bool {
  9293  	b := v.Block
  9294  	_ = b
  9295  	// match: (Rsh64Ux8  <t> x (Const8  [c]))
  9296  	// cond:
  9297  	// result: (Rsh64Ux64 x (Const64 <t> [int64(uint8(c))]))
  9298  	for {
  9299  		t := v.Type
  9300  		x := v.Args[0]
  9301  		v_1 := v.Args[1]
  9302  		if v_1.Op != OpConst8 {
  9303  			break
  9304  		}
  9305  		c := v_1.AuxInt
  9306  		v.reset(OpRsh64Ux64)
  9307  		v.AddArg(x)
  9308  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  9309  		v0.AuxInt = int64(uint8(c))
  9310  		v.AddArg(v0)
  9311  		return true
  9312  	}
  9313  	// match: (Rsh64Ux8 (Const64 [0]) _)
  9314  	// cond:
  9315  	// result: (Const64 [0])
  9316  	for {
  9317  		v_0 := v.Args[0]
  9318  		if v_0.Op != OpConst64 {
  9319  			break
  9320  		}
  9321  		if v_0.AuxInt != 0 {
  9322  			break
  9323  		}
  9324  		v.reset(OpConst64)
  9325  		v.AuxInt = 0
  9326  		return true
  9327  	}
  9328  	return false
  9329  }
  9330  func rewriteValuegeneric_OpRsh64x16(v *Value, config *Config) bool {
  9331  	b := v.Block
  9332  	_ = b
  9333  	// match: (Rsh64x16  <t> x (Const16 [c]))
  9334  	// cond:
  9335  	// result: (Rsh64x64  x (Const64 <t> [int64(uint16(c))]))
  9336  	for {
  9337  		t := v.Type
  9338  		x := v.Args[0]
  9339  		v_1 := v.Args[1]
  9340  		if v_1.Op != OpConst16 {
  9341  			break
  9342  		}
  9343  		c := v_1.AuxInt
  9344  		v.reset(OpRsh64x64)
  9345  		v.AddArg(x)
  9346  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  9347  		v0.AuxInt = int64(uint16(c))
  9348  		v.AddArg(v0)
  9349  		return true
  9350  	}
  9351  	// match: (Rsh64x16  (Const64 [0]) _)
  9352  	// cond:
  9353  	// result: (Const64 [0])
  9354  	for {
  9355  		v_0 := v.Args[0]
  9356  		if v_0.Op != OpConst64 {
  9357  			break
  9358  		}
  9359  		if v_0.AuxInt != 0 {
  9360  			break
  9361  		}
  9362  		v.reset(OpConst64)
  9363  		v.AuxInt = 0
  9364  		return true
  9365  	}
  9366  	return false
  9367  }
  9368  func rewriteValuegeneric_OpRsh64x32(v *Value, config *Config) bool {
  9369  	b := v.Block
  9370  	_ = b
  9371  	// match: (Rsh64x32  <t> x (Const32 [c]))
  9372  	// cond:
  9373  	// result: (Rsh64x64  x (Const64 <t> [int64(uint32(c))]))
  9374  	for {
  9375  		t := v.Type
  9376  		x := v.Args[0]
  9377  		v_1 := v.Args[1]
  9378  		if v_1.Op != OpConst32 {
  9379  			break
  9380  		}
  9381  		c := v_1.AuxInt
  9382  		v.reset(OpRsh64x64)
  9383  		v.AddArg(x)
  9384  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  9385  		v0.AuxInt = int64(uint32(c))
  9386  		v.AddArg(v0)
  9387  		return true
  9388  	}
  9389  	// match: (Rsh64x32  (Const64 [0]) _)
  9390  	// cond:
  9391  	// result: (Const64 [0])
  9392  	for {
  9393  		v_0 := v.Args[0]
  9394  		if v_0.Op != OpConst64 {
  9395  			break
  9396  		}
  9397  		if v_0.AuxInt != 0 {
  9398  			break
  9399  		}
  9400  		v.reset(OpConst64)
  9401  		v.AuxInt = 0
  9402  		return true
  9403  	}
  9404  	return false
  9405  }
  9406  func rewriteValuegeneric_OpRsh64x64(v *Value, config *Config) bool {
  9407  	b := v.Block
  9408  	_ = b
  9409  	// match: (Rsh64x64  (Const64 [c]) (Const64 [d]))
  9410  	// cond:
  9411  	// result: (Const64 [c >> uint64(d)])
  9412  	for {
  9413  		v_0 := v.Args[0]
  9414  		if v_0.Op != OpConst64 {
  9415  			break
  9416  		}
  9417  		c := v_0.AuxInt
  9418  		v_1 := v.Args[1]
  9419  		if v_1.Op != OpConst64 {
  9420  			break
  9421  		}
  9422  		d := v_1.AuxInt
  9423  		v.reset(OpConst64)
  9424  		v.AuxInt = c >> uint64(d)
  9425  		return true
  9426  	}
  9427  	// match: (Rsh64x64  x (Const64 [0]))
  9428  	// cond:
  9429  	// result: x
  9430  	for {
  9431  		x := v.Args[0]
  9432  		v_1 := v.Args[1]
  9433  		if v_1.Op != OpConst64 {
  9434  			break
  9435  		}
  9436  		if v_1.AuxInt != 0 {
  9437  			break
  9438  		}
  9439  		v.reset(OpCopy)
  9440  		v.Type = x.Type
  9441  		v.AddArg(x)
  9442  		return true
  9443  	}
  9444  	// match: (Rsh64x64  (Const64 [0]) _)
  9445  	// cond:
  9446  	// result: (Const64 [0])
  9447  	for {
  9448  		v_0 := v.Args[0]
  9449  		if v_0.Op != OpConst64 {
  9450  			break
  9451  		}
  9452  		if v_0.AuxInt != 0 {
  9453  			break
  9454  		}
  9455  		v.reset(OpConst64)
  9456  		v.AuxInt = 0
  9457  		return true
  9458  	}
  9459  	// match: (Rsh64x64 <t> (Rsh64x64 x (Const64 [c])) (Const64 [d]))
  9460  	// cond: !uaddOvf(c,d)
  9461  	// result: (Rsh64x64 x (Const64 <t> [c+d]))
  9462  	for {
  9463  		t := v.Type
  9464  		v_0 := v.Args[0]
  9465  		if v_0.Op != OpRsh64x64 {
  9466  			break
  9467  		}
  9468  		x := v_0.Args[0]
  9469  		v_0_1 := v_0.Args[1]
  9470  		if v_0_1.Op != OpConst64 {
  9471  			break
  9472  		}
  9473  		c := v_0_1.AuxInt
  9474  		v_1 := v.Args[1]
  9475  		if v_1.Op != OpConst64 {
  9476  			break
  9477  		}
  9478  		d := v_1.AuxInt
  9479  		if !(!uaddOvf(c, d)) {
  9480  			break
  9481  		}
  9482  		v.reset(OpRsh64x64)
  9483  		v.AddArg(x)
  9484  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  9485  		v0.AuxInt = c + d
  9486  		v.AddArg(v0)
  9487  		return true
  9488  	}
  9489  	return false
  9490  }
  9491  func rewriteValuegeneric_OpRsh64x8(v *Value, config *Config) bool {
  9492  	b := v.Block
  9493  	_ = b
  9494  	// match: (Rsh64x8   <t> x (Const8  [c]))
  9495  	// cond:
  9496  	// result: (Rsh64x64  x (Const64 <t> [int64(uint8(c))]))
  9497  	for {
  9498  		t := v.Type
  9499  		x := v.Args[0]
  9500  		v_1 := v.Args[1]
  9501  		if v_1.Op != OpConst8 {
  9502  			break
  9503  		}
  9504  		c := v_1.AuxInt
  9505  		v.reset(OpRsh64x64)
  9506  		v.AddArg(x)
  9507  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  9508  		v0.AuxInt = int64(uint8(c))
  9509  		v.AddArg(v0)
  9510  		return true
  9511  	}
  9512  	// match: (Rsh64x8  (Const64 [0]) _)
  9513  	// cond:
  9514  	// result: (Const64 [0])
  9515  	for {
  9516  		v_0 := v.Args[0]
  9517  		if v_0.Op != OpConst64 {
  9518  			break
  9519  		}
  9520  		if v_0.AuxInt != 0 {
  9521  			break
  9522  		}
  9523  		v.reset(OpConst64)
  9524  		v.AuxInt = 0
  9525  		return true
  9526  	}
  9527  	return false
  9528  }
  9529  func rewriteValuegeneric_OpRsh8Ux16(v *Value, config *Config) bool {
  9530  	b := v.Block
  9531  	_ = b
  9532  	// match: (Rsh8Ux16 <t> x (Const16 [c]))
  9533  	// cond:
  9534  	// result: (Rsh8Ux64 x (Const64 <t> [int64(uint16(c))]))
  9535  	for {
  9536  		t := v.Type
  9537  		x := v.Args[0]
  9538  		v_1 := v.Args[1]
  9539  		if v_1.Op != OpConst16 {
  9540  			break
  9541  		}
  9542  		c := v_1.AuxInt
  9543  		v.reset(OpRsh8Ux64)
  9544  		v.AddArg(x)
  9545  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  9546  		v0.AuxInt = int64(uint16(c))
  9547  		v.AddArg(v0)
  9548  		return true
  9549  	}
  9550  	// match: (Rsh8Ux16  (Const8 [0]) _)
  9551  	// cond:
  9552  	// result: (Const8  [0])
  9553  	for {
  9554  		v_0 := v.Args[0]
  9555  		if v_0.Op != OpConst8 {
  9556  			break
  9557  		}
  9558  		if v_0.AuxInt != 0 {
  9559  			break
  9560  		}
  9561  		v.reset(OpConst8)
  9562  		v.AuxInt = 0
  9563  		return true
  9564  	}
  9565  	return false
  9566  }
  9567  func rewriteValuegeneric_OpRsh8Ux32(v *Value, config *Config) bool {
  9568  	b := v.Block
  9569  	_ = b
  9570  	// match: (Rsh8Ux32 <t> x (Const32 [c]))
  9571  	// cond:
  9572  	// result: (Rsh8Ux64 x (Const64 <t> [int64(uint32(c))]))
  9573  	for {
  9574  		t := v.Type
  9575  		x := v.Args[0]
  9576  		v_1 := v.Args[1]
  9577  		if v_1.Op != OpConst32 {
  9578  			break
  9579  		}
  9580  		c := v_1.AuxInt
  9581  		v.reset(OpRsh8Ux64)
  9582  		v.AddArg(x)
  9583  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  9584  		v0.AuxInt = int64(uint32(c))
  9585  		v.AddArg(v0)
  9586  		return true
  9587  	}
  9588  	// match: (Rsh8Ux32  (Const8 [0]) _)
  9589  	// cond:
  9590  	// result: (Const8  [0])
  9591  	for {
  9592  		v_0 := v.Args[0]
  9593  		if v_0.Op != OpConst8 {
  9594  			break
  9595  		}
  9596  		if v_0.AuxInt != 0 {
  9597  			break
  9598  		}
  9599  		v.reset(OpConst8)
  9600  		v.AuxInt = 0
  9601  		return true
  9602  	}
  9603  	return false
  9604  }
  9605  func rewriteValuegeneric_OpRsh8Ux64(v *Value, config *Config) bool {
  9606  	b := v.Block
  9607  	_ = b
  9608  	// match: (Rsh8Ux64  (Const8  [c]) (Const64 [d]))
  9609  	// cond:
  9610  	// result: (Const8  [int64(int8(uint8(c) >> uint64(d)))])
  9611  	for {
  9612  		v_0 := v.Args[0]
  9613  		if v_0.Op != OpConst8 {
  9614  			break
  9615  		}
  9616  		c := v_0.AuxInt
  9617  		v_1 := v.Args[1]
  9618  		if v_1.Op != OpConst64 {
  9619  			break
  9620  		}
  9621  		d := v_1.AuxInt
  9622  		v.reset(OpConst8)
  9623  		v.AuxInt = int64(int8(uint8(c) >> uint64(d)))
  9624  		return true
  9625  	}
  9626  	// match: (Rsh8Ux64  x (Const64 [0]))
  9627  	// cond:
  9628  	// result: x
  9629  	for {
  9630  		x := v.Args[0]
  9631  		v_1 := v.Args[1]
  9632  		if v_1.Op != OpConst64 {
  9633  			break
  9634  		}
  9635  		if v_1.AuxInt != 0 {
  9636  			break
  9637  		}
  9638  		v.reset(OpCopy)
  9639  		v.Type = x.Type
  9640  		v.AddArg(x)
  9641  		return true
  9642  	}
  9643  	// match: (Rsh8Ux64  (Const8 [0]) _)
  9644  	// cond:
  9645  	// result: (Const8  [0])
  9646  	for {
  9647  		v_0 := v.Args[0]
  9648  		if v_0.Op != OpConst8 {
  9649  			break
  9650  		}
  9651  		if v_0.AuxInt != 0 {
  9652  			break
  9653  		}
  9654  		v.reset(OpConst8)
  9655  		v.AuxInt = 0
  9656  		return true
  9657  	}
  9658  	// match: (Rsh8Ux64  _ (Const64 [c]))
  9659  	// cond: uint64(c) >= 8
  9660  	// result: (Const8  [0])
  9661  	for {
  9662  		v_1 := v.Args[1]
  9663  		if v_1.Op != OpConst64 {
  9664  			break
  9665  		}
  9666  		c := v_1.AuxInt
  9667  		if !(uint64(c) >= 8) {
  9668  			break
  9669  		}
  9670  		v.reset(OpConst8)
  9671  		v.AuxInt = 0
  9672  		return true
  9673  	}
  9674  	// match: (Rsh8Ux64  <t> (Rsh8Ux64  x (Const64 [c])) (Const64 [d]))
  9675  	// cond: !uaddOvf(c,d)
  9676  	// result: (Rsh8Ux64  x (Const64 <t> [c+d]))
  9677  	for {
  9678  		t := v.Type
  9679  		v_0 := v.Args[0]
  9680  		if v_0.Op != OpRsh8Ux64 {
  9681  			break
  9682  		}
  9683  		x := v_0.Args[0]
  9684  		v_0_1 := v_0.Args[1]
  9685  		if v_0_1.Op != OpConst64 {
  9686  			break
  9687  		}
  9688  		c := v_0_1.AuxInt
  9689  		v_1 := v.Args[1]
  9690  		if v_1.Op != OpConst64 {
  9691  			break
  9692  		}
  9693  		d := v_1.AuxInt
  9694  		if !(!uaddOvf(c, d)) {
  9695  			break
  9696  		}
  9697  		v.reset(OpRsh8Ux64)
  9698  		v.AddArg(x)
  9699  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  9700  		v0.AuxInt = c + d
  9701  		v.AddArg(v0)
  9702  		return true
  9703  	}
  9704  	// match: (Rsh8Ux64 (Lsh8x64 (Rsh8Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
  9705  	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
  9706  	// result: (Rsh8Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
  9707  	for {
  9708  		v_0 := v.Args[0]
  9709  		if v_0.Op != OpLsh8x64 {
  9710  			break
  9711  		}
  9712  		v_0_0 := v_0.Args[0]
  9713  		if v_0_0.Op != OpRsh8Ux64 {
  9714  			break
  9715  		}
  9716  		x := v_0_0.Args[0]
  9717  		v_0_0_1 := v_0_0.Args[1]
  9718  		if v_0_0_1.Op != OpConst64 {
  9719  			break
  9720  		}
  9721  		c1 := v_0_0_1.AuxInt
  9722  		v_0_1 := v_0.Args[1]
  9723  		if v_0_1.Op != OpConst64 {
  9724  			break
  9725  		}
  9726  		c2 := v_0_1.AuxInt
  9727  		v_1 := v.Args[1]
  9728  		if v_1.Op != OpConst64 {
  9729  			break
  9730  		}
  9731  		c3 := v_1.AuxInt
  9732  		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
  9733  			break
  9734  		}
  9735  		v.reset(OpRsh8Ux64)
  9736  		v.AddArg(x)
  9737  		v0 := b.NewValue0(v.Pos, OpConst64, config.fe.TypeUInt64())
  9738  		v0.AuxInt = c1 - c2 + c3
  9739  		v.AddArg(v0)
  9740  		return true
  9741  	}
  9742  	return false
  9743  }
  9744  func rewriteValuegeneric_OpRsh8Ux8(v *Value, config *Config) bool {
  9745  	b := v.Block
  9746  	_ = b
  9747  	// match: (Rsh8Ux8  <t> x (Const8  [c]))
  9748  	// cond:
  9749  	// result: (Rsh8Ux64 x (Const64 <t> [int64(uint8(c))]))
  9750  	for {
  9751  		t := v.Type
  9752  		x := v.Args[0]
  9753  		v_1 := v.Args[1]
  9754  		if v_1.Op != OpConst8 {
  9755  			break
  9756  		}
  9757  		c := v_1.AuxInt
  9758  		v.reset(OpRsh8Ux64)
  9759  		v.AddArg(x)
  9760  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  9761  		v0.AuxInt = int64(uint8(c))
  9762  		v.AddArg(v0)
  9763  		return true
  9764  	}
  9765  	// match: (Rsh8Ux8  (Const8 [0]) _)
  9766  	// cond:
  9767  	// result: (Const8  [0])
  9768  	for {
  9769  		v_0 := v.Args[0]
  9770  		if v_0.Op != OpConst8 {
  9771  			break
  9772  		}
  9773  		if v_0.AuxInt != 0 {
  9774  			break
  9775  		}
  9776  		v.reset(OpConst8)
  9777  		v.AuxInt = 0
  9778  		return true
  9779  	}
  9780  	return false
  9781  }
  9782  func rewriteValuegeneric_OpRsh8x16(v *Value, config *Config) bool {
  9783  	b := v.Block
  9784  	_ = b
  9785  	// match: (Rsh8x16  <t> x (Const16 [c]))
  9786  	// cond:
  9787  	// result: (Rsh8x64  x (Const64 <t> [int64(uint16(c))]))
  9788  	for {
  9789  		t := v.Type
  9790  		x := v.Args[0]
  9791  		v_1 := v.Args[1]
  9792  		if v_1.Op != OpConst16 {
  9793  			break
  9794  		}
  9795  		c := v_1.AuxInt
  9796  		v.reset(OpRsh8x64)
  9797  		v.AddArg(x)
  9798  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  9799  		v0.AuxInt = int64(uint16(c))
  9800  		v.AddArg(v0)
  9801  		return true
  9802  	}
  9803  	// match: (Rsh8x16   (Const8 [0]) _)
  9804  	// cond:
  9805  	// result: (Const8  [0])
  9806  	for {
  9807  		v_0 := v.Args[0]
  9808  		if v_0.Op != OpConst8 {
  9809  			break
  9810  		}
  9811  		if v_0.AuxInt != 0 {
  9812  			break
  9813  		}
  9814  		v.reset(OpConst8)
  9815  		v.AuxInt = 0
  9816  		return true
  9817  	}
  9818  	return false
  9819  }
  9820  func rewriteValuegeneric_OpRsh8x32(v *Value, config *Config) bool {
  9821  	b := v.Block
  9822  	_ = b
  9823  	// match: (Rsh8x32  <t> x (Const32 [c]))
  9824  	// cond:
  9825  	// result: (Rsh8x64  x (Const64 <t> [int64(uint32(c))]))
  9826  	for {
  9827  		t := v.Type
  9828  		x := v.Args[0]
  9829  		v_1 := v.Args[1]
  9830  		if v_1.Op != OpConst32 {
  9831  			break
  9832  		}
  9833  		c := v_1.AuxInt
  9834  		v.reset(OpRsh8x64)
  9835  		v.AddArg(x)
  9836  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  9837  		v0.AuxInt = int64(uint32(c))
  9838  		v.AddArg(v0)
  9839  		return true
  9840  	}
  9841  	// match: (Rsh8x32   (Const8 [0]) _)
  9842  	// cond:
  9843  	// result: (Const8  [0])
  9844  	for {
  9845  		v_0 := v.Args[0]
  9846  		if v_0.Op != OpConst8 {
  9847  			break
  9848  		}
  9849  		if v_0.AuxInt != 0 {
  9850  			break
  9851  		}
  9852  		v.reset(OpConst8)
  9853  		v.AuxInt = 0
  9854  		return true
  9855  	}
  9856  	return false
  9857  }
  9858  func rewriteValuegeneric_OpRsh8x64(v *Value, config *Config) bool {
  9859  	b := v.Block
  9860  	_ = b
  9861  	// match: (Rsh8x64   (Const8  [c]) (Const64 [d]))
  9862  	// cond:
  9863  	// result: (Const8  [int64(int8(c) >> uint64(d))])
  9864  	for {
  9865  		v_0 := v.Args[0]
  9866  		if v_0.Op != OpConst8 {
  9867  			break
  9868  		}
  9869  		c := v_0.AuxInt
  9870  		v_1 := v.Args[1]
  9871  		if v_1.Op != OpConst64 {
  9872  			break
  9873  		}
  9874  		d := v_1.AuxInt
  9875  		v.reset(OpConst8)
  9876  		v.AuxInt = int64(int8(c) >> uint64(d))
  9877  		return true
  9878  	}
  9879  	// match: (Rsh8x64   x (Const64 [0]))
  9880  	// cond:
  9881  	// result: x
  9882  	for {
  9883  		x := v.Args[0]
  9884  		v_1 := v.Args[1]
  9885  		if v_1.Op != OpConst64 {
  9886  			break
  9887  		}
  9888  		if v_1.AuxInt != 0 {
  9889  			break
  9890  		}
  9891  		v.reset(OpCopy)
  9892  		v.Type = x.Type
  9893  		v.AddArg(x)
  9894  		return true
  9895  	}
  9896  	// match: (Rsh8x64   (Const8 [0]) _)
  9897  	// cond:
  9898  	// result: (Const8  [0])
  9899  	for {
  9900  		v_0 := v.Args[0]
  9901  		if v_0.Op != OpConst8 {
  9902  			break
  9903  		}
  9904  		if v_0.AuxInt != 0 {
  9905  			break
  9906  		}
  9907  		v.reset(OpConst8)
  9908  		v.AuxInt = 0
  9909  		return true
  9910  	}
  9911  	// match: (Rsh8x64  <t> (Rsh8x64  x (Const64 [c])) (Const64 [d]))
  9912  	// cond: !uaddOvf(c,d)
  9913  	// result: (Rsh8x64  x (Const64 <t> [c+d]))
  9914  	for {
  9915  		t := v.Type
  9916  		v_0 := v.Args[0]
  9917  		if v_0.Op != OpRsh8x64 {
  9918  			break
  9919  		}
  9920  		x := v_0.Args[0]
  9921  		v_0_1 := v_0.Args[1]
  9922  		if v_0_1.Op != OpConst64 {
  9923  			break
  9924  		}
  9925  		c := v_0_1.AuxInt
  9926  		v_1 := v.Args[1]
  9927  		if v_1.Op != OpConst64 {
  9928  			break
  9929  		}
  9930  		d := v_1.AuxInt
  9931  		if !(!uaddOvf(c, d)) {
  9932  			break
  9933  		}
  9934  		v.reset(OpRsh8x64)
  9935  		v.AddArg(x)
  9936  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  9937  		v0.AuxInt = c + d
  9938  		v.AddArg(v0)
  9939  		return true
  9940  	}
  9941  	return false
  9942  }
  9943  func rewriteValuegeneric_OpRsh8x8(v *Value, config *Config) bool {
  9944  	b := v.Block
  9945  	_ = b
  9946  	// match: (Rsh8x8   <t> x (Const8  [c]))
  9947  	// cond:
  9948  	// result: (Rsh8x64  x (Const64 <t> [int64(uint8(c))]))
  9949  	for {
  9950  		t := v.Type
  9951  		x := v.Args[0]
  9952  		v_1 := v.Args[1]
  9953  		if v_1.Op != OpConst8 {
  9954  			break
  9955  		}
  9956  		c := v_1.AuxInt
  9957  		v.reset(OpRsh8x64)
  9958  		v.AddArg(x)
  9959  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  9960  		v0.AuxInt = int64(uint8(c))
  9961  		v.AddArg(v0)
  9962  		return true
  9963  	}
  9964  	// match: (Rsh8x8   (Const8 [0]) _)
  9965  	// cond:
  9966  	// result: (Const8  [0])
  9967  	for {
  9968  		v_0 := v.Args[0]
  9969  		if v_0.Op != OpConst8 {
  9970  			break
  9971  		}
  9972  		if v_0.AuxInt != 0 {
  9973  			break
  9974  		}
  9975  		v.reset(OpConst8)
  9976  		v.AuxInt = 0
  9977  		return true
  9978  	}
  9979  	return false
  9980  }
  9981  func rewriteValuegeneric_OpSignExt16to32(v *Value, config *Config) bool {
  9982  	b := v.Block
  9983  	_ = b
  9984  	// match: (SignExt16to32 (Const16 [c]))
  9985  	// cond:
  9986  	// result: (Const32 [int64( int16(c))])
  9987  	for {
  9988  		v_0 := v.Args[0]
  9989  		if v_0.Op != OpConst16 {
  9990  			break
  9991  		}
  9992  		c := v_0.AuxInt
  9993  		v.reset(OpConst32)
  9994  		v.AuxInt = int64(int16(c))
  9995  		return true
  9996  	}
  9997  	// match: (SignExt16to32 (Trunc32to16 x:(Rsh32x64 _ (Const64 [s]))))
  9998  	// cond: s >= 16
  9999  	// result: x
 10000  	for {
 10001  		v_0 := v.Args[0]
 10002  		if v_0.Op != OpTrunc32to16 {
 10003  			break
 10004  		}
 10005  		x := v_0.Args[0]
 10006  		if x.Op != OpRsh32x64 {
 10007  			break
 10008  		}
 10009  		x_1 := x.Args[1]
 10010  		if x_1.Op != OpConst64 {
 10011  			break
 10012  		}
 10013  		s := x_1.AuxInt
 10014  		if !(s >= 16) {
 10015  			break
 10016  		}
 10017  		v.reset(OpCopy)
 10018  		v.Type = x.Type
 10019  		v.AddArg(x)
 10020  		return true
 10021  	}
 10022  	return false
 10023  }
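// Illustrative note (not generated): the second rule above removes a
// Trunc32to16/SignExt16to32 pair wrapped around an arithmetic right shift
// by at least 16. After such a shift, bits 15..31 of the 32-bit value are
// all copies of the original sign bit, so truncating to 16 bits and
// sign-extending back is the identity. A hypothetical check of that
// claim, assuming s >= 16 as in the rule's condition:
func exampleSignExtOfShiftIsIdentity(x int32, s uint) bool {
	y := x >> s
	return int32(int16(y)) == y
}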
 10024  func rewriteValuegeneric_OpSignExt16to64(v *Value, config *Config) bool {
 10025  	b := v.Block
 10026  	_ = b
 10027  	// match: (SignExt16to64 (Const16 [c]))
 10028  	// cond:
 10029  	// result: (Const64 [int64( int16(c))])
 10030  	for {
 10031  		v_0 := v.Args[0]
 10032  		if v_0.Op != OpConst16 {
 10033  			break
 10034  		}
 10035  		c := v_0.AuxInt
 10036  		v.reset(OpConst64)
 10037  		v.AuxInt = int64(int16(c))
 10038  		return true
 10039  	}
 10040  	// match: (SignExt16to64 (Trunc64to16 x:(Rsh64x64 _ (Const64 [s]))))
 10041  	// cond: s >= 48
 10042  	// result: x
 10043  	for {
 10044  		v_0 := v.Args[0]
 10045  		if v_0.Op != OpTrunc64to16 {
 10046  			break
 10047  		}
 10048  		x := v_0.Args[0]
 10049  		if x.Op != OpRsh64x64 {
 10050  			break
 10051  		}
 10052  		x_1 := x.Args[1]
 10053  		if x_1.Op != OpConst64 {
 10054  			break
 10055  		}
 10056  		s := x_1.AuxInt
 10057  		if !(s >= 48) {
 10058  			break
 10059  		}
 10060  		v.reset(OpCopy)
 10061  		v.Type = x.Type
 10062  		v.AddArg(x)
 10063  		return true
 10064  	}
 10065  	return false
 10066  }
 10067  func rewriteValuegeneric_OpSignExt32to64(v *Value, config *Config) bool {
 10068  	b := v.Block
 10069  	_ = b
 10070  	// match: (SignExt32to64 (Const32 [c]))
 10071  	// cond:
 10072  	// result: (Const64 [int64( int32(c))])
 10073  	for {
 10074  		v_0 := v.Args[0]
 10075  		if v_0.Op != OpConst32 {
 10076  			break
 10077  		}
 10078  		c := v_0.AuxInt
 10079  		v.reset(OpConst64)
 10080  		v.AuxInt = int64(int32(c))
 10081  		return true
 10082  	}
 10083  	// match: (SignExt32to64 (Trunc64to32 x:(Rsh64x64 _ (Const64 [s]))))
 10084  	// cond: s >= 32
 10085  	// result: x
 10086  	for {
 10087  		v_0 := v.Args[0]
 10088  		if v_0.Op != OpTrunc64to32 {
 10089  			break
 10090  		}
 10091  		x := v_0.Args[0]
 10092  		if x.Op != OpRsh64x64 {
 10093  			break
 10094  		}
 10095  		x_1 := x.Args[1]
 10096  		if x_1.Op != OpConst64 {
 10097  			break
 10098  		}
 10099  		s := x_1.AuxInt
 10100  		if !(s >= 32) {
 10101  			break
 10102  		}
 10103  		v.reset(OpCopy)
 10104  		v.Type = x.Type
 10105  		v.AddArg(x)
 10106  		return true
 10107  	}
 10108  	return false
 10109  }
 10110  func rewriteValuegeneric_OpSignExt8to16(v *Value, config *Config) bool {
 10111  	b := v.Block
 10112  	_ = b
 10113  	// match: (SignExt8to16  (Const8  [c]))
 10114  	// cond:
 10115  	// result: (Const16 [int64(  int8(c))])
 10116  	for {
 10117  		v_0 := v.Args[0]
 10118  		if v_0.Op != OpConst8 {
 10119  			break
 10120  		}
 10121  		c := v_0.AuxInt
 10122  		v.reset(OpConst16)
 10123  		v.AuxInt = int64(int8(c))
 10124  		return true
 10125  	}
 10126  	// match: (SignExt8to16  (Trunc16to8  x:(Rsh16x64 _ (Const64 [s]))))
 10127  	// cond: s >= 8
 10128  	// result: x
 10129  	for {
 10130  		v_0 := v.Args[0]
 10131  		if v_0.Op != OpTrunc16to8 {
 10132  			break
 10133  		}
 10134  		x := v_0.Args[0]
 10135  		if x.Op != OpRsh16x64 {
 10136  			break
 10137  		}
 10138  		x_1 := x.Args[1]
 10139  		if x_1.Op != OpConst64 {
 10140  			break
 10141  		}
 10142  		s := x_1.AuxInt
 10143  		if !(s >= 8) {
 10144  			break
 10145  		}
 10146  		v.reset(OpCopy)
 10147  		v.Type = x.Type
 10148  		v.AddArg(x)
 10149  		return true
 10150  	}
 10151  	return false
 10152  }
 10153  func rewriteValuegeneric_OpSignExt8to32(v *Value, config *Config) bool {
 10154  	b := v.Block
 10155  	_ = b
 10156  	// match: (SignExt8to32  (Const8  [c]))
 10157  	// cond:
 10158  	// result: (Const32 [int64(  int8(c))])
 10159  	for {
 10160  		v_0 := v.Args[0]
 10161  		if v_0.Op != OpConst8 {
 10162  			break
 10163  		}
 10164  		c := v_0.AuxInt
 10165  		v.reset(OpConst32)
 10166  		v.AuxInt = int64(int8(c))
 10167  		return true
 10168  	}
 10169  	// match: (SignExt8to32  (Trunc32to8  x:(Rsh32x64 _ (Const64 [s]))))
 10170  	// cond: s >= 24
 10171  	// result: x
 10172  	for {
 10173  		v_0 := v.Args[0]
 10174  		if v_0.Op != OpTrunc32to8 {
 10175  			break
 10176  		}
 10177  		x := v_0.Args[0]
 10178  		if x.Op != OpRsh32x64 {
 10179  			break
 10180  		}
 10181  		x_1 := x.Args[1]
 10182  		if x_1.Op != OpConst64 {
 10183  			break
 10184  		}
 10185  		s := x_1.AuxInt
 10186  		if !(s >= 24) {
 10187  			break
 10188  		}
 10189  		v.reset(OpCopy)
 10190  		v.Type = x.Type
 10191  		v.AddArg(x)
 10192  		return true
 10193  	}
 10194  	return false
 10195  }
 10196  func rewriteValuegeneric_OpSignExt8to64(v *Value, config *Config) bool {
 10197  	b := v.Block
 10198  	_ = b
 10199  	// match: (SignExt8to64  (Const8  [c]))
 10200  	// cond:
 10201  	// result: (Const64 [int64(  int8(c))])
 10202  	for {
 10203  		v_0 := v.Args[0]
 10204  		if v_0.Op != OpConst8 {
 10205  			break
 10206  		}
 10207  		c := v_0.AuxInt
 10208  		v.reset(OpConst64)
 10209  		v.AuxInt = int64(int8(c))
 10210  		return true
 10211  	}
 10212  	// match: (SignExt8to64  (Trunc64to8  x:(Rsh64x64 _ (Const64 [s]))))
 10213  	// cond: s >= 56
 10214  	// result: x
 10215  	for {
 10216  		v_0 := v.Args[0]
 10217  		if v_0.Op != OpTrunc64to8 {
 10218  			break
 10219  		}
 10220  		x := v_0.Args[0]
 10221  		if x.Op != OpRsh64x64 {
 10222  			break
 10223  		}
 10224  		x_1 := x.Args[1]
 10225  		if x_1.Op != OpConst64 {
 10226  			break
 10227  		}
 10228  		s := x_1.AuxInt
 10229  		if !(s >= 56) {
 10230  			break
 10231  		}
 10232  		v.reset(OpCopy)
 10233  		v.Type = x.Type
 10234  		v.AddArg(x)
 10235  		return true
 10236  	}
 10237  	return false
 10238  }
 10239  func rewriteValuegeneric_OpSliceCap(v *Value, config *Config) bool {
 10240  	b := v.Block
 10241  	_ = b
 10242  	// match: (SliceCap (SliceMake _ _ (Const64 <t> [c])))
 10243  	// cond:
 10244  	// result: (Const64 <t> [c])
 10245  	for {
 10246  		v_0 := v.Args[0]
 10247  		if v_0.Op != OpSliceMake {
 10248  			break
 10249  		}
 10250  		v_0_2 := v_0.Args[2]
 10251  		if v_0_2.Op != OpConst64 {
 10252  			break
 10253  		}
 10254  		t := v_0_2.Type
 10255  		c := v_0_2.AuxInt
 10256  		v.reset(OpConst64)
 10257  		v.Type = t
 10258  		v.AuxInt = c
 10259  		return true
 10260  	}
 10261  	// match: (SliceCap (SliceMake _ _ (Const32 <t> [c])))
 10262  	// cond:
 10263  	// result: (Const32 <t> [c])
 10264  	for {
 10265  		v_0 := v.Args[0]
 10266  		if v_0.Op != OpSliceMake {
 10267  			break
 10268  		}
 10269  		v_0_2 := v_0.Args[2]
 10270  		if v_0_2.Op != OpConst32 {
 10271  			break
 10272  		}
 10273  		t := v_0_2.Type
 10274  		c := v_0_2.AuxInt
 10275  		v.reset(OpConst32)
 10276  		v.Type = t
 10277  		v.AuxInt = c
 10278  		return true
 10279  	}
 10280  	// match: (SliceCap (SliceMake _ _ (SliceCap x)))
 10281  	// cond:
 10282  	// result: (SliceCap x)
 10283  	for {
 10284  		v_0 := v.Args[0]
 10285  		if v_0.Op != OpSliceMake {
 10286  			break
 10287  		}
 10288  		v_0_2 := v_0.Args[2]
 10289  		if v_0_2.Op != OpSliceCap {
 10290  			break
 10291  		}
 10292  		x := v_0_2.Args[0]
 10293  		v.reset(OpSliceCap)
 10294  		v.AddArg(x)
 10295  		return true
 10296  	}
 10297  	// match: (SliceCap (SliceMake _ _ (SliceLen x)))
 10298  	// cond:
 10299  	// result: (SliceLen x)
 10300  	for {
 10301  		v_0 := v.Args[0]
 10302  		if v_0.Op != OpSliceMake {
 10303  			break
 10304  		}
 10305  		v_0_2 := v_0.Args[2]
 10306  		if v_0_2.Op != OpSliceLen {
 10307  			break
 10308  		}
 10309  		x := v_0_2.Args[0]
 10310  		v.reset(OpSliceLen)
 10311  		v.AddArg(x)
 10312  		return true
 10313  	}
 10314  	return false
 10315  }
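// Illustrative note (not generated): the SliceCap rules simply project
// the third (capacity) component back out of a SliceMake, whether it is
// a constant or an existing SliceCap/SliceLen value. Roughly the SSA
// analogue of the source-level fact below, shown with a hypothetical
// helper:
func exampleSliceCapProjection(s []int) bool {
	s2 := s[:len(s):cap(s)] // rebuild the slice from its own components
	return cap(s2) == cap(s)
}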
 10316  func rewriteValuegeneric_OpSliceLen(v *Value, config *Config) bool {
 10317  	b := v.Block
 10318  	_ = b
 10319  	// match: (SliceLen (SliceMake _ (Const64 <t> [c]) _))
 10320  	// cond:
 10321  	// result: (Const64 <t> [c])
 10322  	for {
 10323  		v_0 := v.Args[0]
 10324  		if v_0.Op != OpSliceMake {
 10325  			break
 10326  		}
 10327  		v_0_1 := v_0.Args[1]
 10328  		if v_0_1.Op != OpConst64 {
 10329  			break
 10330  		}
 10331  		t := v_0_1.Type
 10332  		c := v_0_1.AuxInt
 10333  		v.reset(OpConst64)
 10334  		v.Type = t
 10335  		v.AuxInt = c
 10336  		return true
 10337  	}
 10338  	// match: (SliceLen (SliceMake _ (Const32 <t> [c]) _))
 10339  	// cond:
 10340  	// result: (Const32 <t> [c])
 10341  	for {
 10342  		v_0 := v.Args[0]
 10343  		if v_0.Op != OpSliceMake {
 10344  			break
 10345  		}
 10346  		v_0_1 := v_0.Args[1]
 10347  		if v_0_1.Op != OpConst32 {
 10348  			break
 10349  		}
 10350  		t := v_0_1.Type
 10351  		c := v_0_1.AuxInt
 10352  		v.reset(OpConst32)
 10353  		v.Type = t
 10354  		v.AuxInt = c
 10355  		return true
 10356  	}
 10357  	// match: (SliceLen (SliceMake _ (SliceLen x) _))
 10358  	// cond:
 10359  	// result: (SliceLen x)
 10360  	for {
 10361  		v_0 := v.Args[0]
 10362  		if v_0.Op != OpSliceMake {
 10363  			break
 10364  		}
 10365  		v_0_1 := v_0.Args[1]
 10366  		if v_0_1.Op != OpSliceLen {
 10367  			break
 10368  		}
 10369  		x := v_0_1.Args[0]
 10370  		v.reset(OpSliceLen)
 10371  		v.AddArg(x)
 10372  		return true
 10373  	}
 10374  	return false
 10375  }
 10376  func rewriteValuegeneric_OpSlicePtr(v *Value, config *Config) bool {
 10377  	b := v.Block
 10378  	_ = b
 10379  	// match: (SlicePtr (SliceMake (SlicePtr x) _ _))
 10380  	// cond:
 10381  	// result: (SlicePtr x)
 10382  	for {
 10383  		v_0 := v.Args[0]
 10384  		if v_0.Op != OpSliceMake {
 10385  			break
 10386  		}
 10387  		v_0_0 := v_0.Args[0]
 10388  		if v_0_0.Op != OpSlicePtr {
 10389  			break
 10390  		}
 10391  		x := v_0_0.Args[0]
 10392  		v.reset(OpSlicePtr)
 10393  		v.AddArg(x)
 10394  		return true
 10395  	}
 10396  	return false
 10397  }
 10398  func rewriteValuegeneric_OpSlicemask(v *Value, config *Config) bool {
 10399  	b := v.Block
 10400  	_ = b
 10401  	// match: (Slicemask (Const32 [x]))
 10402  	// cond: x > 0
 10403  	// result: (Const32 [-1])
 10404  	for {
 10405  		v_0 := v.Args[0]
 10406  		if v_0.Op != OpConst32 {
 10407  			break
 10408  		}
 10409  		x := v_0.AuxInt
 10410  		if !(x > 0) {
 10411  			break
 10412  		}
 10413  		v.reset(OpConst32)
 10414  		v.AuxInt = -1
 10415  		return true
 10416  	}
 10417  	// match: (Slicemask (Const32 [0]))
 10418  	// cond:
 10419  	// result: (Const32 [0])
 10420  	for {
 10421  		v_0 := v.Args[0]
 10422  		if v_0.Op != OpConst32 {
 10423  			break
 10424  		}
 10425  		if v_0.AuxInt != 0 {
 10426  			break
 10427  		}
 10428  		v.reset(OpConst32)
 10429  		v.AuxInt = 0
 10430  		return true
 10431  	}
 10432  	// match: (Slicemask (Const64 [x]))
 10433  	// cond: x > 0
 10434  	// result: (Const64 [-1])
 10435  	for {
 10436  		v_0 := v.Args[0]
 10437  		if v_0.Op != OpConst64 {
 10438  			break
 10439  		}
 10440  		x := v_0.AuxInt
 10441  		if !(x > 0) {
 10442  			break
 10443  		}
 10444  		v.reset(OpConst64)
 10445  		v.AuxInt = -1
 10446  		return true
 10447  	}
 10448  	// match: (Slicemask (Const64 [0]))
 10449  	// cond:
 10450  	// result: (Const64 [0])
 10451  	for {
 10452  		v_0 := v.Args[0]
 10453  		if v_0.Op != OpConst64 {
 10454  			break
 10455  		}
 10456  		if v_0.AuxInt != 0 {
 10457  			break
 10458  		}
 10459  		v.reset(OpConst64)
 10460  		v.AuxInt = 0
 10461  		return true
 10462  	}
 10463  	return false
 10464  }
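        // Sqrt of a floating-point constant is evaluated at compile time: i2f
        // and f2i convert between the int64 AuxInt bit pattern and the float64
        // it encodes, so the result is just math.Sqrt applied to the constant.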
 10465  func rewriteValuegeneric_OpSqrt(v *Value, config *Config) bool {
 10466  	b := v.Block
 10467  	_ = b
 10468  	// match: (Sqrt (Const64F [c]))
 10469  	// cond:
 10470  	// result: (Const64F [f2i(math.Sqrt(i2f(c)))])
 10471  	for {
 10472  		v_0 := v.Args[0]
 10473  		if v_0.Op != OpConst64F {
 10474  			break
 10475  		}
 10476  		c := v_0.AuxInt
 10477  		v.reset(OpConst64F)
 10478  		v.AuxInt = f2i(math.Sqrt(i2f(c)))
 10479  		return true
 10480  	}
 10481  	return false
 10482  }
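        // The Store rules below decompose stores of composite SSA values.
        // Storing a StructMakeN becomes a chain of per-field stores at the
        // field offsets (field 0 is the innermost store, so it is first on the
        // memory chain); storing an empty StructMake0 or ArrayMake0 is just the
        // incoming memory; storing an ArrayMake1 stores its single element.
        // Roughly, for a small two-field struct T, an assignment like *p = T{a, b}
        // (an illustrative example, not code from this file) ends up as two
        // scalar stores. Separately, a store of a value that was just loaded,
        // when the type cannot be SSA'd (!config.fe.CanSSA(t)) and the store
        // sees the same memory the load used (possibly through a VarDef), is
        // turned into a Move whose AuxInt packs size and alignment via
        // MakeSizeAndAlign.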
 10483  func rewriteValuegeneric_OpStore(v *Value, config *Config) bool {
 10484  	b := v.Block
 10485  	_ = b
 10486  	// match: (Store _ (StructMake0) mem)
 10487  	// cond:
 10488  	// result: mem
 10489  	for {
 10490  		v_1 := v.Args[1]
 10491  		if v_1.Op != OpStructMake0 {
 10492  			break
 10493  		}
 10494  		mem := v.Args[2]
 10495  		v.reset(OpCopy)
 10496  		v.Type = mem.Type
 10497  		v.AddArg(mem)
 10498  		return true
 10499  	}
 10500  	// match: (Store dst (StructMake1 <t> f0) mem)
 10501  	// cond:
 10502  	// result: (Store [t.FieldType(0).Size()] dst f0 mem)
 10503  	for {
 10504  		dst := v.Args[0]
 10505  		v_1 := v.Args[1]
 10506  		if v_1.Op != OpStructMake1 {
 10507  			break
 10508  		}
 10509  		t := v_1.Type
 10510  		f0 := v_1.Args[0]
 10511  		mem := v.Args[2]
 10512  		v.reset(OpStore)
 10513  		v.AuxInt = t.FieldType(0).Size()
 10514  		v.AddArg(dst)
 10515  		v.AddArg(f0)
 10516  		v.AddArg(mem)
 10517  		return true
 10518  	}
 10519  	// match: (Store dst (StructMake2 <t> f0 f1) mem)
 10520  	// cond:
 10521  	// result: (Store [t.FieldType(1).Size()]     (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst)     f1     (Store [t.FieldType(0).Size()] dst f0 mem))
 10522  	for {
 10523  		dst := v.Args[0]
 10524  		v_1 := v.Args[1]
 10525  		if v_1.Op != OpStructMake2 {
 10526  			break
 10527  		}
 10528  		t := v_1.Type
 10529  		f0 := v_1.Args[0]
 10530  		f1 := v_1.Args[1]
 10531  		mem := v.Args[2]
 10532  		v.reset(OpStore)
 10533  		v.AuxInt = t.FieldType(1).Size()
 10534  		v0 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo())
 10535  		v0.AuxInt = t.FieldOff(1)
 10536  		v0.AddArg(dst)
 10537  		v.AddArg(v0)
 10538  		v.AddArg(f1)
 10539  		v1 := b.NewValue0(v.Pos, OpStore, TypeMem)
 10540  		v1.AuxInt = t.FieldType(0).Size()
 10541  		v1.AddArg(dst)
 10542  		v1.AddArg(f0)
 10543  		v1.AddArg(mem)
 10544  		v.AddArg(v1)
 10545  		return true
 10546  	}
 10547  	// match: (Store dst (StructMake3 <t> f0 f1 f2) mem)
 10548  	// cond:
 10549  	// result: (Store [t.FieldType(2).Size()]     (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] dst)     f2     (Store [t.FieldType(1).Size()]       (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst)       f1       (Store [t.FieldType(0).Size()] dst f0 mem)))
 10550  	for {
 10551  		dst := v.Args[0]
 10552  		v_1 := v.Args[1]
 10553  		if v_1.Op != OpStructMake3 {
 10554  			break
 10555  		}
 10556  		t := v_1.Type
 10557  		f0 := v_1.Args[0]
 10558  		f1 := v_1.Args[1]
 10559  		f2 := v_1.Args[2]
 10560  		mem := v.Args[2]
 10561  		v.reset(OpStore)
 10562  		v.AuxInt = t.FieldType(2).Size()
 10563  		v0 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(2).PtrTo())
 10564  		v0.AuxInt = t.FieldOff(2)
 10565  		v0.AddArg(dst)
 10566  		v.AddArg(v0)
 10567  		v.AddArg(f2)
 10568  		v1 := b.NewValue0(v.Pos, OpStore, TypeMem)
 10569  		v1.AuxInt = t.FieldType(1).Size()
 10570  		v2 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo())
 10571  		v2.AuxInt = t.FieldOff(1)
 10572  		v2.AddArg(dst)
 10573  		v1.AddArg(v2)
 10574  		v1.AddArg(f1)
 10575  		v3 := b.NewValue0(v.Pos, OpStore, TypeMem)
 10576  		v3.AuxInt = t.FieldType(0).Size()
 10577  		v3.AddArg(dst)
 10578  		v3.AddArg(f0)
 10579  		v3.AddArg(mem)
 10580  		v1.AddArg(v3)
 10581  		v.AddArg(v1)
 10582  		return true
 10583  	}
 10584  	// match: (Store dst (StructMake4 <t> f0 f1 f2 f3) mem)
 10585  	// cond:
 10586  	// result: (Store [t.FieldType(3).Size()]     (OffPtr <t.FieldType(3).PtrTo()> [t.FieldOff(3)] dst)     f3     (Store [t.FieldType(2).Size()]       (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] dst)       f2       (Store [t.FieldType(1).Size()]         (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst)         f1         (Store [t.FieldType(0).Size()] dst f0 mem))))
 10587  	for {
 10588  		dst := v.Args[0]
 10589  		v_1 := v.Args[1]
 10590  		if v_1.Op != OpStructMake4 {
 10591  			break
 10592  		}
 10593  		t := v_1.Type
 10594  		f0 := v_1.Args[0]
 10595  		f1 := v_1.Args[1]
 10596  		f2 := v_1.Args[2]
 10597  		f3 := v_1.Args[3]
 10598  		mem := v.Args[2]
 10599  		v.reset(OpStore)
 10600  		v.AuxInt = t.FieldType(3).Size()
 10601  		v0 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(3).PtrTo())
 10602  		v0.AuxInt = t.FieldOff(3)
 10603  		v0.AddArg(dst)
 10604  		v.AddArg(v0)
 10605  		v.AddArg(f3)
 10606  		v1 := b.NewValue0(v.Pos, OpStore, TypeMem)
 10607  		v1.AuxInt = t.FieldType(2).Size()
 10608  		v2 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(2).PtrTo())
 10609  		v2.AuxInt = t.FieldOff(2)
 10610  		v2.AddArg(dst)
 10611  		v1.AddArg(v2)
 10612  		v1.AddArg(f2)
 10613  		v3 := b.NewValue0(v.Pos, OpStore, TypeMem)
 10614  		v3.AuxInt = t.FieldType(1).Size()
 10615  		v4 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo())
 10616  		v4.AuxInt = t.FieldOff(1)
 10617  		v4.AddArg(dst)
 10618  		v3.AddArg(v4)
 10619  		v3.AddArg(f1)
 10620  		v5 := b.NewValue0(v.Pos, OpStore, TypeMem)
 10621  		v5.AuxInt = t.FieldType(0).Size()
 10622  		v5.AddArg(dst)
 10623  		v5.AddArg(f0)
 10624  		v5.AddArg(mem)
 10625  		v3.AddArg(v5)
 10626  		v1.AddArg(v3)
 10627  		v.AddArg(v1)
 10628  		return true
 10629  	}
 10630  	// match: (Store [size] dst (Load <t> src mem) mem)
 10631  	// cond: !config.fe.CanSSA(t)
 10632  	// result: (Move [MakeSizeAndAlign(size, t.Alignment()).Int64()] dst src mem)
 10633  	for {
 10634  		size := v.AuxInt
 10635  		dst := v.Args[0]
 10636  		v_1 := v.Args[1]
 10637  		if v_1.Op != OpLoad {
 10638  			break
 10639  		}
 10640  		t := v_1.Type
 10641  		src := v_1.Args[0]
 10642  		mem := v_1.Args[1]
 10643  		if mem != v.Args[2] {
 10644  			break
 10645  		}
 10646  		if !(!config.fe.CanSSA(t)) {
 10647  			break
 10648  		}
 10649  		v.reset(OpMove)
 10650  		v.AuxInt = MakeSizeAndAlign(size, t.Alignment()).Int64()
 10651  		v.AddArg(dst)
 10652  		v.AddArg(src)
 10653  		v.AddArg(mem)
 10654  		return true
 10655  	}
 10656  	// match: (Store [size] dst (Load <t> src mem) (VarDef {x} mem))
 10657  	// cond: !config.fe.CanSSA(t)
 10658  	// result: (Move [MakeSizeAndAlign(size, t.Alignment()).Int64()] dst src (VarDef {x} mem))
 10659  	for {
 10660  		size := v.AuxInt
 10661  		dst := v.Args[0]
 10662  		v_1 := v.Args[1]
 10663  		if v_1.Op != OpLoad {
 10664  			break
 10665  		}
 10666  		t := v_1.Type
 10667  		src := v_1.Args[0]
 10668  		mem := v_1.Args[1]
 10669  		v_2 := v.Args[2]
 10670  		if v_2.Op != OpVarDef {
 10671  			break
 10672  		}
 10673  		x := v_2.Aux
 10674  		if mem != v_2.Args[0] {
 10675  			break
 10676  		}
 10677  		if !(!config.fe.CanSSA(t)) {
 10678  			break
 10679  		}
 10680  		v.reset(OpMove)
 10681  		v.AuxInt = MakeSizeAndAlign(size, t.Alignment()).Int64()
 10682  		v.AddArg(dst)
 10683  		v.AddArg(src)
 10684  		v0 := b.NewValue0(v.Pos, OpVarDef, TypeMem)
 10685  		v0.Aux = x
 10686  		v0.AddArg(mem)
 10687  		v.AddArg(v0)
 10688  		return true
 10689  	}
 10690  	// match: (Store _ (ArrayMake0) mem)
 10691  	// cond:
 10692  	// result: mem
 10693  	for {
 10694  		v_1 := v.Args[1]
 10695  		if v_1.Op != OpArrayMake0 {
 10696  			break
 10697  		}
 10698  		mem := v.Args[2]
 10699  		v.reset(OpCopy)
 10700  		v.Type = mem.Type
 10701  		v.AddArg(mem)
 10702  		return true
 10703  	}
 10704  	// match: (Store [size] dst (ArrayMake1 e) mem)
 10705  	// cond:
 10706  	// result: (Store [size] dst e mem)
 10707  	for {
 10708  		size := v.AuxInt
 10709  		dst := v.Args[0]
 10710  		v_1 := v.Args[1]
 10711  		if v_1.Op != OpArrayMake1 {
 10712  			break
 10713  		}
 10714  		e := v_1.Args[0]
 10715  		mem := v.Args[2]
 10716  		v.reset(OpStore)
 10717  		v.AuxInt = size
 10718  		v.AddArg(dst)
 10719  		v.AddArg(e)
 10720  		v.AddArg(mem)
 10721  		return true
 10722  	}
 10723  	return false
 10724  }
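        // StringLen of a StringMake with a constant length folds to that
        // constant, so the length of a string decomposed earlier in this pass
        // is known at compile time.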
 10725  func rewriteValuegeneric_OpStringLen(v *Value, config *Config) bool {
 10726  	b := v.Block
 10727  	_ = b
 10728  	// match: (StringLen (StringMake _ (Const64 <t> [c])))
 10729  	// cond:
 10730  	// result: (Const64 <t> [c])
 10731  	for {
 10732  		v_0 := v.Args[0]
 10733  		if v_0.Op != OpStringMake {
 10734  			break
 10735  		}
 10736  		v_0_1 := v_0.Args[1]
 10737  		if v_0_1.Op != OpConst64 {
 10738  			break
 10739  		}
 10740  		t := v_0_1.Type
 10741  		c := v_0_1.AuxInt
 10742  		v.reset(OpConst64)
 10743  		v.Type = t
 10744  		v.AuxInt = c
 10745  		return true
 10746  	}
 10747  	return false
 10748  }
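        // StringPtr mirrors StringLen above: if the pointer component of a
        // StringMake is already a Const64, that constant is propagated directly.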
 10749  func rewriteValuegeneric_OpStringPtr(v *Value, config *Config) bool {
 10750  	b := v.Block
 10751  	_ = b
 10752  	// match: (StringPtr (StringMake (Const64 <t> [c]) _))
 10753  	// cond:
 10754  	// result: (Const64 <t> [c])
 10755  	for {
 10756  		v_0 := v.Args[0]
 10757  		if v_0.Op != OpStringMake {
 10758  			break
 10759  		}
 10760  		v_0_0 := v_0.Args[0]
 10761  		if v_0_0.Op != OpConst64 {
 10762  			break
 10763  		}
 10764  		t := v_0_0.Type
 10765  		c := v_0_0.AuxInt
 10766  		v.reset(OpConst64)
 10767  		v.Type = t
 10768  		v.AuxInt = c
 10769  		return true
 10770  	}
 10771  	return false
 10772  }
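        // StructSelect extracts field i (the AuxInt) from a struct value.
        // Selecting from a StructMakeN forwards the corresponding argument.
        // Selecting from a Load of a type that cannot be SSA'd instead loads
        // only the wanted field through an OffPtr at t.FieldOff(i); note the
        // @x.Block form, which places the new Load in the block of the original
        // Load rather than in v's block. StructSelect [0] of an IData is the
        // IData value itself.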
 10773  func rewriteValuegeneric_OpStructSelect(v *Value, config *Config) bool {
 10774  	b := v.Block
 10775  	_ = b
 10776  	// match: (StructSelect (StructMake1 x))
 10777  	// cond:
 10778  	// result: x
 10779  	for {
 10780  		v_0 := v.Args[0]
 10781  		if v_0.Op != OpStructMake1 {
 10782  			break
 10783  		}
 10784  		x := v_0.Args[0]
 10785  		v.reset(OpCopy)
 10786  		v.Type = x.Type
 10787  		v.AddArg(x)
 10788  		return true
 10789  	}
 10790  	// match: (StructSelect [0] (StructMake2 x _))
 10791  	// cond:
 10792  	// result: x
 10793  	for {
 10794  		if v.AuxInt != 0 {
 10795  			break
 10796  		}
 10797  		v_0 := v.Args[0]
 10798  		if v_0.Op != OpStructMake2 {
 10799  			break
 10800  		}
 10801  		x := v_0.Args[0]
 10802  		v.reset(OpCopy)
 10803  		v.Type = x.Type
 10804  		v.AddArg(x)
 10805  		return true
 10806  	}
 10807  	// match: (StructSelect [1] (StructMake2 _ x))
 10808  	// cond:
 10809  	// result: x
 10810  	for {
 10811  		if v.AuxInt != 1 {
 10812  			break
 10813  		}
 10814  		v_0 := v.Args[0]
 10815  		if v_0.Op != OpStructMake2 {
 10816  			break
 10817  		}
 10818  		x := v_0.Args[1]
 10819  		v.reset(OpCopy)
 10820  		v.Type = x.Type
 10821  		v.AddArg(x)
 10822  		return true
 10823  	}
 10824  	// match: (StructSelect [0] (StructMake3 x _ _))
 10825  	// cond:
 10826  	// result: x
 10827  	for {
 10828  		if v.AuxInt != 0 {
 10829  			break
 10830  		}
 10831  		v_0 := v.Args[0]
 10832  		if v_0.Op != OpStructMake3 {
 10833  			break
 10834  		}
 10835  		x := v_0.Args[0]
 10836  		v.reset(OpCopy)
 10837  		v.Type = x.Type
 10838  		v.AddArg(x)
 10839  		return true
 10840  	}
 10841  	// match: (StructSelect [1] (StructMake3 _ x _))
 10842  	// cond:
 10843  	// result: x
 10844  	for {
 10845  		if v.AuxInt != 1 {
 10846  			break
 10847  		}
 10848  		v_0 := v.Args[0]
 10849  		if v_0.Op != OpStructMake3 {
 10850  			break
 10851  		}
 10852  		x := v_0.Args[1]
 10853  		v.reset(OpCopy)
 10854  		v.Type = x.Type
 10855  		v.AddArg(x)
 10856  		return true
 10857  	}
 10858  	// match: (StructSelect [2] (StructMake3 _ _ x))
 10859  	// cond:
 10860  	// result: x
 10861  	for {
 10862  		if v.AuxInt != 2 {
 10863  			break
 10864  		}
 10865  		v_0 := v.Args[0]
 10866  		if v_0.Op != OpStructMake3 {
 10867  			break
 10868  		}
 10869  		x := v_0.Args[2]
 10870  		v.reset(OpCopy)
 10871  		v.Type = x.Type
 10872  		v.AddArg(x)
 10873  		return true
 10874  	}
 10875  	// match: (StructSelect [0] (StructMake4 x _ _ _))
 10876  	// cond:
 10877  	// result: x
 10878  	for {
 10879  		if v.AuxInt != 0 {
 10880  			break
 10881  		}
 10882  		v_0 := v.Args[0]
 10883  		if v_0.Op != OpStructMake4 {
 10884  			break
 10885  		}
 10886  		x := v_0.Args[0]
 10887  		v.reset(OpCopy)
 10888  		v.Type = x.Type
 10889  		v.AddArg(x)
 10890  		return true
 10891  	}
 10892  	// match: (StructSelect [1] (StructMake4 _ x _ _))
 10893  	// cond:
 10894  	// result: x
 10895  	for {
 10896  		if v.AuxInt != 1 {
 10897  			break
 10898  		}
 10899  		v_0 := v.Args[0]
 10900  		if v_0.Op != OpStructMake4 {
 10901  			break
 10902  		}
 10903  		x := v_0.Args[1]
 10904  		v.reset(OpCopy)
 10905  		v.Type = x.Type
 10906  		v.AddArg(x)
 10907  		return true
 10908  	}
 10909  	// match: (StructSelect [2] (StructMake4 _ _ x _))
 10910  	// cond:
 10911  	// result: x
 10912  	for {
 10913  		if v.AuxInt != 2 {
 10914  			break
 10915  		}
 10916  		v_0 := v.Args[0]
 10917  		if v_0.Op != OpStructMake4 {
 10918  			break
 10919  		}
 10920  		x := v_0.Args[2]
 10921  		v.reset(OpCopy)
 10922  		v.Type = x.Type
 10923  		v.AddArg(x)
 10924  		return true
 10925  	}
 10926  	// match: (StructSelect [3] (StructMake4 _ _ _ x))
 10927  	// cond:
 10928  	// result: x
 10929  	for {
 10930  		if v.AuxInt != 3 {
 10931  			break
 10932  		}
 10933  		v_0 := v.Args[0]
 10934  		if v_0.Op != OpStructMake4 {
 10935  			break
 10936  		}
 10937  		x := v_0.Args[3]
 10938  		v.reset(OpCopy)
 10939  		v.Type = x.Type
 10940  		v.AddArg(x)
 10941  		return true
 10942  	}
 10943  	// match: (StructSelect [i] x:(Load <t> ptr mem))
 10944  	// cond: !config.fe.CanSSA(t)
 10945  	// result: @x.Block (Load <v.Type> (OffPtr <v.Type.PtrTo()> [t.FieldOff(int(i))] ptr) mem)
 10946  	for {
 10947  		i := v.AuxInt
 10948  		x := v.Args[0]
 10949  		if x.Op != OpLoad {
 10950  			break
 10951  		}
 10952  		t := x.Type
 10953  		ptr := x.Args[0]
 10954  		mem := x.Args[1]
 10955  		if !(!config.fe.CanSSA(t)) {
 10956  			break
 10957  		}
 10958  		b = x.Block
 10959  		v0 := b.NewValue0(v.Pos, OpLoad, v.Type)
 10960  		v.reset(OpCopy)
 10961  		v.AddArg(v0)
 10962  		v1 := b.NewValue0(v.Pos, OpOffPtr, v.Type.PtrTo())
 10963  		v1.AuxInt = t.FieldOff(int(i))
 10964  		v1.AddArg(ptr)
 10965  		v0.AddArg(v1)
 10966  		v0.AddArg(mem)
 10967  		return true
 10968  	}
 10969  	// match: (StructSelect [0] x:(IData _))
 10970  	// cond:
 10971  	// result: x
 10972  	for {
 10973  		if v.AuxInt != 0 {
 10974  			break
 10975  		}
 10976  		x := v.Args[0]
 10977  		if x.Op != OpIData {
 10978  			break
 10979  		}
 10980  		v.reset(OpCopy)
 10981  		v.Type = x.Type
 10982  		v.AddArg(x)
 10983  		return true
 10984  	}
 10985  	return false
 10986  }
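        // The integer Sub rules (Sub16/32/64/8 all follow the same pattern)
        // constant-fold c-d with the result reduced to the operand width,
        // canonicalize x - const into (-const) + x so that later rules only
        // have to handle constants on the left of an Add, and apply the
        // identities x-x = 0, (x+y)-x = y and (x+y)-y = x.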
 10987  func rewriteValuegeneric_OpSub16(v *Value, config *Config) bool {
 10988  	b := v.Block
 10989  	_ = b
 10990  	// match: (Sub16  (Const16 [c]) (Const16 [d]))
 10991  	// cond:
 10992  	// result: (Const16 [int64(int16(c-d))])
 10993  	for {
 10994  		v_0 := v.Args[0]
 10995  		if v_0.Op != OpConst16 {
 10996  			break
 10997  		}
 10998  		c := v_0.AuxInt
 10999  		v_1 := v.Args[1]
 11000  		if v_1.Op != OpConst16 {
 11001  			break
 11002  		}
 11003  		d := v_1.AuxInt
 11004  		v.reset(OpConst16)
 11005  		v.AuxInt = int64(int16(c - d))
 11006  		return true
 11007  	}
 11008  	// match: (Sub16 x (Const16 <t> [c]))
 11009  	// cond: x.Op != OpConst16
 11010  	// result: (Add16 (Const16 <t> [int64(int16(-c))]) x)
 11011  	for {
 11012  		x := v.Args[0]
 11013  		v_1 := v.Args[1]
 11014  		if v_1.Op != OpConst16 {
 11015  			break
 11016  		}
 11017  		t := v_1.Type
 11018  		c := v_1.AuxInt
 11019  		if !(x.Op != OpConst16) {
 11020  			break
 11021  		}
 11022  		v.reset(OpAdd16)
 11023  		v0 := b.NewValue0(v.Pos, OpConst16, t)
 11024  		v0.AuxInt = int64(int16(-c))
 11025  		v.AddArg(v0)
 11026  		v.AddArg(x)
 11027  		return true
 11028  	}
 11029  	// match: (Sub16 x x)
 11030  	// cond:
 11031  	// result: (Const16 [0])
 11032  	for {
 11033  		x := v.Args[0]
 11034  		if x != v.Args[1] {
 11035  			break
 11036  		}
 11037  		v.reset(OpConst16)
 11038  		v.AuxInt = 0
 11039  		return true
 11040  	}
 11041  	// match: (Sub16 (Add16 x y) x)
 11042  	// cond:
 11043  	// result: y
 11044  	for {
 11045  		v_0 := v.Args[0]
 11046  		if v_0.Op != OpAdd16 {
 11047  			break
 11048  		}
 11049  		x := v_0.Args[0]
 11050  		y := v_0.Args[1]
 11051  		if x != v.Args[1] {
 11052  			break
 11053  		}
 11054  		v.reset(OpCopy)
 11055  		v.Type = y.Type
 11056  		v.AddArg(y)
 11057  		return true
 11058  	}
 11059  	// match: (Sub16 (Add16 x y) y)
 11060  	// cond:
 11061  	// result: x
 11062  	for {
 11063  		v_0 := v.Args[0]
 11064  		if v_0.Op != OpAdd16 {
 11065  			break
 11066  		}
 11067  		x := v_0.Args[0]
 11068  		y := v_0.Args[1]
 11069  		if y != v.Args[1] {
 11070  			break
 11071  		}
 11072  		v.reset(OpCopy)
 11073  		v.Type = x.Type
 11074  		v.AddArg(x)
 11075  		return true
 11076  	}
 11077  	return false
 11078  }
 11079  func rewriteValuegeneric_OpSub32(v *Value, config *Config) bool {
 11080  	b := v.Block
 11081  	_ = b
 11082  	// match: (Sub32  (Const32 [c]) (Const32 [d]))
 11083  	// cond:
 11084  	// result: (Const32 [int64(int32(c-d))])
 11085  	for {
 11086  		v_0 := v.Args[0]
 11087  		if v_0.Op != OpConst32 {
 11088  			break
 11089  		}
 11090  		c := v_0.AuxInt
 11091  		v_1 := v.Args[1]
 11092  		if v_1.Op != OpConst32 {
 11093  			break
 11094  		}
 11095  		d := v_1.AuxInt
 11096  		v.reset(OpConst32)
 11097  		v.AuxInt = int64(int32(c - d))
 11098  		return true
 11099  	}
 11100  	// match: (Sub32 x (Const32 <t> [c]))
 11101  	// cond: x.Op != OpConst32
 11102  	// result: (Add32 (Const32 <t> [int64(int32(-c))]) x)
 11103  	for {
 11104  		x := v.Args[0]
 11105  		v_1 := v.Args[1]
 11106  		if v_1.Op != OpConst32 {
 11107  			break
 11108  		}
 11109  		t := v_1.Type
 11110  		c := v_1.AuxInt
 11111  		if !(x.Op != OpConst32) {
 11112  			break
 11113  		}
 11114  		v.reset(OpAdd32)
 11115  		v0 := b.NewValue0(v.Pos, OpConst32, t)
 11116  		v0.AuxInt = int64(int32(-c))
 11117  		v.AddArg(v0)
 11118  		v.AddArg(x)
 11119  		return true
 11120  	}
 11121  	// match: (Sub32 x x)
 11122  	// cond:
 11123  	// result: (Const32 [0])
 11124  	for {
 11125  		x := v.Args[0]
 11126  		if x != v.Args[1] {
 11127  			break
 11128  		}
 11129  		v.reset(OpConst32)
 11130  		v.AuxInt = 0
 11131  		return true
 11132  	}
 11133  	// match: (Sub32 (Add32 x y) x)
 11134  	// cond:
 11135  	// result: y
 11136  	for {
 11137  		v_0 := v.Args[0]
 11138  		if v_0.Op != OpAdd32 {
 11139  			break
 11140  		}
 11141  		x := v_0.Args[0]
 11142  		y := v_0.Args[1]
 11143  		if x != v.Args[1] {
 11144  			break
 11145  		}
 11146  		v.reset(OpCopy)
 11147  		v.Type = y.Type
 11148  		v.AddArg(y)
 11149  		return true
 11150  	}
 11151  	// match: (Sub32 (Add32 x y) y)
 11152  	// cond:
 11153  	// result: x
 11154  	for {
 11155  		v_0 := v.Args[0]
 11156  		if v_0.Op != OpAdd32 {
 11157  			break
 11158  		}
 11159  		x := v_0.Args[0]
 11160  		y := v_0.Args[1]
 11161  		if y != v.Args[1] {
 11162  			break
 11163  		}
 11164  		v.reset(OpCopy)
 11165  		v.Type = x.Type
 11166  		v.AddArg(x)
 11167  		return true
 11168  	}
 11169  	return false
 11170  }
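        // The floating-point Sub rules fold constant operands using the i2f/f2i
        // (and i2f32) bit-pattern helpers, and drop a subtraction of the
        // constant +0, which leaves every float input unchanged, including NaN
        // and negative zero.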
 11171  func rewriteValuegeneric_OpSub32F(v *Value, config *Config) bool {
 11172  	b := v.Block
 11173  	_ = b
 11174  	// match: (Sub32F (Const32F [c]) (Const32F [d]))
 11175  	// cond:
 11176  	// result: (Const32F [f2i(float64(i2f32(c) - i2f32(d)))])
 11177  	for {
 11178  		v_0 := v.Args[0]
 11179  		if v_0.Op != OpConst32F {
 11180  			break
 11181  		}
 11182  		c := v_0.AuxInt
 11183  		v_1 := v.Args[1]
 11184  		if v_1.Op != OpConst32F {
 11185  			break
 11186  		}
 11187  		d := v_1.AuxInt
 11188  		v.reset(OpConst32F)
 11189  		v.AuxInt = f2i(float64(i2f32(c) - i2f32(d)))
 11190  		return true
 11191  	}
 11192  	// match: (Sub32F x (Const32F [0]))
 11193  	// cond:
 11194  	// result: x
 11195  	for {
 11196  		x := v.Args[0]
 11197  		v_1 := v.Args[1]
 11198  		if v_1.Op != OpConst32F {
 11199  			break
 11200  		}
 11201  		if v_1.AuxInt != 0 {
 11202  			break
 11203  		}
 11204  		v.reset(OpCopy)
 11205  		v.Type = x.Type
 11206  		v.AddArg(x)
 11207  		return true
 11208  	}
 11209  	return false
 11210  }
 11211  func rewriteValuegeneric_OpSub64(v *Value, config *Config) bool {
 11212  	b := v.Block
 11213  	_ = b
 11214  	// match: (Sub64  (Const64 [c]) (Const64 [d]))
 11215  	// cond:
 11216  	// result: (Const64 [c-d])
 11217  	for {
 11218  		v_0 := v.Args[0]
 11219  		if v_0.Op != OpConst64 {
 11220  			break
 11221  		}
 11222  		c := v_0.AuxInt
 11223  		v_1 := v.Args[1]
 11224  		if v_1.Op != OpConst64 {
 11225  			break
 11226  		}
 11227  		d := v_1.AuxInt
 11228  		v.reset(OpConst64)
 11229  		v.AuxInt = c - d
 11230  		return true
 11231  	}
 11232  	// match: (Sub64 x (Const64 <t> [c]))
 11233  	// cond: x.Op != OpConst64
 11234  	// result: (Add64 (Const64 <t> [-c]) x)
 11235  	for {
 11236  		x := v.Args[0]
 11237  		v_1 := v.Args[1]
 11238  		if v_1.Op != OpConst64 {
 11239  			break
 11240  		}
 11241  		t := v_1.Type
 11242  		c := v_1.AuxInt
 11243  		if !(x.Op != OpConst64) {
 11244  			break
 11245  		}
 11246  		v.reset(OpAdd64)
 11247  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 11248  		v0.AuxInt = -c
 11249  		v.AddArg(v0)
 11250  		v.AddArg(x)
 11251  		return true
 11252  	}
 11253  	// match: (Sub64 x x)
 11254  	// cond:
 11255  	// result: (Const64 [0])
 11256  	for {
 11257  		x := v.Args[0]
 11258  		if x != v.Args[1] {
 11259  			break
 11260  		}
 11261  		v.reset(OpConst64)
 11262  		v.AuxInt = 0
 11263  		return true
 11264  	}
 11265  	// match: (Sub64 (Add64 x y) x)
 11266  	// cond:
 11267  	// result: y
 11268  	for {
 11269  		v_0 := v.Args[0]
 11270  		if v_0.Op != OpAdd64 {
 11271  			break
 11272  		}
 11273  		x := v_0.Args[0]
 11274  		y := v_0.Args[1]
 11275  		if x != v.Args[1] {
 11276  			break
 11277  		}
 11278  		v.reset(OpCopy)
 11279  		v.Type = y.Type
 11280  		v.AddArg(y)
 11281  		return true
 11282  	}
 11283  	// match: (Sub64 (Add64 x y) y)
 11284  	// cond:
 11285  	// result: x
 11286  	for {
 11287  		v_0 := v.Args[0]
 11288  		if v_0.Op != OpAdd64 {
 11289  			break
 11290  		}
 11291  		x := v_0.Args[0]
 11292  		y := v_0.Args[1]
 11293  		if y != v.Args[1] {
 11294  			break
 11295  		}
 11296  		v.reset(OpCopy)
 11297  		v.Type = x.Type
 11298  		v.AddArg(x)
 11299  		return true
 11300  	}
 11301  	return false
 11302  }
 11303  func rewriteValuegeneric_OpSub64F(v *Value, config *Config) bool {
 11304  	b := v.Block
 11305  	_ = b
 11306  	// match: (Sub64F (Const64F [c]) (Const64F [d]))
 11307  	// cond:
 11308  	// result: (Const64F [f2i(i2f(c) - i2f(d))])
 11309  	for {
 11310  		v_0 := v.Args[0]
 11311  		if v_0.Op != OpConst64F {
 11312  			break
 11313  		}
 11314  		c := v_0.AuxInt
 11315  		v_1 := v.Args[1]
 11316  		if v_1.Op != OpConst64F {
 11317  			break
 11318  		}
 11319  		d := v_1.AuxInt
 11320  		v.reset(OpConst64F)
 11321  		v.AuxInt = f2i(i2f(c) - i2f(d))
 11322  		return true
 11323  	}
 11324  	// match: (Sub64F x (Const64F [0]))
 11325  	// cond:
 11326  	// result: x
 11327  	for {
 11328  		x := v.Args[0]
 11329  		v_1 := v.Args[1]
 11330  		if v_1.Op != OpConst64F {
 11331  			break
 11332  		}
 11333  		if v_1.AuxInt != 0 {
 11334  			break
 11335  		}
 11336  		v.reset(OpCopy)
 11337  		v.Type = x.Type
 11338  		v.AddArg(x)
 11339  		return true
 11340  	}
 11341  	return false
 11342  }
 11343  func rewriteValuegeneric_OpSub8(v *Value, config *Config) bool {
 11344  	b := v.Block
 11345  	_ = b
 11346  	// match: (Sub8   (Const8 [c]) (Const8 [d]))
 11347  	// cond:
 11348  	// result: (Const8 [int64(int8(c-d))])
 11349  	for {
 11350  		v_0 := v.Args[0]
 11351  		if v_0.Op != OpConst8 {
 11352  			break
 11353  		}
 11354  		c := v_0.AuxInt
 11355  		v_1 := v.Args[1]
 11356  		if v_1.Op != OpConst8 {
 11357  			break
 11358  		}
 11359  		d := v_1.AuxInt
 11360  		v.reset(OpConst8)
 11361  		v.AuxInt = int64(int8(c - d))
 11362  		return true
 11363  	}
 11364  	// match: (Sub8  x (Const8  <t> [c]))
 11365  	// cond: x.Op != OpConst8
 11366  	// result: (Add8  (Const8  <t> [int64(int8(-c))]) x)
 11367  	for {
 11368  		x := v.Args[0]
 11369  		v_1 := v.Args[1]
 11370  		if v_1.Op != OpConst8 {
 11371  			break
 11372  		}
 11373  		t := v_1.Type
 11374  		c := v_1.AuxInt
 11375  		if !(x.Op != OpConst8) {
 11376  			break
 11377  		}
 11378  		v.reset(OpAdd8)
 11379  		v0 := b.NewValue0(v.Pos, OpConst8, t)
 11380  		v0.AuxInt = int64(int8(-c))
 11381  		v.AddArg(v0)
 11382  		v.AddArg(x)
 11383  		return true
 11384  	}
 11385  	// match: (Sub8  x x)
 11386  	// cond:
 11387  	// result: (Const8  [0])
 11388  	for {
 11389  		x := v.Args[0]
 11390  		if x != v.Args[1] {
 11391  			break
 11392  		}
 11393  		v.reset(OpConst8)
 11394  		v.AuxInt = 0
 11395  		return true
 11396  	}
 11397  	// match: (Sub8  (Add8  x y) x)
 11398  	// cond:
 11399  	// result: y
 11400  	for {
 11401  		v_0 := v.Args[0]
 11402  		if v_0.Op != OpAdd8 {
 11403  			break
 11404  		}
 11405  		x := v_0.Args[0]
 11406  		y := v_0.Args[1]
 11407  		if x != v.Args[1] {
 11408  			break
 11409  		}
 11410  		v.reset(OpCopy)
 11411  		v.Type = y.Type
 11412  		v.AddArg(y)
 11413  		return true
 11414  	}
 11415  	// match: (Sub8  (Add8  x y) y)
 11416  	// cond:
 11417  	// result: x
 11418  	for {
 11419  		v_0 := v.Args[0]
 11420  		if v_0.Op != OpAdd8 {
 11421  			break
 11422  		}
 11423  		x := v_0.Args[0]
 11424  		y := v_0.Args[1]
 11425  		if y != v.Args[1] {
 11426  			break
 11427  		}
 11428  		v.reset(OpCopy)
 11429  		v.Type = x.Type
 11430  		v.AddArg(x)
 11431  		return true
 11432  	}
 11433  	return false
 11434  }
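        // The Trunc rules (all width combinations follow the same shape) fold
        // truncation of a constant, cancel a truncation against a preceding
        // zero or sign extension (re-extending to the narrower width when the
        // original source was narrower still), and drop an And whose constant
        // mask keeps every bit that survives the truncation, since the masked
        // high bits are discarded anyway.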
 11435  func rewriteValuegeneric_OpTrunc16to8(v *Value, config *Config) bool {
 11436  	b := v.Block
 11437  	_ = b
 11438  	// match: (Trunc16to8  (Const16 [c]))
 11439  	// cond:
 11440  	// result: (Const8   [int64(int8(c))])
 11441  	for {
 11442  		v_0 := v.Args[0]
 11443  		if v_0.Op != OpConst16 {
 11444  			break
 11445  		}
 11446  		c := v_0.AuxInt
 11447  		v.reset(OpConst8)
 11448  		v.AuxInt = int64(int8(c))
 11449  		return true
 11450  	}
 11451  	// match: (Trunc16to8  (ZeroExt8to16  x))
 11452  	// cond:
 11453  	// result: x
 11454  	for {
 11455  		v_0 := v.Args[0]
 11456  		if v_0.Op != OpZeroExt8to16 {
 11457  			break
 11458  		}
 11459  		x := v_0.Args[0]
 11460  		v.reset(OpCopy)
 11461  		v.Type = x.Type
 11462  		v.AddArg(x)
 11463  		return true
 11464  	}
 11465  	// match: (Trunc16to8  (SignExt8to16  x))
 11466  	// cond:
 11467  	// result: x
 11468  	for {
 11469  		v_0 := v.Args[0]
 11470  		if v_0.Op != OpSignExt8to16 {
 11471  			break
 11472  		}
 11473  		x := v_0.Args[0]
 11474  		v.reset(OpCopy)
 11475  		v.Type = x.Type
 11476  		v.AddArg(x)
 11477  		return true
 11478  	}
 11479  	// match: (Trunc16to8  (And16 (Const16 [y]) x))
 11480  	// cond: y&0xFF == 0xFF
 11481  	// result: (Trunc16to8 x)
 11482  	for {
 11483  		v_0 := v.Args[0]
 11484  		if v_0.Op != OpAnd16 {
 11485  			break
 11486  		}
 11487  		v_0_0 := v_0.Args[0]
 11488  		if v_0_0.Op != OpConst16 {
 11489  			break
 11490  		}
 11491  		y := v_0_0.AuxInt
 11492  		x := v_0.Args[1]
 11493  		if !(y&0xFF == 0xFF) {
 11494  			break
 11495  		}
 11496  		v.reset(OpTrunc16to8)
 11497  		v.AddArg(x)
 11498  		return true
 11499  	}
 11500  	return false
 11501  }
 11502  func rewriteValuegeneric_OpTrunc32to16(v *Value, config *Config) bool {
 11503  	b := v.Block
 11504  	_ = b
 11505  	// match: (Trunc32to16 (Const32 [c]))
 11506  	// cond:
 11507  	// result: (Const16  [int64(int16(c))])
 11508  	for {
 11509  		v_0 := v.Args[0]
 11510  		if v_0.Op != OpConst32 {
 11511  			break
 11512  		}
 11513  		c := v_0.AuxInt
 11514  		v.reset(OpConst16)
 11515  		v.AuxInt = int64(int16(c))
 11516  		return true
 11517  	}
 11518  	// match: (Trunc32to16 (ZeroExt8to32  x))
 11519  	// cond:
 11520  	// result: (ZeroExt8to16  x)
 11521  	for {
 11522  		v_0 := v.Args[0]
 11523  		if v_0.Op != OpZeroExt8to32 {
 11524  			break
 11525  		}
 11526  		x := v_0.Args[0]
 11527  		v.reset(OpZeroExt8to16)
 11528  		v.AddArg(x)
 11529  		return true
 11530  	}
 11531  	// match: (Trunc32to16 (ZeroExt16to32 x))
 11532  	// cond:
 11533  	// result: x
 11534  	for {
 11535  		v_0 := v.Args[0]
 11536  		if v_0.Op != OpZeroExt16to32 {
 11537  			break
 11538  		}
 11539  		x := v_0.Args[0]
 11540  		v.reset(OpCopy)
 11541  		v.Type = x.Type
 11542  		v.AddArg(x)
 11543  		return true
 11544  	}
 11545  	// match: (Trunc32to16 (SignExt8to32  x))
 11546  	// cond:
 11547  	// result: (SignExt8to16  x)
 11548  	for {
 11549  		v_0 := v.Args[0]
 11550  		if v_0.Op != OpSignExt8to32 {
 11551  			break
 11552  		}
 11553  		x := v_0.Args[0]
 11554  		v.reset(OpSignExt8to16)
 11555  		v.AddArg(x)
 11556  		return true
 11557  	}
 11558  	// match: (Trunc32to16 (SignExt16to32 x))
 11559  	// cond:
 11560  	// result: x
 11561  	for {
 11562  		v_0 := v.Args[0]
 11563  		if v_0.Op != OpSignExt16to32 {
 11564  			break
 11565  		}
 11566  		x := v_0.Args[0]
 11567  		v.reset(OpCopy)
 11568  		v.Type = x.Type
 11569  		v.AddArg(x)
 11570  		return true
 11571  	}
 11572  	// match: (Trunc32to16 (And32 (Const32 [y]) x))
 11573  	// cond: y&0xFFFF == 0xFFFF
 11574  	// result: (Trunc32to16 x)
 11575  	for {
 11576  		v_0 := v.Args[0]
 11577  		if v_0.Op != OpAnd32 {
 11578  			break
 11579  		}
 11580  		v_0_0 := v_0.Args[0]
 11581  		if v_0_0.Op != OpConst32 {
 11582  			break
 11583  		}
 11584  		y := v_0_0.AuxInt
 11585  		x := v_0.Args[1]
 11586  		if !(y&0xFFFF == 0xFFFF) {
 11587  			break
 11588  		}
 11589  		v.reset(OpTrunc32to16)
 11590  		v.AddArg(x)
 11591  		return true
 11592  	}
 11593  	return false
 11594  }
 11595  func rewriteValuegeneric_OpTrunc32to8(v *Value, config *Config) bool {
 11596  	b := v.Block
 11597  	_ = b
 11598  	// match: (Trunc32to8  (Const32 [c]))
 11599  	// cond:
 11600  	// result: (Const8   [int64(int8(c))])
 11601  	for {
 11602  		v_0 := v.Args[0]
 11603  		if v_0.Op != OpConst32 {
 11604  			break
 11605  		}
 11606  		c := v_0.AuxInt
 11607  		v.reset(OpConst8)
 11608  		v.AuxInt = int64(int8(c))
 11609  		return true
 11610  	}
 11611  	// match: (Trunc32to8  (ZeroExt8to32  x))
 11612  	// cond:
 11613  	// result: x
 11614  	for {
 11615  		v_0 := v.Args[0]
 11616  		if v_0.Op != OpZeroExt8to32 {
 11617  			break
 11618  		}
 11619  		x := v_0.Args[0]
 11620  		v.reset(OpCopy)
 11621  		v.Type = x.Type
 11622  		v.AddArg(x)
 11623  		return true
 11624  	}
 11625  	// match: (Trunc32to8  (SignExt8to32  x))
 11626  	// cond:
 11627  	// result: x
 11628  	for {
 11629  		v_0 := v.Args[0]
 11630  		if v_0.Op != OpSignExt8to32 {
 11631  			break
 11632  		}
 11633  		x := v_0.Args[0]
 11634  		v.reset(OpCopy)
 11635  		v.Type = x.Type
 11636  		v.AddArg(x)
 11637  		return true
 11638  	}
 11639  	// match: (Trunc32to8  (And32 (Const32 [y]) x))
 11640  	// cond: y&0xFF == 0xFF
 11641  	// result: (Trunc32to8 x)
 11642  	for {
 11643  		v_0 := v.Args[0]
 11644  		if v_0.Op != OpAnd32 {
 11645  			break
 11646  		}
 11647  		v_0_0 := v_0.Args[0]
 11648  		if v_0_0.Op != OpConst32 {
 11649  			break
 11650  		}
 11651  		y := v_0_0.AuxInt
 11652  		x := v_0.Args[1]
 11653  		if !(y&0xFF == 0xFF) {
 11654  			break
 11655  		}
 11656  		v.reset(OpTrunc32to8)
 11657  		v.AddArg(x)
 11658  		return true
 11659  	}
 11660  	return false
 11661  }
 11662  func rewriteValuegeneric_OpTrunc64to16(v *Value, config *Config) bool {
 11663  	b := v.Block
 11664  	_ = b
 11665  	// match: (Trunc64to16 (Const64 [c]))
 11666  	// cond:
 11667  	// result: (Const16  [int64(int16(c))])
 11668  	for {
 11669  		v_0 := v.Args[0]
 11670  		if v_0.Op != OpConst64 {
 11671  			break
 11672  		}
 11673  		c := v_0.AuxInt
 11674  		v.reset(OpConst16)
 11675  		v.AuxInt = int64(int16(c))
 11676  		return true
 11677  	}
 11678  	// match: (Trunc64to16 (ZeroExt8to64  x))
 11679  	// cond:
 11680  	// result: (ZeroExt8to16  x)
 11681  	for {
 11682  		v_0 := v.Args[0]
 11683  		if v_0.Op != OpZeroExt8to64 {
 11684  			break
 11685  		}
 11686  		x := v_0.Args[0]
 11687  		v.reset(OpZeroExt8to16)
 11688  		v.AddArg(x)
 11689  		return true
 11690  	}
 11691  	// match: (Trunc64to16 (ZeroExt16to64 x))
 11692  	// cond:
 11693  	// result: x
 11694  	for {
 11695  		v_0 := v.Args[0]
 11696  		if v_0.Op != OpZeroExt16to64 {
 11697  			break
 11698  		}
 11699  		x := v_0.Args[0]
 11700  		v.reset(OpCopy)
 11701  		v.Type = x.Type
 11702  		v.AddArg(x)
 11703  		return true
 11704  	}
 11705  	// match: (Trunc64to16 (SignExt8to64  x))
 11706  	// cond:
 11707  	// result: (SignExt8to16  x)
 11708  	for {
 11709  		v_0 := v.Args[0]
 11710  		if v_0.Op != OpSignExt8to64 {
 11711  			break
 11712  		}
 11713  		x := v_0.Args[0]
 11714  		v.reset(OpSignExt8to16)
 11715  		v.AddArg(x)
 11716  		return true
 11717  	}
 11718  	// match: (Trunc64to16 (SignExt16to64 x))
 11719  	// cond:
 11720  	// result: x
 11721  	for {
 11722  		v_0 := v.Args[0]
 11723  		if v_0.Op != OpSignExt16to64 {
 11724  			break
 11725  		}
 11726  		x := v_0.Args[0]
 11727  		v.reset(OpCopy)
 11728  		v.Type = x.Type
 11729  		v.AddArg(x)
 11730  		return true
 11731  	}
 11732  	// match: (Trunc64to16 (And64 (Const64 [y]) x))
 11733  	// cond: y&0xFFFF == 0xFFFF
 11734  	// result: (Trunc64to16 x)
 11735  	for {
 11736  		v_0 := v.Args[0]
 11737  		if v_0.Op != OpAnd64 {
 11738  			break
 11739  		}
 11740  		v_0_0 := v_0.Args[0]
 11741  		if v_0_0.Op != OpConst64 {
 11742  			break
 11743  		}
 11744  		y := v_0_0.AuxInt
 11745  		x := v_0.Args[1]
 11746  		if !(y&0xFFFF == 0xFFFF) {
 11747  			break
 11748  		}
 11749  		v.reset(OpTrunc64to16)
 11750  		v.AddArg(x)
 11751  		return true
 11752  	}
 11753  	return false
 11754  }
 11755  func rewriteValuegeneric_OpTrunc64to32(v *Value, config *Config) bool {
 11756  	b := v.Block
 11757  	_ = b
 11758  	// match: (Trunc64to32 (Const64 [c]))
 11759  	// cond:
 11760  	// result: (Const32  [int64(int32(c))])
 11761  	for {
 11762  		v_0 := v.Args[0]
 11763  		if v_0.Op != OpConst64 {
 11764  			break
 11765  		}
 11766  		c := v_0.AuxInt
 11767  		v.reset(OpConst32)
 11768  		v.AuxInt = int64(int32(c))
 11769  		return true
 11770  	}
 11771  	// match: (Trunc64to32 (ZeroExt8to64  x))
 11772  	// cond:
 11773  	// result: (ZeroExt8to32  x)
 11774  	for {
 11775  		v_0 := v.Args[0]
 11776  		if v_0.Op != OpZeroExt8to64 {
 11777  			break
 11778  		}
 11779  		x := v_0.Args[0]
 11780  		v.reset(OpZeroExt8to32)
 11781  		v.AddArg(x)
 11782  		return true
 11783  	}
 11784  	// match: (Trunc64to32 (ZeroExt16to64 x))
 11785  	// cond:
 11786  	// result: (ZeroExt16to32 x)
 11787  	for {
 11788  		v_0 := v.Args[0]
 11789  		if v_0.Op != OpZeroExt16to64 {
 11790  			break
 11791  		}
 11792  		x := v_0.Args[0]
 11793  		v.reset(OpZeroExt16to32)
 11794  		v.AddArg(x)
 11795  		return true
 11796  	}
 11797  	// match: (Trunc64to32 (ZeroExt32to64 x))
 11798  	// cond:
 11799  	// result: x
 11800  	for {
 11801  		v_0 := v.Args[0]
 11802  		if v_0.Op != OpZeroExt32to64 {
 11803  			break
 11804  		}
 11805  		x := v_0.Args[0]
 11806  		v.reset(OpCopy)
 11807  		v.Type = x.Type
 11808  		v.AddArg(x)
 11809  		return true
 11810  	}
 11811  	// match: (Trunc64to32 (SignExt8to64  x))
 11812  	// cond:
 11813  	// result: (SignExt8to32  x)
 11814  	for {
 11815  		v_0 := v.Args[0]
 11816  		if v_0.Op != OpSignExt8to64 {
 11817  			break
 11818  		}
 11819  		x := v_0.Args[0]
 11820  		v.reset(OpSignExt8to32)
 11821  		v.AddArg(x)
 11822  		return true
 11823  	}
 11824  	// match: (Trunc64to32 (SignExt16to64 x))
 11825  	// cond:
 11826  	// result: (SignExt16to32 x)
 11827  	for {
 11828  		v_0 := v.Args[0]
 11829  		if v_0.Op != OpSignExt16to64 {
 11830  			break
 11831  		}
 11832  		x := v_0.Args[0]
 11833  		v.reset(OpSignExt16to32)
 11834  		v.AddArg(x)
 11835  		return true
 11836  	}
 11837  	// match: (Trunc64to32 (SignExt32to64 x))
 11838  	// cond:
 11839  	// result: x
 11840  	for {
 11841  		v_0 := v.Args[0]
 11842  		if v_0.Op != OpSignExt32to64 {
 11843  			break
 11844  		}
 11845  		x := v_0.Args[0]
 11846  		v.reset(OpCopy)
 11847  		v.Type = x.Type
 11848  		v.AddArg(x)
 11849  		return true
 11850  	}
 11851  	// match: (Trunc64to32 (And64 (Const64 [y]) x))
 11852  	// cond: y&0xFFFFFFFF == 0xFFFFFFFF
 11853  	// result: (Trunc64to32 x)
 11854  	for {
 11855  		v_0 := v.Args[0]
 11856  		if v_0.Op != OpAnd64 {
 11857  			break
 11858  		}
 11859  		v_0_0 := v_0.Args[0]
 11860  		if v_0_0.Op != OpConst64 {
 11861  			break
 11862  		}
 11863  		y := v_0_0.AuxInt
 11864  		x := v_0.Args[1]
 11865  		if !(y&0xFFFFFFFF == 0xFFFFFFFF) {
 11866  			break
 11867  		}
 11868  		v.reset(OpTrunc64to32)
 11869  		v.AddArg(x)
 11870  		return true
 11871  	}
 11872  	return false
 11873  }
 11874  func rewriteValuegeneric_OpTrunc64to8(v *Value, config *Config) bool {
 11875  	b := v.Block
 11876  	_ = b
 11877  	// match: (Trunc64to8  (Const64 [c]))
 11878  	// cond:
 11879  	// result: (Const8   [int64(int8(c))])
 11880  	for {
 11881  		v_0 := v.Args[0]
 11882  		if v_0.Op != OpConst64 {
 11883  			break
 11884  		}
 11885  		c := v_0.AuxInt
 11886  		v.reset(OpConst8)
 11887  		v.AuxInt = int64(int8(c))
 11888  		return true
 11889  	}
 11890  	// match: (Trunc64to8  (ZeroExt8to64  x))
 11891  	// cond:
 11892  	// result: x
 11893  	for {
 11894  		v_0 := v.Args[0]
 11895  		if v_0.Op != OpZeroExt8to64 {
 11896  			break
 11897  		}
 11898  		x := v_0.Args[0]
 11899  		v.reset(OpCopy)
 11900  		v.Type = x.Type
 11901  		v.AddArg(x)
 11902  		return true
 11903  	}
 11904  	// match: (Trunc64to8  (SignExt8to64  x))
 11905  	// cond:
 11906  	// result: x
 11907  	for {
 11908  		v_0 := v.Args[0]
 11909  		if v_0.Op != OpSignExt8to64 {
 11910  			break
 11911  		}
 11912  		x := v_0.Args[0]
 11913  		v.reset(OpCopy)
 11914  		v.Type = x.Type
 11915  		v.AddArg(x)
 11916  		return true
 11917  	}
 11918  	// match: (Trunc64to8  (And64 (Const64 [y]) x))
 11919  	// cond: y&0xFF == 0xFF
 11920  	// result: (Trunc64to8 x)
 11921  	for {
 11922  		v_0 := v.Args[0]
 11923  		if v_0.Op != OpAnd64 {
 11924  			break
 11925  		}
 11926  		v_0_0 := v_0.Args[0]
 11927  		if v_0_0.Op != OpConst64 {
 11928  			break
 11929  		}
 11930  		y := v_0_0.AuxInt
 11931  		x := v_0.Args[1]
 11932  		if !(y&0xFF == 0xFF) {
 11933  			break
 11934  		}
 11935  		v.reset(OpTrunc64to8)
 11936  		v.AddArg(x)
 11937  		return true
 11938  	}
 11939  	return false
 11940  }
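        // The Xor rules (all widths) canonicalize a constant operand to the
        // left, and apply x^x = 0, 0^x = x, and the cancellation
        // x ^ (x ^ y) = y in its various argument orders.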
 11941  func rewriteValuegeneric_OpXor16(v *Value, config *Config) bool {
 11942  	b := v.Block
 11943  	_ = b
 11944  	// match: (Xor16 x (Const16 <t> [c]))
 11945  	// cond: x.Op != OpConst16
 11946  	// result: (Xor16 (Const16 <t> [c]) x)
 11947  	for {
 11948  		x := v.Args[0]
 11949  		v_1 := v.Args[1]
 11950  		if v_1.Op != OpConst16 {
 11951  			break
 11952  		}
 11953  		t := v_1.Type
 11954  		c := v_1.AuxInt
 11955  		if !(x.Op != OpConst16) {
 11956  			break
 11957  		}
 11958  		v.reset(OpXor16)
 11959  		v0 := b.NewValue0(v.Pos, OpConst16, t)
 11960  		v0.AuxInt = c
 11961  		v.AddArg(v0)
 11962  		v.AddArg(x)
 11963  		return true
 11964  	}
 11965  	// match: (Xor16 x x)
 11966  	// cond:
 11967  	// result: (Const16 [0])
 11968  	for {
 11969  		x := v.Args[0]
 11970  		if x != v.Args[1] {
 11971  			break
 11972  		}
 11973  		v.reset(OpConst16)
 11974  		v.AuxInt = 0
 11975  		return true
 11976  	}
 11977  	// match: (Xor16 (Const16 [0]) x)
 11978  	// cond:
 11979  	// result: x
 11980  	for {
 11981  		v_0 := v.Args[0]
 11982  		if v_0.Op != OpConst16 {
 11983  			break
 11984  		}
 11985  		if v_0.AuxInt != 0 {
 11986  			break
 11987  		}
 11988  		x := v.Args[1]
 11989  		v.reset(OpCopy)
 11990  		v.Type = x.Type
 11991  		v.AddArg(x)
 11992  		return true
 11993  	}
 11994  	// match: (Xor16 x (Xor16 x y))
 11995  	// cond:
 11996  	// result: y
 11997  	for {
 11998  		x := v.Args[0]
 11999  		v_1 := v.Args[1]
 12000  		if v_1.Op != OpXor16 {
 12001  			break
 12002  		}
 12003  		if x != v_1.Args[0] {
 12004  			break
 12005  		}
 12006  		y := v_1.Args[1]
 12007  		v.reset(OpCopy)
 12008  		v.Type = y.Type
 12009  		v.AddArg(y)
 12010  		return true
 12011  	}
 12012  	// match: (Xor16 x (Xor16 y x))
 12013  	// cond:
 12014  	// result: y
 12015  	for {
 12016  		x := v.Args[0]
 12017  		v_1 := v.Args[1]
 12018  		if v_1.Op != OpXor16 {
 12019  			break
 12020  		}
 12021  		y := v_1.Args[0]
 12022  		if x != v_1.Args[1] {
 12023  			break
 12024  		}
 12025  		v.reset(OpCopy)
 12026  		v.Type = y.Type
 12027  		v.AddArg(y)
 12028  		return true
 12029  	}
 12030  	// match: (Xor16 (Xor16 x y) x)
 12031  	// cond:
 12032  	// result: y
 12033  	for {
 12034  		v_0 := v.Args[0]
 12035  		if v_0.Op != OpXor16 {
 12036  			break
 12037  		}
 12038  		x := v_0.Args[0]
 12039  		y := v_0.Args[1]
 12040  		if x != v.Args[1] {
 12041  			break
 12042  		}
 12043  		v.reset(OpCopy)
 12044  		v.Type = y.Type
 12045  		v.AddArg(y)
 12046  		return true
 12047  	}
 12048  	// match: (Xor16 (Xor16 x y) y)
 12049  	// cond:
 12050  	// result: x
 12051  	for {
 12052  		v_0 := v.Args[0]
 12053  		if v_0.Op != OpXor16 {
 12054  			break
 12055  		}
 12056  		x := v_0.Args[0]
 12057  		y := v_0.Args[1]
 12058  		if y != v.Args[1] {
 12059  			break
 12060  		}
 12061  		v.reset(OpCopy)
 12062  		v.Type = x.Type
 12063  		v.AddArg(x)
 12064  		return true
 12065  	}
 12066  	return false
 12067  }
 12068  func rewriteValuegeneric_OpXor32(v *Value, config *Config) bool {
 12069  	b := v.Block
 12070  	_ = b
 12071  	// match: (Xor32 x (Const32 <t> [c]))
 12072  	// cond: x.Op != OpConst32
 12073  	// result: (Xor32 (Const32 <t> [c]) x)
 12074  	for {
 12075  		x := v.Args[0]
 12076  		v_1 := v.Args[1]
 12077  		if v_1.Op != OpConst32 {
 12078  			break
 12079  		}
 12080  		t := v_1.Type
 12081  		c := v_1.AuxInt
 12082  		if !(x.Op != OpConst32) {
 12083  			break
 12084  		}
 12085  		v.reset(OpXor32)
 12086  		v0 := b.NewValue0(v.Pos, OpConst32, t)
 12087  		v0.AuxInt = c
 12088  		v.AddArg(v0)
 12089  		v.AddArg(x)
 12090  		return true
 12091  	}
 12092  	// match: (Xor32 x x)
 12093  	// cond:
 12094  	// result: (Const32 [0])
 12095  	for {
 12096  		x := v.Args[0]
 12097  		if x != v.Args[1] {
 12098  			break
 12099  		}
 12100  		v.reset(OpConst32)
 12101  		v.AuxInt = 0
 12102  		return true
 12103  	}
 12104  	// match: (Xor32 (Const32 [0]) x)
 12105  	// cond:
 12106  	// result: x
 12107  	for {
 12108  		v_0 := v.Args[0]
 12109  		if v_0.Op != OpConst32 {
 12110  			break
 12111  		}
 12112  		if v_0.AuxInt != 0 {
 12113  			break
 12114  		}
 12115  		x := v.Args[1]
 12116  		v.reset(OpCopy)
 12117  		v.Type = x.Type
 12118  		v.AddArg(x)
 12119  		return true
 12120  	}
 12121  	// match: (Xor32 x (Xor32 x y))
 12122  	// cond:
 12123  	// result: y
 12124  	for {
 12125  		x := v.Args[0]
 12126  		v_1 := v.Args[1]
 12127  		if v_1.Op != OpXor32 {
 12128  			break
 12129  		}
 12130  		if x != v_1.Args[0] {
 12131  			break
 12132  		}
 12133  		y := v_1.Args[1]
 12134  		v.reset(OpCopy)
 12135  		v.Type = y.Type
 12136  		v.AddArg(y)
 12137  		return true
 12138  	}
 12139  	// match: (Xor32 x (Xor32 y x))
 12140  	// cond:
 12141  	// result: y
 12142  	for {
 12143  		x := v.Args[0]
 12144  		v_1 := v.Args[1]
 12145  		if v_1.Op != OpXor32 {
 12146  			break
 12147  		}
 12148  		y := v_1.Args[0]
 12149  		if x != v_1.Args[1] {
 12150  			break
 12151  		}
 12152  		v.reset(OpCopy)
 12153  		v.Type = y.Type
 12154  		v.AddArg(y)
 12155  		return true
 12156  	}
 12157  	// match: (Xor32 (Xor32 x y) x)
 12158  	// cond:
 12159  	// result: y
 12160  	for {
 12161  		v_0 := v.Args[0]
 12162  		if v_0.Op != OpXor32 {
 12163  			break
 12164  		}
 12165  		x := v_0.Args[0]
 12166  		y := v_0.Args[1]
 12167  		if x != v.Args[1] {
 12168  			break
 12169  		}
 12170  		v.reset(OpCopy)
 12171  		v.Type = y.Type
 12172  		v.AddArg(y)
 12173  		return true
 12174  	}
 12175  	// match: (Xor32 (Xor32 x y) y)
 12176  	// cond:
 12177  	// result: x
 12178  	for {
 12179  		v_0 := v.Args[0]
 12180  		if v_0.Op != OpXor32 {
 12181  			break
 12182  		}
 12183  		x := v_0.Args[0]
 12184  		y := v_0.Args[1]
 12185  		if y != v.Args[1] {
 12186  			break
 12187  		}
 12188  		v.reset(OpCopy)
 12189  		v.Type = x.Type
 12190  		v.AddArg(x)
 12191  		return true
 12192  	}
 12193  	return false
 12194  }
 12195  func rewriteValuegeneric_OpXor64(v *Value, config *Config) bool {
 12196  	b := v.Block
 12197  	_ = b
 12198  	// match: (Xor64 x (Const64 <t> [c]))
 12199  	// cond: x.Op != OpConst64
 12200  	// result: (Xor64 (Const64 <t> [c]) x)
 12201  	for {
 12202  		x := v.Args[0]
 12203  		v_1 := v.Args[1]
 12204  		if v_1.Op != OpConst64 {
 12205  			break
 12206  		}
 12207  		t := v_1.Type
 12208  		c := v_1.AuxInt
 12209  		if !(x.Op != OpConst64) {
 12210  			break
 12211  		}
 12212  		v.reset(OpXor64)
 12213  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 12214  		v0.AuxInt = c
 12215  		v.AddArg(v0)
 12216  		v.AddArg(x)
 12217  		return true
 12218  	}
 12219  	// match: (Xor64 x x)
 12220  	// cond:
 12221  	// result: (Const64 [0])
 12222  	for {
 12223  		x := v.Args[0]
 12224  		if x != v.Args[1] {
 12225  			break
 12226  		}
 12227  		v.reset(OpConst64)
 12228  		v.AuxInt = 0
 12229  		return true
 12230  	}
 12231  	// match: (Xor64 (Const64 [0]) x)
 12232  	// cond:
 12233  	// result: x
 12234  	for {
 12235  		v_0 := v.Args[0]
 12236  		if v_0.Op != OpConst64 {
 12237  			break
 12238  		}
 12239  		if v_0.AuxInt != 0 {
 12240  			break
 12241  		}
 12242  		x := v.Args[1]
 12243  		v.reset(OpCopy)
 12244  		v.Type = x.Type
 12245  		v.AddArg(x)
 12246  		return true
 12247  	}
 12248  	// match: (Xor64 x (Xor64 x y))
 12249  	// cond:
 12250  	// result: y
 12251  	for {
 12252  		x := v.Args[0]
 12253  		v_1 := v.Args[1]
 12254  		if v_1.Op != OpXor64 {
 12255  			break
 12256  		}
 12257  		if x != v_1.Args[0] {
 12258  			break
 12259  		}
 12260  		y := v_1.Args[1]
 12261  		v.reset(OpCopy)
 12262  		v.Type = y.Type
 12263  		v.AddArg(y)
 12264  		return true
 12265  	}
 12266  	// match: (Xor64 x (Xor64 y x))
 12267  	// cond:
 12268  	// result: y
 12269  	for {
 12270  		x := v.Args[0]
 12271  		v_1 := v.Args[1]
 12272  		if v_1.Op != OpXor64 {
 12273  			break
 12274  		}
 12275  		y := v_1.Args[0]
 12276  		if x != v_1.Args[1] {
 12277  			break
 12278  		}
 12279  		v.reset(OpCopy)
 12280  		v.Type = y.Type
 12281  		v.AddArg(y)
 12282  		return true
 12283  	}
 12284  	// match: (Xor64 (Xor64 x y) x)
 12285  	// cond:
 12286  	// result: y
 12287  	for {
 12288  		v_0 := v.Args[0]
 12289  		if v_0.Op != OpXor64 {
 12290  			break
 12291  		}
 12292  		x := v_0.Args[0]
 12293  		y := v_0.Args[1]
 12294  		if x != v.Args[1] {
 12295  			break
 12296  		}
 12297  		v.reset(OpCopy)
 12298  		v.Type = y.Type
 12299  		v.AddArg(y)
 12300  		return true
 12301  	}
 12302  	// match: (Xor64 (Xor64 x y) y)
 12303  	// cond:
 12304  	// result: x
 12305  	for {
 12306  		v_0 := v.Args[0]
 12307  		if v_0.Op != OpXor64 {
 12308  			break
 12309  		}
 12310  		x := v_0.Args[0]
 12311  		y := v_0.Args[1]
 12312  		if y != v.Args[1] {
 12313  			break
 12314  		}
 12315  		v.reset(OpCopy)
 12316  		v.Type = x.Type
 12317  		v.AddArg(x)
 12318  		return true
 12319  	}
 12320  	return false
 12321  }
 12322  func rewriteValuegeneric_OpXor8(v *Value, config *Config) bool {
 12323  	b := v.Block
 12324  	_ = b
 12325  	// match: (Xor8  x (Const8  <t> [c]))
 12326  	// cond: x.Op != OpConst8
 12327  	// result: (Xor8  (Const8  <t> [c]) x)
 12328  	for {
 12329  		x := v.Args[0]
 12330  		v_1 := v.Args[1]
 12331  		if v_1.Op != OpConst8 {
 12332  			break
 12333  		}
 12334  		t := v_1.Type
 12335  		c := v_1.AuxInt
 12336  		if !(x.Op != OpConst8) {
 12337  			break
 12338  		}
 12339  		v.reset(OpXor8)
 12340  		v0 := b.NewValue0(v.Pos, OpConst8, t)
 12341  		v0.AuxInt = c
 12342  		v.AddArg(v0)
 12343  		v.AddArg(x)
 12344  		return true
 12345  	}
 12346  	// match: (Xor8  x x)
 12347  	// cond:
 12348  	// result: (Const8  [0])
 12349  	for {
 12350  		x := v.Args[0]
 12351  		if x != v.Args[1] {
 12352  			break
 12353  		}
 12354  		v.reset(OpConst8)
 12355  		v.AuxInt = 0
 12356  		return true
 12357  	}
 12358  	// match: (Xor8  (Const8  [0]) x)
 12359  	// cond:
 12360  	// result: x
 12361  	for {
 12362  		v_0 := v.Args[0]
 12363  		if v_0.Op != OpConst8 {
 12364  			break
 12365  		}
 12366  		if v_0.AuxInt != 0 {
 12367  			break
 12368  		}
 12369  		x := v.Args[1]
 12370  		v.reset(OpCopy)
 12371  		v.Type = x.Type
 12372  		v.AddArg(x)
 12373  		return true
 12374  	}
 12375  	// match: (Xor8  x (Xor8  x y))
 12376  	// cond:
 12377  	// result: y
 12378  	for {
 12379  		x := v.Args[0]
 12380  		v_1 := v.Args[1]
 12381  		if v_1.Op != OpXor8 {
 12382  			break
 12383  		}
 12384  		if x != v_1.Args[0] {
 12385  			break
 12386  		}
 12387  		y := v_1.Args[1]
 12388  		v.reset(OpCopy)
 12389  		v.Type = y.Type
 12390  		v.AddArg(y)
 12391  		return true
 12392  	}
 12393  	// match: (Xor8  x (Xor8  y x))
 12394  	// cond:
 12395  	// result: y
 12396  	for {
 12397  		x := v.Args[0]
 12398  		v_1 := v.Args[1]
 12399  		if v_1.Op != OpXor8 {
 12400  			break
 12401  		}
 12402  		y := v_1.Args[0]
 12403  		if x != v_1.Args[1] {
 12404  			break
 12405  		}
 12406  		v.reset(OpCopy)
 12407  		v.Type = y.Type
 12408  		v.AddArg(y)
 12409  		return true
 12410  	}
 12411  	// match: (Xor8  (Xor8  x y) x)
 12412  	// cond:
 12413  	// result: y
 12414  	for {
 12415  		v_0 := v.Args[0]
 12416  		if v_0.Op != OpXor8 {
 12417  			break
 12418  		}
 12419  		x := v_0.Args[0]
 12420  		y := v_0.Args[1]
 12421  		if x != v.Args[1] {
 12422  			break
 12423  		}
 12424  		v.reset(OpCopy)
 12425  		v.Type = y.Type
 12426  		v.AddArg(y)
 12427  		return true
 12428  	}
 12429  	// match: (Xor8  (Xor8  x y) y)
 12430  	// cond:
 12431  	// result: x
 12432  	for {
 12433  		v_0 := v.Args[0]
 12434  		if v_0.Op != OpXor8 {
 12435  			break
 12436  		}
 12437  		x := v_0.Args[0]
 12438  		y := v_0.Args[1]
 12439  		if y != v.Args[1] {
 12440  			break
 12441  		}
 12442  		v.reset(OpCopy)
 12443  		v.Type = x.Type
 12444  		v.AddArg(x)
 12445  		return true
 12446  	}
 12447  	return false
 12448  }
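        // Zeroing memory that was just returned by runtime.newobject is
        // redundant, since newobject already returns zeroed memory. The
        // condition recognizes newobject's result as the value loaded from the
        // call's result slot, at offset FixedFrameSize+PtrSize from SP, in the
        // same memory state that the Zero itself observes.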
 12449  func rewriteValuegeneric_OpZero(v *Value, config *Config) bool {
 12450  	b := v.Block
 12451  	_ = b
 12452  	// match: (Zero (Load (OffPtr [c] (SP)) mem) mem)
 12453  	// cond: mem.Op == OpStaticCall 	&& isSameSym(mem.Aux, "runtime.newobject") 	&& c == config.ctxt.FixedFrameSize() + config.PtrSize
 12454  	// result: mem
 12455  	for {
 12456  		v_0 := v.Args[0]
 12457  		if v_0.Op != OpLoad {
 12458  			break
 12459  		}
 12460  		v_0_0 := v_0.Args[0]
 12461  		if v_0_0.Op != OpOffPtr {
 12462  			break
 12463  		}
 12464  		c := v_0_0.AuxInt
 12465  		v_0_0_0 := v_0_0.Args[0]
 12466  		if v_0_0_0.Op != OpSP {
 12467  			break
 12468  		}
 12469  		mem := v_0.Args[1]
 12470  		if mem != v.Args[1] {
 12471  			break
 12472  		}
 12473  		if !(mem.Op == OpStaticCall && isSameSym(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize()+config.PtrSize) {
 12474  			break
 12475  		}
 12476  		v.reset(OpCopy)
 12477  		v.Type = mem.Type
 12478  		v.AddArg(mem)
 12479  		return true
 12480  	}
 12481  	return false
 12482  }
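        // The ZeroExt rules fold extension of a constant (using the unsigned
        // value of the narrower operand) and remove a zero-extension of a
        // truncation when the truncated value came from an unsigned right shift
        // by at least enough bits to have already cleared everything the
        // truncation would drop; in that case the wide value is reused as is.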
 12483  func rewriteValuegeneric_OpZeroExt16to32(v *Value, config *Config) bool {
 12484  	b := v.Block
 12485  	_ = b
 12486  	// match: (ZeroExt16to32 (Const16 [c]))
 12487  	// cond:
 12488  	// result: (Const32 [int64(uint16(c))])
 12489  	for {
 12490  		v_0 := v.Args[0]
 12491  		if v_0.Op != OpConst16 {
 12492  			break
 12493  		}
 12494  		c := v_0.AuxInt
 12495  		v.reset(OpConst32)
 12496  		v.AuxInt = int64(uint16(c))
 12497  		return true
 12498  	}
 12499  	// match: (ZeroExt16to32 (Trunc32to16 x:(Rsh32Ux64 _ (Const64 [s]))))
 12500  	// cond: s >= 16
 12501  	// result: x
 12502  	for {
 12503  		v_0 := v.Args[0]
 12504  		if v_0.Op != OpTrunc32to16 {
 12505  			break
 12506  		}
 12507  		x := v_0.Args[0]
 12508  		if x.Op != OpRsh32Ux64 {
 12509  			break
 12510  		}
 12511  		x_1 := x.Args[1]
 12512  		if x_1.Op != OpConst64 {
 12513  			break
 12514  		}
 12515  		s := x_1.AuxInt
 12516  		if !(s >= 16) {
 12517  			break
 12518  		}
 12519  		v.reset(OpCopy)
 12520  		v.Type = x.Type
 12521  		v.AddArg(x)
 12522  		return true
 12523  	}
 12524  	return false
 12525  }
 12526  func rewriteValuegeneric_OpZeroExt16to64(v *Value, config *Config) bool {
 12527  	b := v.Block
 12528  	_ = b
 12529  	// match: (ZeroExt16to64 (Const16 [c]))
 12530  	// cond:
 12531  	// result: (Const64 [int64(uint16(c))])
 12532  	for {
 12533  		v_0 := v.Args[0]
 12534  		if v_0.Op != OpConst16 {
 12535  			break
 12536  		}
 12537  		c := v_0.AuxInt
 12538  		v.reset(OpConst64)
 12539  		v.AuxInt = int64(uint16(c))
 12540  		return true
 12541  	}
 12542  	// match: (ZeroExt16to64 (Trunc64to16 x:(Rsh64Ux64 _ (Const64 [s]))))
 12543  	// cond: s >= 48
 12544  	// result: x
 12545  	for {
 12546  		v_0 := v.Args[0]
 12547  		if v_0.Op != OpTrunc64to16 {
 12548  			break
 12549  		}
 12550  		x := v_0.Args[0]
 12551  		if x.Op != OpRsh64Ux64 {
 12552  			break
 12553  		}
 12554  		x_1 := x.Args[1]
 12555  		if x_1.Op != OpConst64 {
 12556  			break
 12557  		}
 12558  		s := x_1.AuxInt
 12559  		if !(s >= 48) {
 12560  			break
 12561  		}
 12562  		v.reset(OpCopy)
 12563  		v.Type = x.Type
 12564  		v.AddArg(x)
 12565  		return true
 12566  	}
 12567  	return false
 12568  }
 12569  func rewriteValuegeneric_OpZeroExt32to64(v *Value, config *Config) bool {
 12570  	b := v.Block
 12571  	_ = b
 12572  	// match: (ZeroExt32to64 (Const32 [c]))
 12573  	// cond:
 12574  	// result: (Const64 [int64(uint32(c))])
 12575  	for {
 12576  		v_0 := v.Args[0]
 12577  		if v_0.Op != OpConst32 {
 12578  			break
 12579  		}
 12580  		c := v_0.AuxInt
 12581  		v.reset(OpConst64)
 12582  		v.AuxInt = int64(uint32(c))
 12583  		return true
 12584  	}
 12585  	// match: (ZeroExt32to64 (Trunc64to32 x:(Rsh64Ux64 _ (Const64 [s]))))
 12586  	// cond: s >= 32
 12587  	// result: x
 12588  	for {
 12589  		v_0 := v.Args[0]
 12590  		if v_0.Op != OpTrunc64to32 {
 12591  			break
 12592  		}
 12593  		x := v_0.Args[0]
 12594  		if x.Op != OpRsh64Ux64 {
 12595  			break
 12596  		}
 12597  		x_1 := x.Args[1]
 12598  		if x_1.Op != OpConst64 {
 12599  			break
 12600  		}
 12601  		s := x_1.AuxInt
 12602  		if !(s >= 32) {
 12603  			break
 12604  		}
 12605  		v.reset(OpCopy)
 12606  		v.Type = x.Type
 12607  		v.AddArg(x)
 12608  		return true
 12609  	}
 12610  	return false
 12611  }
 12612  func rewriteValuegeneric_OpZeroExt8to16(v *Value, config *Config) bool {
 12613  	b := v.Block
 12614  	_ = b
 12615  	// match: (ZeroExt8to16  (Const8  [c]))
 12616  	// cond:
 12617  	// result: (Const16 [int64( uint8(c))])
 12618  	for {
 12619  		v_0 := v.Args[0]
 12620  		if v_0.Op != OpConst8 {
 12621  			break
 12622  		}
 12623  		c := v_0.AuxInt
 12624  		v.reset(OpConst16)
 12625  		v.AuxInt = int64(uint8(c))
 12626  		return true
 12627  	}
 12628  	// match: (ZeroExt8to16  (Trunc16to8  x:(Rsh16Ux64 _ (Const64 [s]))))
 12629  	// cond: s >= 8
 12630  	// result: x
 12631  	for {
 12632  		v_0 := v.Args[0]
 12633  		if v_0.Op != OpTrunc16to8 {
 12634  			break
 12635  		}
 12636  		x := v_0.Args[0]
 12637  		if x.Op != OpRsh16Ux64 {
 12638  			break
 12639  		}
 12640  		x_1 := x.Args[1]
 12641  		if x_1.Op != OpConst64 {
 12642  			break
 12643  		}
 12644  		s := x_1.AuxInt
 12645  		if !(s >= 8) {
 12646  			break
 12647  		}
 12648  		v.reset(OpCopy)
 12649  		v.Type = x.Type
 12650  		v.AddArg(x)
 12651  		return true
 12652  	}
 12653  	return false
 12654  }
 12655  func rewriteValuegeneric_OpZeroExt8to32(v *Value, config *Config) bool {
 12656  	b := v.Block
 12657  	_ = b
 12658  	// match: (ZeroExt8to32  (Const8  [c]))
 12659  	// cond:
 12660  	// result: (Const32 [int64( uint8(c))])
 12661  	for {
 12662  		v_0 := v.Args[0]
 12663  		if v_0.Op != OpConst8 {
 12664  			break
 12665  		}
 12666  		c := v_0.AuxInt
 12667  		v.reset(OpConst32)
 12668  		v.AuxInt = int64(uint8(c))
 12669  		return true
 12670  	}
 12671  	// match: (ZeroExt8to32  (Trunc32to8  x:(Rsh32Ux64 _ (Const64 [s]))))
 12672  	// cond: s >= 24
 12673  	// result: x
 12674  	for {
 12675  		v_0 := v.Args[0]
 12676  		if v_0.Op != OpTrunc32to8 {
 12677  			break
 12678  		}
 12679  		x := v_0.Args[0]
 12680  		if x.Op != OpRsh32Ux64 {
 12681  			break
 12682  		}
 12683  		x_1 := x.Args[1]
 12684  		if x_1.Op != OpConst64 {
 12685  			break
 12686  		}
 12687  		s := x_1.AuxInt
 12688  		if !(s >= 24) {
 12689  			break
 12690  		}
 12691  		v.reset(OpCopy)
 12692  		v.Type = x.Type
 12693  		v.AddArg(x)
 12694  		return true
 12695  	}
 12696  	return false
 12697  }
 12698  func rewriteValuegeneric_OpZeroExt8to64(v *Value, config *Config) bool {
 12699  	b := v.Block
 12700  	_ = b
 12701  	// match: (ZeroExt8to64  (Const8  [c]))
 12702  	// cond:
 12703  	// result: (Const64 [int64( uint8(c))])
 12704  	for {
 12705  		v_0 := v.Args[0]
 12706  		if v_0.Op != OpConst8 {
 12707  			break
 12708  		}
 12709  		c := v_0.AuxInt
 12710  		v.reset(OpConst64)
 12711  		v.AuxInt = int64(uint8(c))
 12712  		return true
 12713  	}
 12714  	// match: (ZeroExt8to64  (Trunc64to8  x:(Rsh64Ux64 _ (Const64 [s]))))
 12715  	// cond: s >= 56
 12716  	// result: x
 12717  	for {
 12718  		v_0 := v.Args[0]
 12719  		if v_0.Op != OpTrunc64to8 {
 12720  			break
 12721  		}
 12722  		x := v_0.Args[0]
 12723  		if x.Op != OpRsh64Ux64 {
 12724  			break
 12725  		}
 12726  		x_1 := x.Args[1]
 12727  		if x_1.Op != OpConst64 {
 12728  			break
 12729  		}
 12730  		s := x_1.AuxInt
 12731  		if !(s >= 56) {
 12732  			break
 12733  		}
 12734  		v.reset(OpCopy)
 12735  		v.Type = x.Type
 12736  		v.AddArg(x)
 12737  		return true
 12738  	}
 12739  	return false
 12740  }
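        // rewriteBlockgeneric applies the block-level rules. For an If block, a
        // Not on the control value is removed by swapping the successors, and a
        // constant boolean control turns the block into a First block, an
        // unconditional branch to the taken successor (the untaken edge is
        // presumably cleaned up by later dead-code passes).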
 12741  func rewriteBlockgeneric(b *Block, config *Config) bool {
 12742  	switch b.Kind {
 12743  	case BlockIf:
 12744  		// match: (If (Not cond) yes no)
 12745  		// cond:
 12746  		// result: (If cond no yes)
 12747  		for {
 12748  			v := b.Control
 12749  			if v.Op != OpNot {
 12750  				break
 12751  			}
 12752  			cond := v.Args[0]
 12753  			yes := b.Succs[0]
 12754  			no := b.Succs[1]
 12755  			b.Kind = BlockIf
 12756  			b.SetControl(cond)
 12757  			b.swapSuccessors()
 12758  			_ = no
 12759  			_ = yes
 12760  			return true
 12761  		}
 12762  		// match: (If (ConstBool [c]) yes no)
 12763  		// cond: c == 1
 12764  		// result: (First nil yes no)
 12765  		for {
 12766  			v := b.Control
 12767  			if v.Op != OpConstBool {
 12768  				break
 12769  			}
 12770  			c := v.AuxInt
 12771  			yes := b.Succs[0]
 12772  			no := b.Succs[1]
 12773  			if !(c == 1) {
 12774  				break
 12775  			}
 12776  			b.Kind = BlockFirst
 12777  			b.SetControl(nil)
 12778  			_ = yes
 12779  			_ = no
 12780  			return true
 12781  		}
 12782  		// match: (If (ConstBool [c]) yes no)
 12783  		// cond: c == 0
 12784  		// result: (First nil no yes)
 12785  		for {
 12786  			v := b.Control
 12787  			if v.Op != OpConstBool {
 12788  				break
 12789  			}
 12790  			c := v.AuxInt
 12791  			yes := b.Succs[0]
 12792  			no := b.Succs[1]
 12793  			if !(c == 0) {
 12794  				break
 12795  			}
 12796  			b.Kind = BlockFirst
 12797  			b.SetControl(nil)
 12798  			b.swapSuccessors()
 12799  			_ = no
 12800  			_ = yes
 12801  			return true
 12802  		}
 12803  	}
 12804  	return false
 12805  }