github.com/riscv/riscv-go@v0.0.0-20200123204226-124ebd6fcc8e/src/cmd/compile/internal/ssa/rewritegeneric.go

     1  // autogenerated from gen/generic.rules: do not edit!
     2  // generated with: cd gen; go run *.go
     3  
     4  package ssa
     5  
     6  import "math"
     7  
     8  var _ = math.MinInt8 // in case not otherwise used
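         // rewriteValuegeneric dispatches on v.Op to a per-opcode helper below.
         // Each helper tries its rules in order, rewrites v in place when a rule
         // matches, and reports whether it changed anything.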
     9  func rewriteValuegeneric(v *Value, config *Config) bool {
    10  	switch v.Op {
    11  	case OpAdd16:
    12  		return rewriteValuegeneric_OpAdd16(v, config)
    13  	case OpAdd32:
    14  		return rewriteValuegeneric_OpAdd32(v, config)
    15  	case OpAdd32F:
    16  		return rewriteValuegeneric_OpAdd32F(v, config)
    17  	case OpAdd64:
    18  		return rewriteValuegeneric_OpAdd64(v, config)
    19  	case OpAdd64F:
    20  		return rewriteValuegeneric_OpAdd64F(v, config)
    21  	case OpAdd8:
    22  		return rewriteValuegeneric_OpAdd8(v, config)
    23  	case OpAddPtr:
    24  		return rewriteValuegeneric_OpAddPtr(v, config)
    25  	case OpAnd16:
    26  		return rewriteValuegeneric_OpAnd16(v, config)
    27  	case OpAnd32:
    28  		return rewriteValuegeneric_OpAnd32(v, config)
    29  	case OpAnd64:
    30  		return rewriteValuegeneric_OpAnd64(v, config)
    31  	case OpAnd8:
    32  		return rewriteValuegeneric_OpAnd8(v, config)
    33  	case OpArg:
    34  		return rewriteValuegeneric_OpArg(v, config)
    35  	case OpArraySelect:
    36  		return rewriteValuegeneric_OpArraySelect(v, config)
    37  	case OpCom16:
    38  		return rewriteValuegeneric_OpCom16(v, config)
    39  	case OpCom32:
    40  		return rewriteValuegeneric_OpCom32(v, config)
    41  	case OpCom64:
    42  		return rewriteValuegeneric_OpCom64(v, config)
    43  	case OpCom8:
    44  		return rewriteValuegeneric_OpCom8(v, config)
    45  	case OpConstInterface:
    46  		return rewriteValuegeneric_OpConstInterface(v, config)
    47  	case OpConstSlice:
    48  		return rewriteValuegeneric_OpConstSlice(v, config)
    49  	case OpConstString:
    50  		return rewriteValuegeneric_OpConstString(v, config)
    51  	case OpConvert:
    52  		return rewriteValuegeneric_OpConvert(v, config)
    53  	case OpCvt32Fto64F:
    54  		return rewriteValuegeneric_OpCvt32Fto64F(v, config)
    55  	case OpCvt64Fto32F:
    56  		return rewriteValuegeneric_OpCvt64Fto32F(v, config)
    57  	case OpDiv32F:
    58  		return rewriteValuegeneric_OpDiv32F(v, config)
    59  	case OpDiv64:
    60  		return rewriteValuegeneric_OpDiv64(v, config)
    61  	case OpDiv64F:
    62  		return rewriteValuegeneric_OpDiv64F(v, config)
    63  	case OpDiv64u:
    64  		return rewriteValuegeneric_OpDiv64u(v, config)
    65  	case OpEq16:
    66  		return rewriteValuegeneric_OpEq16(v, config)
    67  	case OpEq32:
    68  		return rewriteValuegeneric_OpEq32(v, config)
    69  	case OpEq64:
    70  		return rewriteValuegeneric_OpEq64(v, config)
    71  	case OpEq8:
    72  		return rewriteValuegeneric_OpEq8(v, config)
    73  	case OpEqB:
    74  		return rewriteValuegeneric_OpEqB(v, config)
    75  	case OpEqInter:
    76  		return rewriteValuegeneric_OpEqInter(v, config)
    77  	case OpEqPtr:
    78  		return rewriteValuegeneric_OpEqPtr(v, config)
    79  	case OpEqSlice:
    80  		return rewriteValuegeneric_OpEqSlice(v, config)
    81  	case OpGeq16:
    82  		return rewriteValuegeneric_OpGeq16(v, config)
    83  	case OpGeq16U:
    84  		return rewriteValuegeneric_OpGeq16U(v, config)
    85  	case OpGeq32:
    86  		return rewriteValuegeneric_OpGeq32(v, config)
    87  	case OpGeq32U:
    88  		return rewriteValuegeneric_OpGeq32U(v, config)
    89  	case OpGeq64:
    90  		return rewriteValuegeneric_OpGeq64(v, config)
    91  	case OpGeq64U:
    92  		return rewriteValuegeneric_OpGeq64U(v, config)
    93  	case OpGeq8:
    94  		return rewriteValuegeneric_OpGeq8(v, config)
    95  	case OpGeq8U:
    96  		return rewriteValuegeneric_OpGeq8U(v, config)
    97  	case OpGreater16:
    98  		return rewriteValuegeneric_OpGreater16(v, config)
    99  	case OpGreater16U:
   100  		return rewriteValuegeneric_OpGreater16U(v, config)
   101  	case OpGreater32:
   102  		return rewriteValuegeneric_OpGreater32(v, config)
   103  	case OpGreater32U:
   104  		return rewriteValuegeneric_OpGreater32U(v, config)
   105  	case OpGreater64:
   106  		return rewriteValuegeneric_OpGreater64(v, config)
   107  	case OpGreater64U:
   108  		return rewriteValuegeneric_OpGreater64U(v, config)
   109  	case OpGreater8:
   110  		return rewriteValuegeneric_OpGreater8(v, config)
   111  	case OpGreater8U:
   112  		return rewriteValuegeneric_OpGreater8U(v, config)
   113  	case OpIMake:
   114  		return rewriteValuegeneric_OpIMake(v, config)
   115  	case OpIsInBounds:
   116  		return rewriteValuegeneric_OpIsInBounds(v, config)
   117  	case OpIsNonNil:
   118  		return rewriteValuegeneric_OpIsNonNil(v, config)
   119  	case OpIsSliceInBounds:
   120  		return rewriteValuegeneric_OpIsSliceInBounds(v, config)
   121  	case OpLeq16:
   122  		return rewriteValuegeneric_OpLeq16(v, config)
   123  	case OpLeq16U:
   124  		return rewriteValuegeneric_OpLeq16U(v, config)
   125  	case OpLeq32:
   126  		return rewriteValuegeneric_OpLeq32(v, config)
   127  	case OpLeq32U:
   128  		return rewriteValuegeneric_OpLeq32U(v, config)
   129  	case OpLeq64:
   130  		return rewriteValuegeneric_OpLeq64(v, config)
   131  	case OpLeq64U:
   132  		return rewriteValuegeneric_OpLeq64U(v, config)
   133  	case OpLeq8:
   134  		return rewriteValuegeneric_OpLeq8(v, config)
   135  	case OpLeq8U:
   136  		return rewriteValuegeneric_OpLeq8U(v, config)
   137  	case OpLess16:
   138  		return rewriteValuegeneric_OpLess16(v, config)
   139  	case OpLess16U:
   140  		return rewriteValuegeneric_OpLess16U(v, config)
   141  	case OpLess32:
   142  		return rewriteValuegeneric_OpLess32(v, config)
   143  	case OpLess32U:
   144  		return rewriteValuegeneric_OpLess32U(v, config)
   145  	case OpLess64:
   146  		return rewriteValuegeneric_OpLess64(v, config)
   147  	case OpLess64U:
   148  		return rewriteValuegeneric_OpLess64U(v, config)
   149  	case OpLess8:
   150  		return rewriteValuegeneric_OpLess8(v, config)
   151  	case OpLess8U:
   152  		return rewriteValuegeneric_OpLess8U(v, config)
   153  	case OpLoad:
   154  		return rewriteValuegeneric_OpLoad(v, config)
   155  	case OpLsh16x16:
   156  		return rewriteValuegeneric_OpLsh16x16(v, config)
   157  	case OpLsh16x32:
   158  		return rewriteValuegeneric_OpLsh16x32(v, config)
   159  	case OpLsh16x64:
   160  		return rewriteValuegeneric_OpLsh16x64(v, config)
   161  	case OpLsh16x8:
   162  		return rewriteValuegeneric_OpLsh16x8(v, config)
   163  	case OpLsh32x16:
   164  		return rewriteValuegeneric_OpLsh32x16(v, config)
   165  	case OpLsh32x32:
   166  		return rewriteValuegeneric_OpLsh32x32(v, config)
   167  	case OpLsh32x64:
   168  		return rewriteValuegeneric_OpLsh32x64(v, config)
   169  	case OpLsh32x8:
   170  		return rewriteValuegeneric_OpLsh32x8(v, config)
   171  	case OpLsh64x16:
   172  		return rewriteValuegeneric_OpLsh64x16(v, config)
   173  	case OpLsh64x32:
   174  		return rewriteValuegeneric_OpLsh64x32(v, config)
   175  	case OpLsh64x64:
   176  		return rewriteValuegeneric_OpLsh64x64(v, config)
   177  	case OpLsh64x8:
   178  		return rewriteValuegeneric_OpLsh64x8(v, config)
   179  	case OpLsh8x16:
   180  		return rewriteValuegeneric_OpLsh8x16(v, config)
   181  	case OpLsh8x32:
   182  		return rewriteValuegeneric_OpLsh8x32(v, config)
   183  	case OpLsh8x64:
   184  		return rewriteValuegeneric_OpLsh8x64(v, config)
   185  	case OpLsh8x8:
   186  		return rewriteValuegeneric_OpLsh8x8(v, config)
   187  	case OpMod16:
   188  		return rewriteValuegeneric_OpMod16(v, config)
   189  	case OpMod16u:
   190  		return rewriteValuegeneric_OpMod16u(v, config)
   191  	case OpMod32:
   192  		return rewriteValuegeneric_OpMod32(v, config)
   193  	case OpMod32u:
   194  		return rewriteValuegeneric_OpMod32u(v, config)
   195  	case OpMod64:
   196  		return rewriteValuegeneric_OpMod64(v, config)
   197  	case OpMod64u:
   198  		return rewriteValuegeneric_OpMod64u(v, config)
   199  	case OpMod8:
   200  		return rewriteValuegeneric_OpMod8(v, config)
   201  	case OpMod8u:
   202  		return rewriteValuegeneric_OpMod8u(v, config)
   203  	case OpMul16:
   204  		return rewriteValuegeneric_OpMul16(v, config)
   205  	case OpMul32:
   206  		return rewriteValuegeneric_OpMul32(v, config)
   207  	case OpMul32F:
   208  		return rewriteValuegeneric_OpMul32F(v, config)
   209  	case OpMul64:
   210  		return rewriteValuegeneric_OpMul64(v, config)
   211  	case OpMul64F:
   212  		return rewriteValuegeneric_OpMul64F(v, config)
   213  	case OpMul8:
   214  		return rewriteValuegeneric_OpMul8(v, config)
   215  	case OpNeg16:
   216  		return rewriteValuegeneric_OpNeg16(v, config)
   217  	case OpNeg32:
   218  		return rewriteValuegeneric_OpNeg32(v, config)
   219  	case OpNeg64:
   220  		return rewriteValuegeneric_OpNeg64(v, config)
   221  	case OpNeg8:
   222  		return rewriteValuegeneric_OpNeg8(v, config)
   223  	case OpNeq16:
   224  		return rewriteValuegeneric_OpNeq16(v, config)
   225  	case OpNeq32:
   226  		return rewriteValuegeneric_OpNeq32(v, config)
   227  	case OpNeq64:
   228  		return rewriteValuegeneric_OpNeq64(v, config)
   229  	case OpNeq8:
   230  		return rewriteValuegeneric_OpNeq8(v, config)
   231  	case OpNeqB:
   232  		return rewriteValuegeneric_OpNeqB(v, config)
   233  	case OpNeqInter:
   234  		return rewriteValuegeneric_OpNeqInter(v, config)
   235  	case OpNeqPtr:
   236  		return rewriteValuegeneric_OpNeqPtr(v, config)
   237  	case OpNeqSlice:
   238  		return rewriteValuegeneric_OpNeqSlice(v, config)
   239  	case OpNilCheck:
   240  		return rewriteValuegeneric_OpNilCheck(v, config)
   241  	case OpNot:
   242  		return rewriteValuegeneric_OpNot(v, config)
   243  	case OpOffPtr:
   244  		return rewriteValuegeneric_OpOffPtr(v, config)
   245  	case OpOr16:
   246  		return rewriteValuegeneric_OpOr16(v, config)
   247  	case OpOr32:
   248  		return rewriteValuegeneric_OpOr32(v, config)
   249  	case OpOr64:
   250  		return rewriteValuegeneric_OpOr64(v, config)
   251  	case OpOr8:
   252  		return rewriteValuegeneric_OpOr8(v, config)
   253  	case OpPhi:
   254  		return rewriteValuegeneric_OpPhi(v, config)
   255  	case OpPtrIndex:
   256  		return rewriteValuegeneric_OpPtrIndex(v, config)
   257  	case OpRsh16Ux16:
   258  		return rewriteValuegeneric_OpRsh16Ux16(v, config)
   259  	case OpRsh16Ux32:
   260  		return rewriteValuegeneric_OpRsh16Ux32(v, config)
   261  	case OpRsh16Ux64:
   262  		return rewriteValuegeneric_OpRsh16Ux64(v, config)
   263  	case OpRsh16Ux8:
   264  		return rewriteValuegeneric_OpRsh16Ux8(v, config)
   265  	case OpRsh16x16:
   266  		return rewriteValuegeneric_OpRsh16x16(v, config)
   267  	case OpRsh16x32:
   268  		return rewriteValuegeneric_OpRsh16x32(v, config)
   269  	case OpRsh16x64:
   270  		return rewriteValuegeneric_OpRsh16x64(v, config)
   271  	case OpRsh16x8:
   272  		return rewriteValuegeneric_OpRsh16x8(v, config)
   273  	case OpRsh32Ux16:
   274  		return rewriteValuegeneric_OpRsh32Ux16(v, config)
   275  	case OpRsh32Ux32:
   276  		return rewriteValuegeneric_OpRsh32Ux32(v, config)
   277  	case OpRsh32Ux64:
   278  		return rewriteValuegeneric_OpRsh32Ux64(v, config)
   279  	case OpRsh32Ux8:
   280  		return rewriteValuegeneric_OpRsh32Ux8(v, config)
   281  	case OpRsh32x16:
   282  		return rewriteValuegeneric_OpRsh32x16(v, config)
   283  	case OpRsh32x32:
   284  		return rewriteValuegeneric_OpRsh32x32(v, config)
   285  	case OpRsh32x64:
   286  		return rewriteValuegeneric_OpRsh32x64(v, config)
   287  	case OpRsh32x8:
   288  		return rewriteValuegeneric_OpRsh32x8(v, config)
   289  	case OpRsh64Ux16:
   290  		return rewriteValuegeneric_OpRsh64Ux16(v, config)
   291  	case OpRsh64Ux32:
   292  		return rewriteValuegeneric_OpRsh64Ux32(v, config)
   293  	case OpRsh64Ux64:
   294  		return rewriteValuegeneric_OpRsh64Ux64(v, config)
   295  	case OpRsh64Ux8:
   296  		return rewriteValuegeneric_OpRsh64Ux8(v, config)
   297  	case OpRsh64x16:
   298  		return rewriteValuegeneric_OpRsh64x16(v, config)
   299  	case OpRsh64x32:
   300  		return rewriteValuegeneric_OpRsh64x32(v, config)
   301  	case OpRsh64x64:
   302  		return rewriteValuegeneric_OpRsh64x64(v, config)
   303  	case OpRsh64x8:
   304  		return rewriteValuegeneric_OpRsh64x8(v, config)
   305  	case OpRsh8Ux16:
   306  		return rewriteValuegeneric_OpRsh8Ux16(v, config)
   307  	case OpRsh8Ux32:
   308  		return rewriteValuegeneric_OpRsh8Ux32(v, config)
   309  	case OpRsh8Ux64:
   310  		return rewriteValuegeneric_OpRsh8Ux64(v, config)
   311  	case OpRsh8Ux8:
   312  		return rewriteValuegeneric_OpRsh8Ux8(v, config)
   313  	case OpRsh8x16:
   314  		return rewriteValuegeneric_OpRsh8x16(v, config)
   315  	case OpRsh8x32:
   316  		return rewriteValuegeneric_OpRsh8x32(v, config)
   317  	case OpRsh8x64:
   318  		return rewriteValuegeneric_OpRsh8x64(v, config)
   319  	case OpRsh8x8:
   320  		return rewriteValuegeneric_OpRsh8x8(v, config)
   321  	case OpSignExt16to32:
   322  		return rewriteValuegeneric_OpSignExt16to32(v, config)
   323  	case OpSignExt16to64:
   324  		return rewriteValuegeneric_OpSignExt16to64(v, config)
   325  	case OpSignExt32to64:
   326  		return rewriteValuegeneric_OpSignExt32to64(v, config)
   327  	case OpSignExt8to16:
   328  		return rewriteValuegeneric_OpSignExt8to16(v, config)
   329  	case OpSignExt8to32:
   330  		return rewriteValuegeneric_OpSignExt8to32(v, config)
   331  	case OpSignExt8to64:
   332  		return rewriteValuegeneric_OpSignExt8to64(v, config)
   333  	case OpSliceCap:
   334  		return rewriteValuegeneric_OpSliceCap(v, config)
   335  	case OpSliceLen:
   336  		return rewriteValuegeneric_OpSliceLen(v, config)
   337  	case OpSlicePtr:
   338  		return rewriteValuegeneric_OpSlicePtr(v, config)
   339  	case OpSlicemask:
   340  		return rewriteValuegeneric_OpSlicemask(v, config)
   341  	case OpSqrt:
   342  		return rewriteValuegeneric_OpSqrt(v, config)
   343  	case OpStore:
   344  		return rewriteValuegeneric_OpStore(v, config)
   345  	case OpStringLen:
   346  		return rewriteValuegeneric_OpStringLen(v, config)
   347  	case OpStringPtr:
   348  		return rewriteValuegeneric_OpStringPtr(v, config)
   349  	case OpStructSelect:
   350  		return rewriteValuegeneric_OpStructSelect(v, config)
   351  	case OpSub16:
   352  		return rewriteValuegeneric_OpSub16(v, config)
   353  	case OpSub32:
   354  		return rewriteValuegeneric_OpSub32(v, config)
   355  	case OpSub32F:
   356  		return rewriteValuegeneric_OpSub32F(v, config)
   357  	case OpSub64:
   358  		return rewriteValuegeneric_OpSub64(v, config)
   359  	case OpSub64F:
   360  		return rewriteValuegeneric_OpSub64F(v, config)
   361  	case OpSub8:
   362  		return rewriteValuegeneric_OpSub8(v, config)
   363  	case OpTrunc16to8:
   364  		return rewriteValuegeneric_OpTrunc16to8(v, config)
   365  	case OpTrunc32to16:
   366  		return rewriteValuegeneric_OpTrunc32to16(v, config)
   367  	case OpTrunc32to8:
   368  		return rewriteValuegeneric_OpTrunc32to8(v, config)
   369  	case OpTrunc64to16:
   370  		return rewriteValuegeneric_OpTrunc64to16(v, config)
   371  	case OpTrunc64to32:
   372  		return rewriteValuegeneric_OpTrunc64to32(v, config)
   373  	case OpTrunc64to8:
   374  		return rewriteValuegeneric_OpTrunc64to8(v, config)
   375  	case OpXor16:
   376  		return rewriteValuegeneric_OpXor16(v, config)
   377  	case OpXor32:
   378  		return rewriteValuegeneric_OpXor32(v, config)
   379  	case OpXor64:
   380  		return rewriteValuegeneric_OpXor64(v, config)
   381  	case OpXor8:
   382  		return rewriteValuegeneric_OpXor8(v, config)
   383  	case OpZero:
   384  		return rewriteValuegeneric_OpZero(v, config)
   385  	case OpZeroExt16to32:
   386  		return rewriteValuegeneric_OpZeroExt16to32(v, config)
   387  	case OpZeroExt16to64:
   388  		return rewriteValuegeneric_OpZeroExt16to64(v, config)
   389  	case OpZeroExt32to64:
   390  		return rewriteValuegeneric_OpZeroExt32to64(v, config)
   391  	case OpZeroExt8to16:
   392  		return rewriteValuegeneric_OpZeroExt8to16(v, config)
   393  	case OpZeroExt8to32:
   394  		return rewriteValuegeneric_OpZeroExt8to32(v, config)
   395  	case OpZeroExt8to64:
   396  		return rewriteValuegeneric_OpZeroExt8to64(v, config)
   397  	}
   398  	return false
   399  }
   400  func rewriteValuegeneric_OpAdd16(v *Value, config *Config) bool {
   401  	b := v.Block
   402  	_ = b
   403  	// match: (Add16  (Const16 [c])  (Const16 [d]))
   404  	// cond:
   405  	// result: (Const16 [int64(int16(c+d))])
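         	// e.g. (Add16 (Const16 [30000]) (Const16 [10000])) -> (Const16 [-25536]):
         	// the int64(int16(...)) conversion keeps AuxInt sign-extended to 16 bits.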
   406  	for {
   407  		v_0 := v.Args[0]
   408  		if v_0.Op != OpConst16 {
   409  			break
   410  		}
   411  		c := v_0.AuxInt
   412  		v_1 := v.Args[1]
   413  		if v_1.Op != OpConst16 {
   414  			break
   415  		}
   416  		d := v_1.AuxInt
   417  		v.reset(OpConst16)
   418  		v.AuxInt = int64(int16(c + d))
   419  		return true
   420  	}
   421  	// match: (Add16 x (Const16 <t> [c]))
   422  	// cond: x.Op != OpConst16
   423  	// result: (Add16 (Const16 <t> [c]) x)
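         	// Canonicalization: move the constant into the first argument slot. The
         	// x.Op != OpConst16 condition stops the rule from endlessly swapping two
         	// constant operands.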
   424  	for {
   425  		x := v.Args[0]
   426  		v_1 := v.Args[1]
   427  		if v_1.Op != OpConst16 {
   428  			break
   429  		}
   430  		t := v_1.Type
   431  		c := v_1.AuxInt
   432  		if !(x.Op != OpConst16) {
   433  			break
   434  		}
   435  		v.reset(OpAdd16)
   436  		v0 := b.NewValue0(v.Pos, OpConst16, t)
   437  		v0.AuxInt = c
   438  		v.AddArg(v0)
   439  		v.AddArg(x)
   440  		return true
   441  	}
   442  	// match: (Add16 (Const16 [0]) x)
   443  	// cond:
   444  	// result: x
   445  	for {
   446  		v_0 := v.Args[0]
   447  		if v_0.Op != OpConst16 {
   448  			break
   449  		}
   450  		if v_0.AuxInt != 0 {
   451  			break
   452  		}
   453  		x := v.Args[1]
   454  		v.reset(OpCopy)
   455  		v.Type = x.Type
   456  		v.AddArg(x)
   457  		return true
   458  	}
   459  	return false
   460  }
   461  func rewriteValuegeneric_OpAdd32(v *Value, config *Config) bool {
   462  	b := v.Block
   463  	_ = b
   464  	// match: (Add32  (Const32 [c])  (Const32 [d]))
   465  	// cond:
   466  	// result: (Const32 [int64(int32(c+d))])
   467  	for {
   468  		v_0 := v.Args[0]
   469  		if v_0.Op != OpConst32 {
   470  			break
   471  		}
   472  		c := v_0.AuxInt
   473  		v_1 := v.Args[1]
   474  		if v_1.Op != OpConst32 {
   475  			break
   476  		}
   477  		d := v_1.AuxInt
   478  		v.reset(OpConst32)
   479  		v.AuxInt = int64(int32(c + d))
   480  		return true
   481  	}
   482  	// match: (Add32 x (Const32 <t> [c]))
   483  	// cond: x.Op != OpConst32
   484  	// result: (Add32 (Const32 <t> [c]) x)
   485  	for {
   486  		x := v.Args[0]
   487  		v_1 := v.Args[1]
   488  		if v_1.Op != OpConst32 {
   489  			break
   490  		}
   491  		t := v_1.Type
   492  		c := v_1.AuxInt
   493  		if !(x.Op != OpConst32) {
   494  			break
   495  		}
   496  		v.reset(OpAdd32)
   497  		v0 := b.NewValue0(v.Pos, OpConst32, t)
   498  		v0.AuxInt = c
   499  		v.AddArg(v0)
   500  		v.AddArg(x)
   501  		return true
   502  	}
   503  	// match: (Add32 (Const32 [0]) x)
   504  	// cond:
   505  	// result: x
   506  	for {
   507  		v_0 := v.Args[0]
   508  		if v_0.Op != OpConst32 {
   509  			break
   510  		}
   511  		if v_0.AuxInt != 0 {
   512  			break
   513  		}
   514  		x := v.Args[1]
   515  		v.reset(OpCopy)
   516  		v.Type = x.Type
   517  		v.AddArg(x)
   518  		return true
   519  	}
   520  	return false
   521  }
   522  func rewriteValuegeneric_OpAdd32F(v *Value, config *Config) bool {
   523  	b := v.Block
   524  	_ = b
   525  	// match: (Add32F (Const32F [c]) (Const32F [d]))
   526  	// cond:
   527  	// result: (Const32F [f2i(float64(i2f32(c) + i2f32(d)))])
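         	// Const32F stores its value in AuxInt as a float64 bit pattern: i2f32
         	// decodes each operand to float32, the add is done in float32, and f2i
         	// re-encodes the widened sum.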
   528  	for {
   529  		v_0 := v.Args[0]
   530  		if v_0.Op != OpConst32F {
   531  			break
   532  		}
   533  		c := v_0.AuxInt
   534  		v_1 := v.Args[1]
   535  		if v_1.Op != OpConst32F {
   536  			break
   537  		}
   538  		d := v_1.AuxInt
   539  		v.reset(OpConst32F)
   540  		v.AuxInt = f2i(float64(i2f32(c) + i2f32(d)))
   541  		return true
   542  	}
   543  	// match: (Add32F x (Const32F [0]))
   544  	// cond:
   545  	// result: x
   546  	for {
   547  		x := v.Args[0]
   548  		v_1 := v.Args[1]
   549  		if v_1.Op != OpConst32F {
   550  			break
   551  		}
   552  		if v_1.AuxInt != 0 {
   553  			break
   554  		}
   555  		v.reset(OpCopy)
   556  		v.Type = x.Type
   557  		v.AddArg(x)
   558  		return true
   559  	}
   560  	// match: (Add32F (Const32F [0]) x)
   561  	// cond:
   562  	// result: x
   563  	for {
   564  		v_0 := v.Args[0]
   565  		if v_0.Op != OpConst32F {
   566  			break
   567  		}
   568  		if v_0.AuxInt != 0 {
   569  			break
   570  		}
   571  		x := v.Args[1]
   572  		v.reset(OpCopy)
   573  		v.Type = x.Type
   574  		v.AddArg(x)
   575  		return true
   576  	}
   577  	return false
   578  }
   579  func rewriteValuegeneric_OpAdd64(v *Value, config *Config) bool {
   580  	b := v.Block
   581  	_ = b
   582  	// match: (Add64  (Const64 [c])  (Const64 [d]))
   583  	// cond:
   584  	// result: (Const64 [c+d])
   585  	for {
   586  		v_0 := v.Args[0]
   587  		if v_0.Op != OpConst64 {
   588  			break
   589  		}
   590  		c := v_0.AuxInt
   591  		v_1 := v.Args[1]
   592  		if v_1.Op != OpConst64 {
   593  			break
   594  		}
   595  		d := v_1.AuxInt
   596  		v.reset(OpConst64)
   597  		v.AuxInt = c + d
   598  		return true
   599  	}
   600  	// match: (Add64 x (Const64 <t> [c]))
   601  	// cond: x.Op != OpConst64
   602  	// result: (Add64 (Const64 <t> [c]) x)
   603  	for {
   604  		x := v.Args[0]
   605  		v_1 := v.Args[1]
   606  		if v_1.Op != OpConst64 {
   607  			break
   608  		}
   609  		t := v_1.Type
   610  		c := v_1.AuxInt
   611  		if !(x.Op != OpConst64) {
   612  			break
   613  		}
   614  		v.reset(OpAdd64)
   615  		v0 := b.NewValue0(v.Pos, OpConst64, t)
   616  		v0.AuxInt = c
   617  		v.AddArg(v0)
   618  		v.AddArg(x)
   619  		return true
   620  	}
   621  	// match: (Add64 (Const64 [0]) x)
   622  	// cond:
   623  	// result: x
   624  	for {
   625  		v_0 := v.Args[0]
   626  		if v_0.Op != OpConst64 {
   627  			break
   628  		}
   629  		if v_0.AuxInt != 0 {
   630  			break
   631  		}
   632  		x := v.Args[1]
   633  		v.reset(OpCopy)
   634  		v.Type = x.Type
   635  		v.AddArg(x)
   636  		return true
   637  	}
   638  	return false
   639  }
   640  func rewriteValuegeneric_OpAdd64F(v *Value, config *Config) bool {
   641  	b := v.Block
   642  	_ = b
   643  	// match: (Add64F (Const64F [c]) (Const64F [d]))
   644  	// cond:
   645  	// result: (Const64F [f2i(i2f(c) + i2f(d))])
   646  	for {
   647  		v_0 := v.Args[0]
   648  		if v_0.Op != OpConst64F {
   649  			break
   650  		}
   651  		c := v_0.AuxInt
   652  		v_1 := v.Args[1]
   653  		if v_1.Op != OpConst64F {
   654  			break
   655  		}
   656  		d := v_1.AuxInt
   657  		v.reset(OpConst64F)
   658  		v.AuxInt = f2i(i2f(c) + i2f(d))
   659  		return true
   660  	}
   661  	// match: (Add64F x (Const64F [0]))
   662  	// cond:
   663  	// result: x
   664  	for {
   665  		x := v.Args[0]
   666  		v_1 := v.Args[1]
   667  		if v_1.Op != OpConst64F {
   668  			break
   669  		}
   670  		if v_1.AuxInt != 0 {
   671  			break
   672  		}
   673  		v.reset(OpCopy)
   674  		v.Type = x.Type
   675  		v.AddArg(x)
   676  		return true
   677  	}
   678  	// match: (Add64F (Const64F [0]) x)
   679  	// cond:
   680  	// result: x
   681  	for {
   682  		v_0 := v.Args[0]
   683  		if v_0.Op != OpConst64F {
   684  			break
   685  		}
   686  		if v_0.AuxInt != 0 {
   687  			break
   688  		}
   689  		x := v.Args[1]
   690  		v.reset(OpCopy)
   691  		v.Type = x.Type
   692  		v.AddArg(x)
   693  		return true
   694  	}
   695  	return false
   696  }
   697  func rewriteValuegeneric_OpAdd8(v *Value, config *Config) bool {
   698  	b := v.Block
   699  	_ = b
   700  	// match: (Add8   (Const8 [c])   (Const8 [d]))
   701  	// cond:
   702  	// result: (Const8  [int64(int8(c+d))])
   703  	for {
   704  		v_0 := v.Args[0]
   705  		if v_0.Op != OpConst8 {
   706  			break
   707  		}
   708  		c := v_0.AuxInt
   709  		v_1 := v.Args[1]
   710  		if v_1.Op != OpConst8 {
   711  			break
   712  		}
   713  		d := v_1.AuxInt
   714  		v.reset(OpConst8)
   715  		v.AuxInt = int64(int8(c + d))
   716  		return true
   717  	}
   718  	// match: (Add8  x (Const8  <t> [c]))
   719  	// cond: x.Op != OpConst8
   720  	// result: (Add8  (Const8  <t> [c]) x)
   721  	for {
   722  		x := v.Args[0]
   723  		v_1 := v.Args[1]
   724  		if v_1.Op != OpConst8 {
   725  			break
   726  		}
   727  		t := v_1.Type
   728  		c := v_1.AuxInt
   729  		if !(x.Op != OpConst8) {
   730  			break
   731  		}
   732  		v.reset(OpAdd8)
   733  		v0 := b.NewValue0(v.Pos, OpConst8, t)
   734  		v0.AuxInt = c
   735  		v.AddArg(v0)
   736  		v.AddArg(x)
   737  		return true
   738  	}
   739  	// match: (Add8  (Const8  [0]) x)
   740  	// cond:
   741  	// result: x
   742  	for {
   743  		v_0 := v.Args[0]
   744  		if v_0.Op != OpConst8 {
   745  			break
   746  		}
   747  		if v_0.AuxInt != 0 {
   748  			break
   749  		}
   750  		x := v.Args[1]
   751  		v.reset(OpCopy)
   752  		v.Type = x.Type
   753  		v.AddArg(x)
   754  		return true
   755  	}
   756  	return false
   757  }
   758  func rewriteValuegeneric_OpAddPtr(v *Value, config *Config) bool {
   759  	b := v.Block
   760  	_ = b
   761  	// match: (AddPtr <t> x (Const64 [c]))
   762  	// cond:
   763  	// result: (OffPtr <t> x [c])
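         	// A pointer plus a constant offset becomes an OffPtr, carrying the offset
         	// in AuxInt instead of as a separate operand.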
   764  	for {
   765  		t := v.Type
   766  		x := v.Args[0]
   767  		v_1 := v.Args[1]
   768  		if v_1.Op != OpConst64 {
   769  			break
   770  		}
   771  		c := v_1.AuxInt
   772  		v.reset(OpOffPtr)
   773  		v.Type = t
   774  		v.AuxInt = c
   775  		v.AddArg(x)
   776  		return true
   777  	}
   778  	return false
   779  }
   780  func rewriteValuegeneric_OpAnd16(v *Value, config *Config) bool {
   781  	b := v.Block
   782  	_ = b
   783  	// match: (And16 x (Const16 <t> [c]))
   784  	// cond: x.Op != OpConst16
   785  	// result: (And16 (Const16 <t> [c]) x)
   786  	for {
   787  		x := v.Args[0]
   788  		v_1 := v.Args[1]
   789  		if v_1.Op != OpConst16 {
   790  			break
   791  		}
   792  		t := v_1.Type
   793  		c := v_1.AuxInt
   794  		if !(x.Op != OpConst16) {
   795  			break
   796  		}
   797  		v.reset(OpAnd16)
   798  		v0 := b.NewValue0(v.Pos, OpConst16, t)
   799  		v0.AuxInt = c
   800  		v.AddArg(v0)
   801  		v.AddArg(x)
   802  		return true
   803  	}
   804  	// match: (And16 x x)
   805  	// cond:
   806  	// result: x
   807  	for {
   808  		x := v.Args[0]
   809  		if x != v.Args[1] {
   810  			break
   811  		}
   812  		v.reset(OpCopy)
   813  		v.Type = x.Type
   814  		v.AddArg(x)
   815  		return true
   816  	}
   817  	// match: (And16 (Const16 [-1]) x)
   818  	// cond:
   819  	// result: x
   820  	for {
   821  		v_0 := v.Args[0]
   822  		if v_0.Op != OpConst16 {
   823  			break
   824  		}
   825  		if v_0.AuxInt != -1 {
   826  			break
   827  		}
   828  		x := v.Args[1]
   829  		v.reset(OpCopy)
   830  		v.Type = x.Type
   831  		v.AddArg(x)
   832  		return true
   833  	}
   834  	// match: (And16 (Const16 [0]) _)
   835  	// cond:
   836  	// result: (Const16 [0])
   837  	for {
   838  		v_0 := v.Args[0]
   839  		if v_0.Op != OpConst16 {
   840  			break
   841  		}
   842  		if v_0.AuxInt != 0 {
   843  			break
   844  		}
   845  		v.reset(OpConst16)
   846  		v.AuxInt = 0
   847  		return true
   848  	}
   849  	// match: (And16 x (And16 x y))
   850  	// cond:
   851  	// result: (And16 x y)
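         	// Absorption: x & (x & y) reduces to x & y. The next three rules cover
         	// the remaining operand orderings of the same identity.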
   852  	for {
   853  		x := v.Args[0]
   854  		v_1 := v.Args[1]
   855  		if v_1.Op != OpAnd16 {
   856  			break
   857  		}
   858  		if x != v_1.Args[0] {
   859  			break
   860  		}
   861  		y := v_1.Args[1]
   862  		v.reset(OpAnd16)
   863  		v.AddArg(x)
   864  		v.AddArg(y)
   865  		return true
   866  	}
   867  	// match: (And16 x (And16 y x))
   868  	// cond:
   869  	// result: (And16 x y)
   870  	for {
   871  		x := v.Args[0]
   872  		v_1 := v.Args[1]
   873  		if v_1.Op != OpAnd16 {
   874  			break
   875  		}
   876  		y := v_1.Args[0]
   877  		if x != v_1.Args[1] {
   878  			break
   879  		}
   880  		v.reset(OpAnd16)
   881  		v.AddArg(x)
   882  		v.AddArg(y)
   883  		return true
   884  	}
   885  	// match: (And16 (And16 x y) x)
   886  	// cond:
   887  	// result: (And16 x y)
   888  	for {
   889  		v_0 := v.Args[0]
   890  		if v_0.Op != OpAnd16 {
   891  			break
   892  		}
   893  		x := v_0.Args[0]
   894  		y := v_0.Args[1]
   895  		if x != v.Args[1] {
   896  			break
   897  		}
   898  		v.reset(OpAnd16)
   899  		v.AddArg(x)
   900  		v.AddArg(y)
   901  		return true
   902  	}
   903  	// match: (And16 (And16 x y) y)
   904  	// cond:
   905  	// result: (And16 x y)
   906  	for {
   907  		v_0 := v.Args[0]
   908  		if v_0.Op != OpAnd16 {
   909  			break
   910  		}
   911  		x := v_0.Args[0]
   912  		y := v_0.Args[1]
   913  		if y != v.Args[1] {
   914  			break
   915  		}
   916  		v.reset(OpAnd16)
   917  		v.AddArg(x)
   918  		v.AddArg(y)
   919  		return true
   920  	}
   921  	return false
   922  }
   923  func rewriteValuegeneric_OpAnd32(v *Value, config *Config) bool {
   924  	b := v.Block
   925  	_ = b
   926  	// match: (And32 x (Const32 <t> [c]))
   927  	// cond: x.Op != OpConst32
   928  	// result: (And32 (Const32 <t> [c]) x)
   929  	for {
   930  		x := v.Args[0]
   931  		v_1 := v.Args[1]
   932  		if v_1.Op != OpConst32 {
   933  			break
   934  		}
   935  		t := v_1.Type
   936  		c := v_1.AuxInt
   937  		if !(x.Op != OpConst32) {
   938  			break
   939  		}
   940  		v.reset(OpAnd32)
   941  		v0 := b.NewValue0(v.Pos, OpConst32, t)
   942  		v0.AuxInt = c
   943  		v.AddArg(v0)
   944  		v.AddArg(x)
   945  		return true
   946  	}
   947  	// match: (And32 x x)
   948  	// cond:
   949  	// result: x
   950  	for {
   951  		x := v.Args[0]
   952  		if x != v.Args[1] {
   953  			break
   954  		}
   955  		v.reset(OpCopy)
   956  		v.Type = x.Type
   957  		v.AddArg(x)
   958  		return true
   959  	}
   960  	// match: (And32 (Const32 [-1]) x)
   961  	// cond:
   962  	// result: x
   963  	for {
   964  		v_0 := v.Args[0]
   965  		if v_0.Op != OpConst32 {
   966  			break
   967  		}
   968  		if v_0.AuxInt != -1 {
   969  			break
   970  		}
   971  		x := v.Args[1]
   972  		v.reset(OpCopy)
   973  		v.Type = x.Type
   974  		v.AddArg(x)
   975  		return true
   976  	}
   977  	// match: (And32 (Const32 [0]) _)
   978  	// cond:
   979  	// result: (Const32 [0])
   980  	for {
   981  		v_0 := v.Args[0]
   982  		if v_0.Op != OpConst32 {
   983  			break
   984  		}
   985  		if v_0.AuxInt != 0 {
   986  			break
   987  		}
   988  		v.reset(OpConst32)
   989  		v.AuxInt = 0
   990  		return true
   991  	}
   992  	// match: (And32 x (And32 x y))
   993  	// cond:
   994  	// result: (And32 x y)
   995  	for {
   996  		x := v.Args[0]
   997  		v_1 := v.Args[1]
   998  		if v_1.Op != OpAnd32 {
   999  			break
  1000  		}
  1001  		if x != v_1.Args[0] {
  1002  			break
  1003  		}
  1004  		y := v_1.Args[1]
  1005  		v.reset(OpAnd32)
  1006  		v.AddArg(x)
  1007  		v.AddArg(y)
  1008  		return true
  1009  	}
  1010  	// match: (And32 x (And32 y x))
  1011  	// cond:
  1012  	// result: (And32 x y)
  1013  	for {
  1014  		x := v.Args[0]
  1015  		v_1 := v.Args[1]
  1016  		if v_1.Op != OpAnd32 {
  1017  			break
  1018  		}
  1019  		y := v_1.Args[0]
  1020  		if x != v_1.Args[1] {
  1021  			break
  1022  		}
  1023  		v.reset(OpAnd32)
  1024  		v.AddArg(x)
  1025  		v.AddArg(y)
  1026  		return true
  1027  	}
  1028  	// match: (And32 (And32 x y) x)
  1029  	// cond:
  1030  	// result: (And32 x y)
  1031  	for {
  1032  		v_0 := v.Args[0]
  1033  		if v_0.Op != OpAnd32 {
  1034  			break
  1035  		}
  1036  		x := v_0.Args[0]
  1037  		y := v_0.Args[1]
  1038  		if x != v.Args[1] {
  1039  			break
  1040  		}
  1041  		v.reset(OpAnd32)
  1042  		v.AddArg(x)
  1043  		v.AddArg(y)
  1044  		return true
  1045  	}
  1046  	// match: (And32 (And32 x y) y)
  1047  	// cond:
  1048  	// result: (And32 x y)
  1049  	for {
  1050  		v_0 := v.Args[0]
  1051  		if v_0.Op != OpAnd32 {
  1052  			break
  1053  		}
  1054  		x := v_0.Args[0]
  1055  		y := v_0.Args[1]
  1056  		if y != v.Args[1] {
  1057  			break
  1058  		}
  1059  		v.reset(OpAnd32)
  1060  		v.AddArg(x)
  1061  		v.AddArg(y)
  1062  		return true
  1063  	}
  1064  	return false
  1065  }
  1066  func rewriteValuegeneric_OpAnd64(v *Value, config *Config) bool {
  1067  	b := v.Block
  1068  	_ = b
  1069  	// match: (And64 x (Const64 <t> [c]))
  1070  	// cond: x.Op != OpConst64
  1071  	// result: (And64 (Const64 <t> [c]) x)
  1072  	for {
  1073  		x := v.Args[0]
  1074  		v_1 := v.Args[1]
  1075  		if v_1.Op != OpConst64 {
  1076  			break
  1077  		}
  1078  		t := v_1.Type
  1079  		c := v_1.AuxInt
  1080  		if !(x.Op != OpConst64) {
  1081  			break
  1082  		}
  1083  		v.reset(OpAnd64)
  1084  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  1085  		v0.AuxInt = c
  1086  		v.AddArg(v0)
  1087  		v.AddArg(x)
  1088  		return true
  1089  	}
  1090  	// match: (And64 x x)
  1091  	// cond:
  1092  	// result: x
  1093  	for {
  1094  		x := v.Args[0]
  1095  		if x != v.Args[1] {
  1096  			break
  1097  		}
  1098  		v.reset(OpCopy)
  1099  		v.Type = x.Type
  1100  		v.AddArg(x)
  1101  		return true
  1102  	}
  1103  	// match: (And64 (Const64 [-1]) x)
  1104  	// cond:
  1105  	// result: x
  1106  	for {
  1107  		v_0 := v.Args[0]
  1108  		if v_0.Op != OpConst64 {
  1109  			break
  1110  		}
  1111  		if v_0.AuxInt != -1 {
  1112  			break
  1113  		}
  1114  		x := v.Args[1]
  1115  		v.reset(OpCopy)
  1116  		v.Type = x.Type
  1117  		v.AddArg(x)
  1118  		return true
  1119  	}
  1120  	// match: (And64 (Const64 [0]) _)
  1121  	// cond:
  1122  	// result: (Const64 [0])
  1123  	for {
  1124  		v_0 := v.Args[0]
  1125  		if v_0.Op != OpConst64 {
  1126  			break
  1127  		}
  1128  		if v_0.AuxInt != 0 {
  1129  			break
  1130  		}
  1131  		v.reset(OpConst64)
  1132  		v.AuxInt = 0
  1133  		return true
  1134  	}
  1135  	// match: (And64 x (And64 x y))
  1136  	// cond:
  1137  	// result: (And64 x y)
  1138  	for {
  1139  		x := v.Args[0]
  1140  		v_1 := v.Args[1]
  1141  		if v_1.Op != OpAnd64 {
  1142  			break
  1143  		}
  1144  		if x != v_1.Args[0] {
  1145  			break
  1146  		}
  1147  		y := v_1.Args[1]
  1148  		v.reset(OpAnd64)
  1149  		v.AddArg(x)
  1150  		v.AddArg(y)
  1151  		return true
  1152  	}
  1153  	// match: (And64 x (And64 y x))
  1154  	// cond:
  1155  	// result: (And64 x y)
  1156  	for {
  1157  		x := v.Args[0]
  1158  		v_1 := v.Args[1]
  1159  		if v_1.Op != OpAnd64 {
  1160  			break
  1161  		}
  1162  		y := v_1.Args[0]
  1163  		if x != v_1.Args[1] {
  1164  			break
  1165  		}
  1166  		v.reset(OpAnd64)
  1167  		v.AddArg(x)
  1168  		v.AddArg(y)
  1169  		return true
  1170  	}
  1171  	// match: (And64 (And64 x y) x)
  1172  	// cond:
  1173  	// result: (And64 x y)
  1174  	for {
  1175  		v_0 := v.Args[0]
  1176  		if v_0.Op != OpAnd64 {
  1177  			break
  1178  		}
  1179  		x := v_0.Args[0]
  1180  		y := v_0.Args[1]
  1181  		if x != v.Args[1] {
  1182  			break
  1183  		}
  1184  		v.reset(OpAnd64)
  1185  		v.AddArg(x)
  1186  		v.AddArg(y)
  1187  		return true
  1188  	}
  1189  	// match: (And64 (And64 x y) y)
  1190  	// cond:
  1191  	// result: (And64 x y)
  1192  	for {
  1193  		v_0 := v.Args[0]
  1194  		if v_0.Op != OpAnd64 {
  1195  			break
  1196  		}
  1197  		x := v_0.Args[0]
  1198  		y := v_0.Args[1]
  1199  		if y != v.Args[1] {
  1200  			break
  1201  		}
  1202  		v.reset(OpAnd64)
  1203  		v.AddArg(x)
  1204  		v.AddArg(y)
  1205  		return true
  1206  	}
  1207  	// match: (And64 <t> (Const64 [y]) x)
  1208  	// cond: nlz(y) + nto(y) == 64 && nto(y) >= 32
  1209  	// result: (Rsh64Ux64 (Lsh64x64 <t> x (Const64 <t> [nlz(y)])) (Const64 <t> [nlz(y)]))
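         	// The condition forces y to be nlz(y) leading zeros followed by ones, so
         	// the AND is equivalent to a left shift then an unsigned right shift by
         	// nlz(y), sidestepping a constant that does not fit in 32 bits.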
  1210  	for {
  1211  		t := v.Type
  1212  		v_0 := v.Args[0]
  1213  		if v_0.Op != OpConst64 {
  1214  			break
  1215  		}
  1216  		y := v_0.AuxInt
  1217  		x := v.Args[1]
  1218  		if !(nlz(y)+nto(y) == 64 && nto(y) >= 32) {
  1219  			break
  1220  		}
  1221  		v.reset(OpRsh64Ux64)
  1222  		v0 := b.NewValue0(v.Pos, OpLsh64x64, t)
  1223  		v0.AddArg(x)
  1224  		v1 := b.NewValue0(v.Pos, OpConst64, t)
  1225  		v1.AuxInt = nlz(y)
  1226  		v0.AddArg(v1)
  1227  		v.AddArg(v0)
  1228  		v2 := b.NewValue0(v.Pos, OpConst64, t)
  1229  		v2.AuxInt = nlz(y)
  1230  		v.AddArg(v2)
  1231  		return true
  1232  	}
  1233  	// match: (And64 <t> (Const64 [y]) x)
  1234  	// cond: nlo(y) + ntz(y) == 64 && ntz(y) >= 32
  1235  	// result: (Lsh64x64 (Rsh64Ux64 <t> x (Const64 <t> [ntz(y)])) (Const64 <t> [ntz(y)]))
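         	// Mirror of the previous rule: y is ones followed by ntz(y) trailing
         	// zeros, so the AND becomes an unsigned right shift then a left shift
         	// by ntz(y).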
  1236  	for {
  1237  		t := v.Type
  1238  		v_0 := v.Args[0]
  1239  		if v_0.Op != OpConst64 {
  1240  			break
  1241  		}
  1242  		y := v_0.AuxInt
  1243  		x := v.Args[1]
  1244  		if !(nlo(y)+ntz(y) == 64 && ntz(y) >= 32) {
  1245  			break
  1246  		}
  1247  		v.reset(OpLsh64x64)
  1248  		v0 := b.NewValue0(v.Pos, OpRsh64Ux64, t)
  1249  		v0.AddArg(x)
  1250  		v1 := b.NewValue0(v.Pos, OpConst64, t)
  1251  		v1.AuxInt = ntz(y)
  1252  		v0.AddArg(v1)
  1253  		v.AddArg(v0)
  1254  		v2 := b.NewValue0(v.Pos, OpConst64, t)
  1255  		v2.AuxInt = ntz(y)
  1256  		v.AddArg(v2)
  1257  		return true
  1258  	}
  1259  	return false
  1260  }
  1261  func rewriteValuegeneric_OpAnd8(v *Value, config *Config) bool {
  1262  	b := v.Block
  1263  	_ = b
  1264  	// match: (And8  x (Const8  <t> [c]))
  1265  	// cond: x.Op != OpConst8
  1266  	// result: (And8  (Const8  <t> [c]) x)
  1267  	for {
  1268  		x := v.Args[0]
  1269  		v_1 := v.Args[1]
  1270  		if v_1.Op != OpConst8 {
  1271  			break
  1272  		}
  1273  		t := v_1.Type
  1274  		c := v_1.AuxInt
  1275  		if !(x.Op != OpConst8) {
  1276  			break
  1277  		}
  1278  		v.reset(OpAnd8)
  1279  		v0 := b.NewValue0(v.Pos, OpConst8, t)
  1280  		v0.AuxInt = c
  1281  		v.AddArg(v0)
  1282  		v.AddArg(x)
  1283  		return true
  1284  	}
  1285  	// match: (And8  x x)
  1286  	// cond:
  1287  	// result: x
  1288  	for {
  1289  		x := v.Args[0]
  1290  		if x != v.Args[1] {
  1291  			break
  1292  		}
  1293  		v.reset(OpCopy)
  1294  		v.Type = x.Type
  1295  		v.AddArg(x)
  1296  		return true
  1297  	}
  1298  	// match: (And8  (Const8  [-1]) x)
  1299  	// cond:
  1300  	// result: x
  1301  	for {
  1302  		v_0 := v.Args[0]
  1303  		if v_0.Op != OpConst8 {
  1304  			break
  1305  		}
  1306  		if v_0.AuxInt != -1 {
  1307  			break
  1308  		}
  1309  		x := v.Args[1]
  1310  		v.reset(OpCopy)
  1311  		v.Type = x.Type
  1312  		v.AddArg(x)
  1313  		return true
  1314  	}
  1315  	// match: (And8  (Const8  [0]) _)
  1316  	// cond:
  1317  	// result: (Const8  [0])
  1318  	for {
  1319  		v_0 := v.Args[0]
  1320  		if v_0.Op != OpConst8 {
  1321  			break
  1322  		}
  1323  		if v_0.AuxInt != 0 {
  1324  			break
  1325  		}
  1326  		v.reset(OpConst8)
  1327  		v.AuxInt = 0
  1328  		return true
  1329  	}
  1330  	// match: (And8  x (And8  x y))
  1331  	// cond:
  1332  	// result: (And8  x y)
  1333  	for {
  1334  		x := v.Args[0]
  1335  		v_1 := v.Args[1]
  1336  		if v_1.Op != OpAnd8 {
  1337  			break
  1338  		}
  1339  		if x != v_1.Args[0] {
  1340  			break
  1341  		}
  1342  		y := v_1.Args[1]
  1343  		v.reset(OpAnd8)
  1344  		v.AddArg(x)
  1345  		v.AddArg(y)
  1346  		return true
  1347  	}
  1348  	// match: (And8  x (And8  y x))
  1349  	// cond:
  1350  	// result: (And8  x y)
  1351  	for {
  1352  		x := v.Args[0]
  1353  		v_1 := v.Args[1]
  1354  		if v_1.Op != OpAnd8 {
  1355  			break
  1356  		}
  1357  		y := v_1.Args[0]
  1358  		if x != v_1.Args[1] {
  1359  			break
  1360  		}
  1361  		v.reset(OpAnd8)
  1362  		v.AddArg(x)
  1363  		v.AddArg(y)
  1364  		return true
  1365  	}
  1366  	// match: (And8  (And8  x y) x)
  1367  	// cond:
  1368  	// result: (And8  x y)
  1369  	for {
  1370  		v_0 := v.Args[0]
  1371  		if v_0.Op != OpAnd8 {
  1372  			break
  1373  		}
  1374  		x := v_0.Args[0]
  1375  		y := v_0.Args[1]
  1376  		if x != v.Args[1] {
  1377  			break
  1378  		}
  1379  		v.reset(OpAnd8)
  1380  		v.AddArg(x)
  1381  		v.AddArg(y)
  1382  		return true
  1383  	}
  1384  	// match: (And8  (And8  x y) y)
  1385  	// cond:
  1386  	// result: (And8  x y)
  1387  	for {
  1388  		v_0 := v.Args[0]
  1389  		if v_0.Op != OpAnd8 {
  1390  			break
  1391  		}
  1392  		x := v_0.Args[0]
  1393  		y := v_0.Args[1]
  1394  		if y != v.Args[1] {
  1395  			break
  1396  		}
  1397  		v.reset(OpAnd8)
  1398  		v.AddArg(x)
  1399  		v.AddArg(y)
  1400  		return true
  1401  	}
  1402  	return false
  1403  }
  1404  func rewriteValuegeneric_OpArg(v *Value, config *Config) bool {
  1405  	b := v.Block
  1406  	_ = b
  1407  	// match: (Arg {n} [off])
  1408  	// cond: v.Type.IsString()
  1409  	// result: (StringMake     (Arg <config.fe.TypeBytePtr()> {n} [off])     (Arg <config.fe.TypeInt()> {n} [off+config.PtrSize]))
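         	// A string-typed Arg decomposes into its two machine words: the data
         	// pointer at off and the length at off+PtrSize.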
  1410  	for {
  1411  		off := v.AuxInt
  1412  		n := v.Aux
  1413  		if !(v.Type.IsString()) {
  1414  			break
  1415  		}
  1416  		v.reset(OpStringMake)
  1417  		v0 := b.NewValue0(v.Pos, OpArg, config.fe.TypeBytePtr())
  1418  		v0.AuxInt = off
  1419  		v0.Aux = n
  1420  		v.AddArg(v0)
  1421  		v1 := b.NewValue0(v.Pos, OpArg, config.fe.TypeInt())
  1422  		v1.AuxInt = off + config.PtrSize
  1423  		v1.Aux = n
  1424  		v.AddArg(v1)
  1425  		return true
  1426  	}
  1427  	// match: (Arg {n} [off])
  1428  	// cond: v.Type.IsSlice()
  1429  	// result: (SliceMake     (Arg <v.Type.ElemType().PtrTo()> {n} [off])     (Arg <config.fe.TypeInt()> {n} [off+config.PtrSize])     (Arg <config.fe.TypeInt()> {n} [off+2*config.PtrSize]))
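         	// Likewise a slice Arg splits into pointer, length, and capacity words
         	// at off, off+PtrSize, and off+2*PtrSize.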
  1430  	for {
  1431  		off := v.AuxInt
  1432  		n := v.Aux
  1433  		if !(v.Type.IsSlice()) {
  1434  			break
  1435  		}
  1436  		v.reset(OpSliceMake)
  1437  		v0 := b.NewValue0(v.Pos, OpArg, v.Type.ElemType().PtrTo())
  1438  		v0.AuxInt = off
  1439  		v0.Aux = n
  1440  		v.AddArg(v0)
  1441  		v1 := b.NewValue0(v.Pos, OpArg, config.fe.TypeInt())
  1442  		v1.AuxInt = off + config.PtrSize
  1443  		v1.Aux = n
  1444  		v.AddArg(v1)
  1445  		v2 := b.NewValue0(v.Pos, OpArg, config.fe.TypeInt())
  1446  		v2.AuxInt = off + 2*config.PtrSize
  1447  		v2.Aux = n
  1448  		v.AddArg(v2)
  1449  		return true
  1450  	}
  1451  	// match: (Arg {n} [off])
  1452  	// cond: v.Type.IsInterface()
  1453  	// result: (IMake     (Arg <config.fe.TypeBytePtr()> {n} [off])     (Arg <config.fe.TypeBytePtr()> {n} [off+config.PtrSize]))
  1454  	for {
  1455  		off := v.AuxInt
  1456  		n := v.Aux
  1457  		if !(v.Type.IsInterface()) {
  1458  			break
  1459  		}
  1460  		v.reset(OpIMake)
  1461  		v0 := b.NewValue0(v.Pos, OpArg, config.fe.TypeBytePtr())
  1462  		v0.AuxInt = off
  1463  		v0.Aux = n
  1464  		v.AddArg(v0)
  1465  		v1 := b.NewValue0(v.Pos, OpArg, config.fe.TypeBytePtr())
  1466  		v1.AuxInt = off + config.PtrSize
  1467  		v1.Aux = n
  1468  		v.AddArg(v1)
  1469  		return true
  1470  	}
  1471  	// match: (Arg {n} [off])
  1472  	// cond: v.Type.IsComplex() && v.Type.Size() == 16
  1473  	// result: (ComplexMake     (Arg <config.fe.TypeFloat64()> {n} [off])     (Arg <config.fe.TypeFloat64()> {n} [off+8]))
  1474  	for {
  1475  		off := v.AuxInt
  1476  		n := v.Aux
  1477  		if !(v.Type.IsComplex() && v.Type.Size() == 16) {
  1478  			break
  1479  		}
  1480  		v.reset(OpComplexMake)
  1481  		v0 := b.NewValue0(v.Pos, OpArg, config.fe.TypeFloat64())
  1482  		v0.AuxInt = off
  1483  		v0.Aux = n
  1484  		v.AddArg(v0)
  1485  		v1 := b.NewValue0(v.Pos, OpArg, config.fe.TypeFloat64())
  1486  		v1.AuxInt = off + 8
  1487  		v1.Aux = n
  1488  		v.AddArg(v1)
  1489  		return true
  1490  	}
  1491  	// match: (Arg {n} [off])
  1492  	// cond: v.Type.IsComplex() && v.Type.Size() == 8
  1493  	// result: (ComplexMake     (Arg <config.fe.TypeFloat32()> {n} [off])     (Arg <config.fe.TypeFloat32()> {n} [off+4]))
  1494  	for {
  1495  		off := v.AuxInt
  1496  		n := v.Aux
  1497  		if !(v.Type.IsComplex() && v.Type.Size() == 8) {
  1498  			break
  1499  		}
  1500  		v.reset(OpComplexMake)
  1501  		v0 := b.NewValue0(v.Pos, OpArg, config.fe.TypeFloat32())
  1502  		v0.AuxInt = off
  1503  		v0.Aux = n
  1504  		v.AddArg(v0)
  1505  		v1 := b.NewValue0(v.Pos, OpArg, config.fe.TypeFloat32())
  1506  		v1.AuxInt = off + 4
  1507  		v1.Aux = n
  1508  		v.AddArg(v1)
  1509  		return true
  1510  	}
  1511  	// match: (Arg <t>)
  1512  	// cond: t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t)
  1513  	// result: (StructMake0)
  1514  	for {
  1515  		t := v.Type
  1516  		if !(t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t)) {
  1517  			break
  1518  		}
  1519  		v.reset(OpStructMake0)
  1520  		return true
  1521  	}
  1522  	// match: (Arg <t> {n} [off])
  1523  	// cond: t.IsStruct() && t.NumFields() == 1 && config.fe.CanSSA(t)
  1524  	// result: (StructMake1     (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)]))
  1525  	for {
  1526  		t := v.Type
  1527  		off := v.AuxInt
  1528  		n := v.Aux
  1529  		if !(t.IsStruct() && t.NumFields() == 1 && config.fe.CanSSA(t)) {
  1530  			break
  1531  		}
  1532  		v.reset(OpStructMake1)
  1533  		v0 := b.NewValue0(v.Pos, OpArg, t.FieldType(0))
  1534  		v0.AuxInt = off + t.FieldOff(0)
  1535  		v0.Aux = n
  1536  		v.AddArg(v0)
  1537  		return true
  1538  	}
  1539  	// match: (Arg <t> {n} [off])
  1540  	// cond: t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t)
  1541  	// result: (StructMake2     (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)])     (Arg <t.FieldType(1)> {n} [off+t.FieldOff(1)]))
  1542  	for {
  1543  		t := v.Type
  1544  		off := v.AuxInt
  1545  		n := v.Aux
  1546  		if !(t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t)) {
  1547  			break
  1548  		}
  1549  		v.reset(OpStructMake2)
  1550  		v0 := b.NewValue0(v.Pos, OpArg, t.FieldType(0))
  1551  		v0.AuxInt = off + t.FieldOff(0)
  1552  		v0.Aux = n
  1553  		v.AddArg(v0)
  1554  		v1 := b.NewValue0(v.Pos, OpArg, t.FieldType(1))
  1555  		v1.AuxInt = off + t.FieldOff(1)
  1556  		v1.Aux = n
  1557  		v.AddArg(v1)
  1558  		return true
  1559  	}
  1560  	// match: (Arg <t> {n} [off])
  1561  	// cond: t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t)
  1562  	// result: (StructMake3     (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)])     (Arg <t.FieldType(1)> {n} [off+t.FieldOff(1)])     (Arg <t.FieldType(2)> {n} [off+t.FieldOff(2)]))
  1563  	for {
  1564  		t := v.Type
  1565  		off := v.AuxInt
  1566  		n := v.Aux
  1567  		if !(t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t)) {
  1568  			break
  1569  		}
  1570  		v.reset(OpStructMake3)
  1571  		v0 := b.NewValue0(v.Pos, OpArg, t.FieldType(0))
  1572  		v0.AuxInt = off + t.FieldOff(0)
  1573  		v0.Aux = n
  1574  		v.AddArg(v0)
  1575  		v1 := b.NewValue0(v.Pos, OpArg, t.FieldType(1))
  1576  		v1.AuxInt = off + t.FieldOff(1)
  1577  		v1.Aux = n
  1578  		v.AddArg(v1)
  1579  		v2 := b.NewValue0(v.Pos, OpArg, t.FieldType(2))
  1580  		v2.AuxInt = off + t.FieldOff(2)
  1581  		v2.Aux = n
  1582  		v.AddArg(v2)
  1583  		return true
  1584  	}
  1585  	// match: (Arg <t> {n} [off])
  1586  	// cond: t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t)
  1587  	// result: (StructMake4     (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)])     (Arg <t.FieldType(1)> {n} [off+t.FieldOff(1)])     (Arg <t.FieldType(2)> {n} [off+t.FieldOff(2)])     (Arg <t.FieldType(3)> {n} [off+t.FieldOff(3)]))
  1588  	for {
  1589  		t := v.Type
  1590  		off := v.AuxInt
  1591  		n := v.Aux
  1592  		if !(t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t)) {
  1593  			break
  1594  		}
  1595  		v.reset(OpStructMake4)
  1596  		v0 := b.NewValue0(v.Pos, OpArg, t.FieldType(0))
  1597  		v0.AuxInt = off + t.FieldOff(0)
  1598  		v0.Aux = n
  1599  		v.AddArg(v0)
  1600  		v1 := b.NewValue0(v.Pos, OpArg, t.FieldType(1))
  1601  		v1.AuxInt = off + t.FieldOff(1)
  1602  		v1.Aux = n
  1603  		v.AddArg(v1)
  1604  		v2 := b.NewValue0(v.Pos, OpArg, t.FieldType(2))
  1605  		v2.AuxInt = off + t.FieldOff(2)
  1606  		v2.Aux = n
  1607  		v.AddArg(v2)
  1608  		v3 := b.NewValue0(v.Pos, OpArg, t.FieldType(3))
  1609  		v3.AuxInt = off + t.FieldOff(3)
  1610  		v3.Aux = n
  1611  		v.AddArg(v3)
  1612  		return true
  1613  	}
  1614  	// match: (Arg <t>)
  1615  	// cond: t.IsArray() && t.NumElem() == 0
  1616  	// result: (ArrayMake0)
  1617  	for {
  1618  		t := v.Type
  1619  		if !(t.IsArray() && t.NumElem() == 0) {
  1620  			break
  1621  		}
  1622  		v.reset(OpArrayMake0)
  1623  		return true
  1624  	}
  1625  	// match: (Arg <t> {n} [off])
  1626  	// cond: t.IsArray() && t.NumElem() == 1 && config.fe.CanSSA(t)
  1627  	// result: (ArrayMake1 (Arg <t.ElemType()> {n} [off]))
  1628  	for {
  1629  		t := v.Type
  1630  		off := v.AuxInt
  1631  		n := v.Aux
  1632  		if !(t.IsArray() && t.NumElem() == 1 && config.fe.CanSSA(t)) {
  1633  			break
  1634  		}
  1635  		v.reset(OpArrayMake1)
  1636  		v0 := b.NewValue0(v.Pos, OpArg, t.ElemType())
  1637  		v0.AuxInt = off
  1638  		v0.Aux = n
  1639  		v.AddArg(v0)
  1640  		return true
  1641  	}
  1642  	return false
  1643  }
  1644  func rewriteValuegeneric_OpArraySelect(v *Value, config *Config) bool {
  1645  	b := v.Block
  1646  	_ = b
  1647  	// match: (ArraySelect (ArrayMake1 x))
  1648  	// cond:
  1649  	// result: x
  1650  	for {
  1651  		v_0 := v.Args[0]
  1652  		if v_0.Op != OpArrayMake1 {
  1653  			break
  1654  		}
  1655  		x := v_0.Args[0]
  1656  		v.reset(OpCopy)
  1657  		v.Type = x.Type
  1658  		v.AddArg(x)
  1659  		return true
  1660  	}
  1661  	// match: (ArraySelect [0] (Load ptr mem))
  1662  	// cond:
  1663  	// result: (Load ptr mem)
  1664  	for {
  1665  		if v.AuxInt != 0 {
  1666  			break
  1667  		}
  1668  		v_0 := v.Args[0]
  1669  		if v_0.Op != OpLoad {
  1670  			break
  1671  		}
  1672  		ptr := v_0.Args[0]
  1673  		mem := v_0.Args[1]
  1674  		v.reset(OpLoad)
  1675  		v.AddArg(ptr)
  1676  		v.AddArg(mem)
  1677  		return true
  1678  	}
  1679  	// match: (ArraySelect [0] x:(IData _))
  1680  	// cond:
  1681  	// result: x
  1682  	for {
  1683  		if v.AuxInt != 0 {
  1684  			break
  1685  		}
  1686  		x := v.Args[0]
  1687  		if x.Op != OpIData {
  1688  			break
  1689  		}
  1690  		v.reset(OpCopy)
  1691  		v.Type = x.Type
  1692  		v.AddArg(x)
  1693  		return true
  1694  	}
  1695  	return false
  1696  }
  1697  func rewriteValuegeneric_OpCom16(v *Value, config *Config) bool {
  1698  	b := v.Block
  1699  	_ = b
  1700  	// match: (Com16 (Com16 x))
  1701  	// cond:
  1702  	// result: x
  1703  	for {
  1704  		v_0 := v.Args[0]
  1705  		if v_0.Op != OpCom16 {
  1706  			break
  1707  		}
  1708  		x := v_0.Args[0]
  1709  		v.reset(OpCopy)
  1710  		v.Type = x.Type
  1711  		v.AddArg(x)
  1712  		return true
  1713  	}
  1714  	return false
  1715  }
  1716  func rewriteValuegeneric_OpCom32(v *Value, config *Config) bool {
  1717  	b := v.Block
  1718  	_ = b
  1719  	// match: (Com32 (Com32 x))
  1720  	// cond:
  1721  	// result: x
  1722  	for {
  1723  		v_0 := v.Args[0]
  1724  		if v_0.Op != OpCom32 {
  1725  			break
  1726  		}
  1727  		x := v_0.Args[0]
  1728  		v.reset(OpCopy)
  1729  		v.Type = x.Type
  1730  		v.AddArg(x)
  1731  		return true
  1732  	}
  1733  	return false
  1734  }
  1735  func rewriteValuegeneric_OpCom64(v *Value, config *Config) bool {
  1736  	b := v.Block
  1737  	_ = b
  1738  	// match: (Com64 (Com64 x))
  1739  	// cond:
  1740  	// result: x
  1741  	for {
  1742  		v_0 := v.Args[0]
  1743  		if v_0.Op != OpCom64 {
  1744  			break
  1745  		}
  1746  		x := v_0.Args[0]
  1747  		v.reset(OpCopy)
  1748  		v.Type = x.Type
  1749  		v.AddArg(x)
  1750  		return true
  1751  	}
  1752  	return false
  1753  }
  1754  func rewriteValuegeneric_OpCom8(v *Value, config *Config) bool {
  1755  	b := v.Block
  1756  	_ = b
  1757  	// match: (Com8  (Com8  x))
  1758  	// cond:
  1759  	// result: x
  1760  	for {
  1761  		v_0 := v.Args[0]
  1762  		if v_0.Op != OpCom8 {
  1763  			break
  1764  		}
  1765  		x := v_0.Args[0]
  1766  		v.reset(OpCopy)
  1767  		v.Type = x.Type
  1768  		v.AddArg(x)
  1769  		return true
  1770  	}
  1771  	return false
  1772  }
  1773  func rewriteValuegeneric_OpConstInterface(v *Value, config *Config) bool {
  1774  	b := v.Block
  1775  	_ = b
  1776  	// match: (ConstInterface)
  1777  	// cond:
  1778  	// result: (IMake     (ConstNil <config.fe.TypeBytePtr()>)     (ConstNil <config.fe.TypeBytePtr()>))
  1779  	for {
  1780  		v.reset(OpIMake)
  1781  		v0 := b.NewValue0(v.Pos, OpConstNil, config.fe.TypeBytePtr())
  1782  		v.AddArg(v0)
  1783  		v1 := b.NewValue0(v.Pos, OpConstNil, config.fe.TypeBytePtr())
  1784  		v.AddArg(v1)
  1785  		return true
  1786  	}
  1787  }
  1788  func rewriteValuegeneric_OpConstSlice(v *Value, config *Config) bool {
  1789  	b := v.Block
  1790  	_ = b
  1791  	// match: (ConstSlice)
  1792  	// cond: config.PtrSize == 4
  1793  	// result: (SliceMake     (ConstNil <v.Type.ElemType().PtrTo()>)     (Const32 <config.fe.TypeInt()> [0])     (Const32 <config.fe.TypeInt()> [0]))
  1794  	for {
  1795  		if !(config.PtrSize == 4) {
  1796  			break
  1797  		}
  1798  		v.reset(OpSliceMake)
  1799  		v0 := b.NewValue0(v.Pos, OpConstNil, v.Type.ElemType().PtrTo())
  1800  		v.AddArg(v0)
  1801  		v1 := b.NewValue0(v.Pos, OpConst32, config.fe.TypeInt())
  1802  		v1.AuxInt = 0
  1803  		v.AddArg(v1)
  1804  		v2 := b.NewValue0(v.Pos, OpConst32, config.fe.TypeInt())
  1805  		v2.AuxInt = 0
  1806  		v.AddArg(v2)
  1807  		return true
  1808  	}
  1809  	// match: (ConstSlice)
  1810  	// cond: config.PtrSize == 8
  1811  	// result: (SliceMake     (ConstNil <v.Type.ElemType().PtrTo()>)     (Const64 <config.fe.TypeInt()> [0])     (Const64 <config.fe.TypeInt()> [0]))
  1812  	for {
  1813  		if !(config.PtrSize == 8) {
  1814  			break
  1815  		}
  1816  		v.reset(OpSliceMake)
  1817  		v0 := b.NewValue0(v.Pos, OpConstNil, v.Type.ElemType().PtrTo())
  1818  		v.AddArg(v0)
  1819  		v1 := b.NewValue0(v.Pos, OpConst64, config.fe.TypeInt())
  1820  		v1.AuxInt = 0
  1821  		v.AddArg(v1)
  1822  		v2 := b.NewValue0(v.Pos, OpConst64, config.fe.TypeInt())
  1823  		v2.AuxInt = 0
  1824  		v.AddArg(v2)
  1825  		return true
  1826  	}
  1827  	return false
  1828  }
  1829  func rewriteValuegeneric_OpConstString(v *Value, config *Config) bool {
  1830  	b := v.Block
  1831  	_ = b
  1832  	// match: (ConstString {s})
  1833  	// cond: config.PtrSize == 4 && s.(string) == ""
  1834  	// result: (StringMake (ConstNil) (Const32 <config.fe.TypeInt()> [0]))
  1835  	for {
  1836  		s := v.Aux
  1837  		if !(config.PtrSize == 4 && s.(string) == "") {
  1838  			break
  1839  		}
  1840  		v.reset(OpStringMake)
  1841  		v0 := b.NewValue0(v.Pos, OpConstNil, config.fe.TypeBytePtr())
  1842  		v.AddArg(v0)
  1843  		v1 := b.NewValue0(v.Pos, OpConst32, config.fe.TypeInt())
  1844  		v1.AuxInt = 0
  1845  		v.AddArg(v1)
  1846  		return true
  1847  	}
  1848  	// match: (ConstString {s})
  1849  	// cond: config.PtrSize == 8 && s.(string) == ""
  1850  	// result: (StringMake (ConstNil) (Const64 <config.fe.TypeInt()> [0]))
  1851  	for {
  1852  		s := v.Aux
  1853  		if !(config.PtrSize == 8 && s.(string) == "") {
  1854  			break
  1855  		}
  1856  		v.reset(OpStringMake)
  1857  		v0 := b.NewValue0(v.Pos, OpConstNil, config.fe.TypeBytePtr())
  1858  		v.AddArg(v0)
  1859  		v1 := b.NewValue0(v.Pos, OpConst64, config.fe.TypeInt())
  1860  		v1.AuxInt = 0
  1861  		v.AddArg(v1)
  1862  		return true
  1863  	}
  1864  	// match: (ConstString {s})
  1865  	// cond: config.PtrSize == 4 && s.(string) != ""
  1866  	// result: (StringMake     (Addr <config.fe.TypeBytePtr()> {config.fe.StringData(s.(string))}       (SB))     (Const32 <config.fe.TypeInt()> [int64(len(s.(string)))]))
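         	// A non-empty constant string becomes an Addr into the static string
         	// data plus a constant length.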
  1867  	for {
  1868  		s := v.Aux
  1869  		if !(config.PtrSize == 4 && s.(string) != "") {
  1870  			break
  1871  		}
  1872  		v.reset(OpStringMake)
  1873  		v0 := b.NewValue0(v.Pos, OpAddr, config.fe.TypeBytePtr())
  1874  		v0.Aux = config.fe.StringData(s.(string))
  1875  		v1 := b.NewValue0(v.Pos, OpSB, config.fe.TypeUintptr())
  1876  		v0.AddArg(v1)
  1877  		v.AddArg(v0)
  1878  		v2 := b.NewValue0(v.Pos, OpConst32, config.fe.TypeInt())
  1879  		v2.AuxInt = int64(len(s.(string)))
  1880  		v.AddArg(v2)
  1881  		return true
  1882  	}
  1883  	// match: (ConstString {s})
  1884  	// cond: config.PtrSize == 8 && s.(string) != ""
  1885  	// result: (StringMake     (Addr <config.fe.TypeBytePtr()> {config.fe.StringData(s.(string))}       (SB))     (Const64 <config.fe.TypeInt()> [int64(len(s.(string)))]))
  1886  	for {
  1887  		s := v.Aux
  1888  		if !(config.PtrSize == 8 && s.(string) != "") {
  1889  			break
  1890  		}
  1891  		v.reset(OpStringMake)
  1892  		v0 := b.NewValue0(v.Pos, OpAddr, config.fe.TypeBytePtr())
  1893  		v0.Aux = config.fe.StringData(s.(string))
  1894  		v1 := b.NewValue0(v.Pos, OpSB, config.fe.TypeUintptr())
  1895  		v0.AddArg(v1)
  1896  		v.AddArg(v0)
  1897  		v2 := b.NewValue0(v.Pos, OpConst64, config.fe.TypeInt())
  1898  		v2.AuxInt = int64(len(s.(string)))
  1899  		v.AddArg(v2)
  1900  		return true
  1901  	}
  1902  	return false
  1903  }
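        // rewriteValuegeneric_OpConvert removes redundant pointer/uintptr round trips:
        // a Convert of pointer arithmetic on an inner Convert with the same memory
        // argument is rewritten to do the arithmetic directly on the original pointer,
        // and a Convert of a Convert with matching memory collapses to the pointer itself.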
  1904  func rewriteValuegeneric_OpConvert(v *Value, config *Config) bool {
  1905  	b := v.Block
  1906  	_ = b
  1907  	// match: (Convert (Add64 (Convert ptr mem) off) mem)
  1908  	// cond:
  1909  	// result: (Add64 ptr off)
  1910  	for {
  1911  		v_0 := v.Args[0]
  1912  		if v_0.Op != OpAdd64 {
  1913  			break
  1914  		}
  1915  		v_0_0 := v_0.Args[0]
  1916  		if v_0_0.Op != OpConvert {
  1917  			break
  1918  		}
  1919  		ptr := v_0_0.Args[0]
  1920  		mem := v_0_0.Args[1]
  1921  		off := v_0.Args[1]
  1922  		if mem != v.Args[1] {
  1923  			break
  1924  		}
  1925  		v.reset(OpAdd64)
  1926  		v.AddArg(ptr)
  1927  		v.AddArg(off)
  1928  		return true
  1929  	}
  1930  	// match: (Convert (Add64 off (Convert ptr mem)) mem)
  1931  	// cond:
  1932  	// result: (Add64 ptr off)
  1933  	for {
  1934  		v_0 := v.Args[0]
  1935  		if v_0.Op != OpAdd64 {
  1936  			break
  1937  		}
  1938  		off := v_0.Args[0]
  1939  		v_0_1 := v_0.Args[1]
  1940  		if v_0_1.Op != OpConvert {
  1941  			break
  1942  		}
  1943  		ptr := v_0_1.Args[0]
  1944  		mem := v_0_1.Args[1]
  1945  		if mem != v.Args[1] {
  1946  			break
  1947  		}
  1948  		v.reset(OpAdd64)
  1949  		v.AddArg(ptr)
  1950  		v.AddArg(off)
  1951  		return true
  1952  	}
  1953  	// match: (Convert (Convert ptr mem) mem)
  1954  	// cond:
  1955  	// result: ptr
  1956  	for {
  1957  		v_0 := v.Args[0]
  1958  		if v_0.Op != OpConvert {
  1959  			break
  1960  		}
  1961  		ptr := v_0.Args[0]
  1962  		mem := v_0.Args[1]
  1963  		if mem != v.Args[1] {
  1964  			break
  1965  		}
  1966  		v.reset(OpCopy)
  1967  		v.Type = ptr.Type
  1968  		v.AddArg(ptr)
  1969  		return true
  1970  	}
  1971  	return false
  1972  }
  1973  func rewriteValuegeneric_OpCvt32Fto64F(v *Value, config *Config) bool {
  1974  	b := v.Block
  1975  	_ = b
  1976  	// match: (Cvt32Fto64F (Const32F [c]))
  1977  	// cond:
  1978  	// result: (Const64F [c])
  1979  	for {
  1980  		v_0 := v.Args[0]
  1981  		if v_0.Op != OpConst32F {
  1982  			break
  1983  		}
  1984  		c := v_0.AuxInt
  1985  		v.reset(OpConst64F)
  1986  		v.AuxInt = c
  1987  		return true
  1988  	}
  1989  	return false
  1990  }
  1991  func rewriteValuegeneric_OpCvt64Fto32F(v *Value, config *Config) bool {
  1992  	b := v.Block
  1993  	_ = b
  1994  	// match: (Cvt64Fto32F (Const64F [c]))
  1995  	// cond:
  1996  	// result: (Const32F [f2i(float64(i2f32(c)))])
  1997  	for {
  1998  		v_0 := v.Args[0]
  1999  		if v_0.Op != OpConst64F {
  2000  			break
  2001  		}
  2002  		c := v_0.AuxInt
  2003  		v.reset(OpConst32F)
  2004  		v.AuxInt = f2i(float64(i2f32(c)))
  2005  		return true
  2006  	}
  2007  	return false
  2008  }
  2009  func rewriteValuegeneric_OpDiv32F(v *Value, config *Config) bool {
  2010  	b := v.Block
  2011  	_ = b
  2012  	// match: (Div32F x (Const32F [f2i(1)]))
  2013  	// cond:
  2014  	// result: x
  2015  	for {
  2016  		x := v.Args[0]
  2017  		v_1 := v.Args[1]
  2018  		if v_1.Op != OpConst32F {
  2019  			break
  2020  		}
  2021  		if v_1.AuxInt != f2i(1) {
  2022  			break
  2023  		}
  2024  		v.reset(OpCopy)
  2025  		v.Type = x.Type
  2026  		v.AddArg(x)
  2027  		return true
  2028  	}
  2029  	// match: (Div32F x (Const32F [f2i(-1)]))
  2030  	// cond:
  2031  	// result: (Neg32F x)
  2032  	for {
  2033  		x := v.Args[0]
  2034  		v_1 := v.Args[1]
  2035  		if v_1.Op != OpConst32F {
  2036  			break
  2037  		}
  2038  		if v_1.AuxInt != f2i(-1) {
  2039  			break
  2040  		}
  2041  		v.reset(OpNeg32F)
  2042  		v.AddArg(x)
  2043  		return true
  2044  	}
  2045  	return false
  2046  }
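        // rewriteValuegeneric_OpDiv64 strength-reduces signed 64-bit division by a
        // constant into a high multiply by a magic constant (smagic64m) followed by
        // shifts and a sign correction (smagic64s); the four cases cover positive and
        // negative divisors combined with positive and negative magic multipliers.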
  2047  func rewriteValuegeneric_OpDiv64(v *Value, config *Config) bool {
  2048  	b := v.Block
  2049  	_ = b
  2050  	// match: (Div64 <t> x (Const64 [c]))
  2051  	// cond: c > 0 && smagic64ok(c) && smagic64m(c) > 0
  2052  	// result: (Sub64 <t>     (Rsh64x64 <t>       (Hmul64 <t>         (Const64 <t> [smagic64m(c)])         x)       (Const64 <t> [smagic64s(c)]))     (Rsh64x64 <t>       x       (Const64 <t> [63])))
  2053  	for {
  2054  		t := v.Type
  2055  		x := v.Args[0]
  2056  		v_1 := v.Args[1]
  2057  		if v_1.Op != OpConst64 {
  2058  			break
  2059  		}
  2060  		c := v_1.AuxInt
  2061  		if !(c > 0 && smagic64ok(c) && smagic64m(c) > 0) {
  2062  			break
  2063  		}
  2064  		v.reset(OpSub64)
  2065  		v.Type = t
  2066  		v0 := b.NewValue0(v.Pos, OpRsh64x64, t)
  2067  		v1 := b.NewValue0(v.Pos, OpHmul64, t)
  2068  		v2 := b.NewValue0(v.Pos, OpConst64, t)
  2069  		v2.AuxInt = smagic64m(c)
  2070  		v1.AddArg(v2)
  2071  		v1.AddArg(x)
  2072  		v0.AddArg(v1)
  2073  		v3 := b.NewValue0(v.Pos, OpConst64, t)
  2074  		v3.AuxInt = smagic64s(c)
  2075  		v0.AddArg(v3)
  2076  		v.AddArg(v0)
  2077  		v4 := b.NewValue0(v.Pos, OpRsh64x64, t)
  2078  		v4.AddArg(x)
  2079  		v5 := b.NewValue0(v.Pos, OpConst64, t)
  2080  		v5.AuxInt = 63
  2081  		v4.AddArg(v5)
  2082  		v.AddArg(v4)
  2083  		return true
  2084  	}
  2085  	// match: (Div64 <t> x (Const64 [c]))
  2086  	// cond: c > 0 && smagic64ok(c) && smagic64m(c) < 0
  2087  	// result: (Sub64 <t>     (Rsh64x64 <t>       (Add64 <t>         (Hmul64 <t>           (Const64 <t> [smagic64m(c)])           x)         x)       (Const64 <t> [smagic64s(c)]))     (Rsh64x64 <t>       x       (Const64 <t> [63])))
  2088  	for {
  2089  		t := v.Type
  2090  		x := v.Args[0]
  2091  		v_1 := v.Args[1]
  2092  		if v_1.Op != OpConst64 {
  2093  			break
  2094  		}
  2095  		c := v_1.AuxInt
  2096  		if !(c > 0 && smagic64ok(c) && smagic64m(c) < 0) {
  2097  			break
  2098  		}
  2099  		v.reset(OpSub64)
  2100  		v.Type = t
  2101  		v0 := b.NewValue0(v.Pos, OpRsh64x64, t)
  2102  		v1 := b.NewValue0(v.Pos, OpAdd64, t)
  2103  		v2 := b.NewValue0(v.Pos, OpHmul64, t)
  2104  		v3 := b.NewValue0(v.Pos, OpConst64, t)
  2105  		v3.AuxInt = smagic64m(c)
  2106  		v2.AddArg(v3)
  2107  		v2.AddArg(x)
  2108  		v1.AddArg(v2)
  2109  		v1.AddArg(x)
  2110  		v0.AddArg(v1)
  2111  		v4 := b.NewValue0(v.Pos, OpConst64, t)
  2112  		v4.AuxInt = smagic64s(c)
  2113  		v0.AddArg(v4)
  2114  		v.AddArg(v0)
  2115  		v5 := b.NewValue0(v.Pos, OpRsh64x64, t)
  2116  		v5.AddArg(x)
  2117  		v6 := b.NewValue0(v.Pos, OpConst64, t)
  2118  		v6.AuxInt = 63
  2119  		v5.AddArg(v6)
  2120  		v.AddArg(v5)
  2121  		return true
  2122  	}
  2123  	// match: (Div64 <t> x (Const64 [c]))
  2124  	// cond: c < 0 && smagic64ok(c) && smagic64m(c) > 0
  2125  	// result: (Neg64 <t>     (Sub64 <t>       (Rsh64x64 <t>         (Hmul64 <t>           (Const64 <t> [smagic64m(c)])           x)         (Const64 <t> [smagic64s(c)]))       (Rsh64x64 <t>         x         (Const64 <t> [63]))))
  2126  	for {
  2127  		t := v.Type
  2128  		x := v.Args[0]
  2129  		v_1 := v.Args[1]
  2130  		if v_1.Op != OpConst64 {
  2131  			break
  2132  		}
  2133  		c := v_1.AuxInt
  2134  		if !(c < 0 && smagic64ok(c) && smagic64m(c) > 0) {
  2135  			break
  2136  		}
  2137  		v.reset(OpNeg64)
  2138  		v.Type = t
  2139  		v0 := b.NewValue0(v.Pos, OpSub64, t)
  2140  		v1 := b.NewValue0(v.Pos, OpRsh64x64, t)
  2141  		v2 := b.NewValue0(v.Pos, OpHmul64, t)
  2142  		v3 := b.NewValue0(v.Pos, OpConst64, t)
  2143  		v3.AuxInt = smagic64m(c)
  2144  		v2.AddArg(v3)
  2145  		v2.AddArg(x)
  2146  		v1.AddArg(v2)
  2147  		v4 := b.NewValue0(v.Pos, OpConst64, t)
  2148  		v4.AuxInt = smagic64s(c)
  2149  		v1.AddArg(v4)
  2150  		v0.AddArg(v1)
  2151  		v5 := b.NewValue0(v.Pos, OpRsh64x64, t)
  2152  		v5.AddArg(x)
  2153  		v6 := b.NewValue0(v.Pos, OpConst64, t)
  2154  		v6.AuxInt = 63
  2155  		v5.AddArg(v6)
  2156  		v0.AddArg(v5)
  2157  		v.AddArg(v0)
  2158  		return true
  2159  	}
  2160  	// match: (Div64 <t> x (Const64 [c]))
  2161  	// cond: c < 0 && smagic64ok(c) && smagic64m(c) < 0
  2162  	// result: (Neg64 <t>     (Sub64 <t>       (Rsh64x64 <t>         (Add64 <t>           (Hmul64 <t>             (Const64 <t> [smagic64m(c)])             x)           x)         (Const64 <t> [smagic64s(c)]))       (Rsh64x64 <t>         x         (Const64 <t> [63]))))
  2163  	for {
  2164  		t := v.Type
  2165  		x := v.Args[0]
  2166  		v_1 := v.Args[1]
  2167  		if v_1.Op != OpConst64 {
  2168  			break
  2169  		}
  2170  		c := v_1.AuxInt
  2171  		if !(c < 0 && smagic64ok(c) && smagic64m(c) < 0) {
  2172  			break
  2173  		}
  2174  		v.reset(OpNeg64)
  2175  		v.Type = t
  2176  		v0 := b.NewValue0(v.Pos, OpSub64, t)
  2177  		v1 := b.NewValue0(v.Pos, OpRsh64x64, t)
  2178  		v2 := b.NewValue0(v.Pos, OpAdd64, t)
  2179  		v3 := b.NewValue0(v.Pos, OpHmul64, t)
  2180  		v4 := b.NewValue0(v.Pos, OpConst64, t)
  2181  		v4.AuxInt = smagic64m(c)
  2182  		v3.AddArg(v4)
  2183  		v3.AddArg(x)
  2184  		v2.AddArg(v3)
  2185  		v2.AddArg(x)
  2186  		v1.AddArg(v2)
  2187  		v5 := b.NewValue0(v.Pos, OpConst64, t)
  2188  		v5.AuxInt = smagic64s(c)
  2189  		v1.AddArg(v5)
  2190  		v0.AddArg(v1)
  2191  		v6 := b.NewValue0(v.Pos, OpRsh64x64, t)
  2192  		v6.AddArg(x)
  2193  		v7 := b.NewValue0(v.Pos, OpConst64, t)
  2194  		v7.AuxInt = 63
  2195  		v6.AddArg(v7)
  2196  		v0.AddArg(v6)
  2197  		v.AddArg(v0)
  2198  		return true
  2199  	}
  2200  	return false
  2201  }
  2202  func rewriteValuegeneric_OpDiv64F(v *Value, config *Config) bool {
  2203  	b := v.Block
  2204  	_ = b
  2205  	// match: (Div64F x (Const64F [f2i(1)]))
  2206  	// cond:
  2207  	// result: x
  2208  	for {
  2209  		x := v.Args[0]
  2210  		v_1 := v.Args[1]
  2211  		if v_1.Op != OpConst64F {
  2212  			break
  2213  		}
  2214  		if v_1.AuxInt != f2i(1) {
  2215  			break
  2216  		}
  2217  		v.reset(OpCopy)
  2218  		v.Type = x.Type
  2219  		v.AddArg(x)
  2220  		return true
  2221  	}
  2222  	// match: (Div64F x (Const64F [f2i(-1)]))
  2223  	// cond:
  2224  	// result: (Neg64F x)
  2225  	for {
  2226  		x := v.Args[0]
  2227  		v_1 := v.Args[1]
  2228  		if v_1.Op != OpConst64F {
  2229  			break
  2230  		}
  2231  		if v_1.AuxInt != f2i(-1) {
  2232  			break
  2233  		}
  2234  		v.reset(OpNeg64F)
  2235  		v.AddArg(x)
  2236  		return true
  2237  	}
  2238  	return false
  2239  }
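        // rewriteValuegeneric_OpDiv64u rewrites unsigned 64-bit division by a power of
        // two into a right shift, and division by other constants into an unsigned magic
        // multiply (umagic64m/umagic64s); the Avg64u form handles the magic constants
        // flagged by umagic64a, which need an extra averaging step.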
  2240  func rewriteValuegeneric_OpDiv64u(v *Value, config *Config) bool {
  2241  	b := v.Block
  2242  	_ = b
  2243  	// match: (Div64u <t> n (Const64 [c]))
  2244  	// cond: isPowerOfTwo(c)
  2245  	// result: (Rsh64Ux64 n (Const64 <t> [log2(c)]))
  2246  	for {
  2247  		t := v.Type
  2248  		n := v.Args[0]
  2249  		v_1 := v.Args[1]
  2250  		if v_1.Op != OpConst64 {
  2251  			break
  2252  		}
  2253  		c := v_1.AuxInt
  2254  		if !(isPowerOfTwo(c)) {
  2255  			break
  2256  		}
  2257  		v.reset(OpRsh64Ux64)
  2258  		v.AddArg(n)
  2259  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  2260  		v0.AuxInt = log2(c)
  2261  		v.AddArg(v0)
  2262  		return true
  2263  	}
  2264  	// match: (Div64u <t> x (Const64 [c]))
  2265  	// cond: umagic64ok(c) && !umagic64a(c)
  2266  	// result: (Rsh64Ux64     (Hmul64u <t>       (Const64 <t> [umagic64m(c)])       x)     (Const64 <t> [umagic64s(c)]))
  2267  	for {
  2268  		t := v.Type
  2269  		x := v.Args[0]
  2270  		v_1 := v.Args[1]
  2271  		if v_1.Op != OpConst64 {
  2272  			break
  2273  		}
  2274  		c := v_1.AuxInt
  2275  		if !(umagic64ok(c) && !umagic64a(c)) {
  2276  			break
  2277  		}
  2278  		v.reset(OpRsh64Ux64)
  2279  		v0 := b.NewValue0(v.Pos, OpHmul64u, t)
  2280  		v1 := b.NewValue0(v.Pos, OpConst64, t)
  2281  		v1.AuxInt = umagic64m(c)
  2282  		v0.AddArg(v1)
  2283  		v0.AddArg(x)
  2284  		v.AddArg(v0)
  2285  		v2 := b.NewValue0(v.Pos, OpConst64, t)
  2286  		v2.AuxInt = umagic64s(c)
  2287  		v.AddArg(v2)
  2288  		return true
  2289  	}
  2290  	// match: (Div64u <t> x (Const64 [c]))
  2291  	// cond: umagic64ok(c) && umagic64a(c)
  2292  	// result: (Rsh64Ux64     (Avg64u <t>       (Hmul64u <t>         x         (Const64 <t> [umagic64m(c)]))       x)     (Const64 <t> [umagic64s(c)-1]))
  2293  	for {
  2294  		t := v.Type
  2295  		x := v.Args[0]
  2296  		v_1 := v.Args[1]
  2297  		if v_1.Op != OpConst64 {
  2298  			break
  2299  		}
  2300  		c := v_1.AuxInt
  2301  		if !(umagic64ok(c) && umagic64a(c)) {
  2302  			break
  2303  		}
  2304  		v.reset(OpRsh64Ux64)
  2305  		v0 := b.NewValue0(v.Pos, OpAvg64u, t)
  2306  		v1 := b.NewValue0(v.Pos, OpHmul64u, t)
  2307  		v1.AddArg(x)
  2308  		v2 := b.NewValue0(v.Pos, OpConst64, t)
  2309  		v2.AuxInt = umagic64m(c)
  2310  		v1.AddArg(v2)
  2311  		v0.AddArg(v1)
  2312  		v0.AddArg(x)
  2313  		v.AddArg(v0)
  2314  		v3 := b.NewValue0(v.Pos, OpConst64, t)
  2315  		v3.AuxInt = umagic64s(c) - 1
  2316  		v.AddArg(v3)
  2317  		return true
  2318  	}
  2319  	return false
  2320  }
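        // rewriteValuegeneric_OpEq16 and the Eq32/Eq64/Eq8 functions below follow one
        // pattern: fold x == x and constant-vs-constant comparisons into a ConstBool,
        // absorb a constant addend on the right into the constant on the left, and
        // canonicalize by moving a lone constant to the first operand.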
  2321  func rewriteValuegeneric_OpEq16(v *Value, config *Config) bool {
  2322  	b := v.Block
  2323  	_ = b
  2324  	// match: (Eq16 x x)
  2325  	// cond:
  2326  	// result: (ConstBool [1])
  2327  	for {
  2328  		x := v.Args[0]
  2329  		if x != v.Args[1] {
  2330  			break
  2331  		}
  2332  		v.reset(OpConstBool)
  2333  		v.AuxInt = 1
  2334  		return true
  2335  	}
  2336  	// match: (Eq16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x))
  2337  	// cond:
  2338  	// result: (Eq16 (Const16 <t> [int64(int16(c-d))]) x)
  2339  	for {
  2340  		v_0 := v.Args[0]
  2341  		if v_0.Op != OpConst16 {
  2342  			break
  2343  		}
  2344  		t := v_0.Type
  2345  		c := v_0.AuxInt
  2346  		v_1 := v.Args[1]
  2347  		if v_1.Op != OpAdd16 {
  2348  			break
  2349  		}
  2350  		v_1_0 := v_1.Args[0]
  2351  		if v_1_0.Op != OpConst16 {
  2352  			break
  2353  		}
  2354  		if v_1_0.Type != t {
  2355  			break
  2356  		}
  2357  		d := v_1_0.AuxInt
  2358  		x := v_1.Args[1]
  2359  		v.reset(OpEq16)
  2360  		v0 := b.NewValue0(v.Pos, OpConst16, t)
  2361  		v0.AuxInt = int64(int16(c - d))
  2362  		v.AddArg(v0)
  2363  		v.AddArg(x)
  2364  		return true
  2365  	}
  2366  	// match: (Eq16 x (Const16 <t> [c]))
  2367  	// cond: x.Op != OpConst16
  2368  	// result: (Eq16 (Const16 <t> [c]) x)
  2369  	for {
  2370  		x := v.Args[0]
  2371  		v_1 := v.Args[1]
  2372  		if v_1.Op != OpConst16 {
  2373  			break
  2374  		}
  2375  		t := v_1.Type
  2376  		c := v_1.AuxInt
  2377  		if !(x.Op != OpConst16) {
  2378  			break
  2379  		}
  2380  		v.reset(OpEq16)
  2381  		v0 := b.NewValue0(v.Pos, OpConst16, t)
  2382  		v0.AuxInt = c
  2383  		v.AddArg(v0)
  2384  		v.AddArg(x)
  2385  		return true
  2386  	}
  2387  	// match: (Eq16 (Const16 [c]) (Const16 [d]))
  2388  	// cond:
  2389  	// result: (ConstBool [b2i(c == d)])
  2390  	for {
  2391  		v_0 := v.Args[0]
  2392  		if v_0.Op != OpConst16 {
  2393  			break
  2394  		}
  2395  		c := v_0.AuxInt
  2396  		v_1 := v.Args[1]
  2397  		if v_1.Op != OpConst16 {
  2398  			break
  2399  		}
  2400  		d := v_1.AuxInt
  2401  		v.reset(OpConstBool)
  2402  		v.AuxInt = b2i(c == d)
  2403  		return true
  2404  	}
  2405  	return false
  2406  }
  2407  func rewriteValuegeneric_OpEq32(v *Value, config *Config) bool {
  2408  	b := v.Block
  2409  	_ = b
  2410  	// match: (Eq32 x x)
  2411  	// cond:
  2412  	// result: (ConstBool [1])
  2413  	for {
  2414  		x := v.Args[0]
  2415  		if x != v.Args[1] {
  2416  			break
  2417  		}
  2418  		v.reset(OpConstBool)
  2419  		v.AuxInt = 1
  2420  		return true
  2421  	}
  2422  	// match: (Eq32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x))
  2423  	// cond:
  2424  	// result: (Eq32 (Const32 <t> [int64(int32(c-d))]) x)
  2425  	for {
  2426  		v_0 := v.Args[0]
  2427  		if v_0.Op != OpConst32 {
  2428  			break
  2429  		}
  2430  		t := v_0.Type
  2431  		c := v_0.AuxInt
  2432  		v_1 := v.Args[1]
  2433  		if v_1.Op != OpAdd32 {
  2434  			break
  2435  		}
  2436  		v_1_0 := v_1.Args[0]
  2437  		if v_1_0.Op != OpConst32 {
  2438  			break
  2439  		}
  2440  		if v_1_0.Type != t {
  2441  			break
  2442  		}
  2443  		d := v_1_0.AuxInt
  2444  		x := v_1.Args[1]
  2445  		v.reset(OpEq32)
  2446  		v0 := b.NewValue0(v.Pos, OpConst32, t)
  2447  		v0.AuxInt = int64(int32(c - d))
  2448  		v.AddArg(v0)
  2449  		v.AddArg(x)
  2450  		return true
  2451  	}
  2452  	// match: (Eq32 x (Const32 <t> [c]))
  2453  	// cond: x.Op != OpConst32
  2454  	// result: (Eq32 (Const32 <t> [c]) x)
  2455  	for {
  2456  		x := v.Args[0]
  2457  		v_1 := v.Args[1]
  2458  		if v_1.Op != OpConst32 {
  2459  			break
  2460  		}
  2461  		t := v_1.Type
  2462  		c := v_1.AuxInt
  2463  		if !(x.Op != OpConst32) {
  2464  			break
  2465  		}
  2466  		v.reset(OpEq32)
  2467  		v0 := b.NewValue0(v.Pos, OpConst32, t)
  2468  		v0.AuxInt = c
  2469  		v.AddArg(v0)
  2470  		v.AddArg(x)
  2471  		return true
  2472  	}
  2473  	// match: (Eq32 (Const32 [c]) (Const32 [d]))
  2474  	// cond:
  2475  	// result: (ConstBool [b2i(c == d)])
  2476  	for {
  2477  		v_0 := v.Args[0]
  2478  		if v_0.Op != OpConst32 {
  2479  			break
  2480  		}
  2481  		c := v_0.AuxInt
  2482  		v_1 := v.Args[1]
  2483  		if v_1.Op != OpConst32 {
  2484  			break
  2485  		}
  2486  		d := v_1.AuxInt
  2487  		v.reset(OpConstBool)
  2488  		v.AuxInt = b2i(c == d)
  2489  		return true
  2490  	}
  2491  	return false
  2492  }
  2493  func rewriteValuegeneric_OpEq64(v *Value, config *Config) bool {
  2494  	b := v.Block
  2495  	_ = b
  2496  	// match: (Eq64 x x)
  2497  	// cond:
  2498  	// result: (ConstBool [1])
  2499  	for {
  2500  		x := v.Args[0]
  2501  		if x != v.Args[1] {
  2502  			break
  2503  		}
  2504  		v.reset(OpConstBool)
  2505  		v.AuxInt = 1
  2506  		return true
  2507  	}
  2508  	// match: (Eq64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x))
  2509  	// cond:
  2510  	// result: (Eq64 (Const64 <t> [c-d]) x)
  2511  	for {
  2512  		v_0 := v.Args[0]
  2513  		if v_0.Op != OpConst64 {
  2514  			break
  2515  		}
  2516  		t := v_0.Type
  2517  		c := v_0.AuxInt
  2518  		v_1 := v.Args[1]
  2519  		if v_1.Op != OpAdd64 {
  2520  			break
  2521  		}
  2522  		v_1_0 := v_1.Args[0]
  2523  		if v_1_0.Op != OpConst64 {
  2524  			break
  2525  		}
  2526  		if v_1_0.Type != t {
  2527  			break
  2528  		}
  2529  		d := v_1_0.AuxInt
  2530  		x := v_1.Args[1]
  2531  		v.reset(OpEq64)
  2532  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  2533  		v0.AuxInt = c - d
  2534  		v.AddArg(v0)
  2535  		v.AddArg(x)
  2536  		return true
  2537  	}
  2538  	// match: (Eq64 x (Const64 <t> [c]))
  2539  	// cond: x.Op != OpConst64
  2540  	// result: (Eq64 (Const64 <t> [c]) x)
  2541  	for {
  2542  		x := v.Args[0]
  2543  		v_1 := v.Args[1]
  2544  		if v_1.Op != OpConst64 {
  2545  			break
  2546  		}
  2547  		t := v_1.Type
  2548  		c := v_1.AuxInt
  2549  		if !(x.Op != OpConst64) {
  2550  			break
  2551  		}
  2552  		v.reset(OpEq64)
  2553  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  2554  		v0.AuxInt = c
  2555  		v.AddArg(v0)
  2556  		v.AddArg(x)
  2557  		return true
  2558  	}
  2559  	// match: (Eq64 (Const64 [c]) (Const64 [d]))
  2560  	// cond:
  2561  	// result: (ConstBool [b2i(c == d)])
  2562  	for {
  2563  		v_0 := v.Args[0]
  2564  		if v_0.Op != OpConst64 {
  2565  			break
  2566  		}
  2567  		c := v_0.AuxInt
  2568  		v_1 := v.Args[1]
  2569  		if v_1.Op != OpConst64 {
  2570  			break
  2571  		}
  2572  		d := v_1.AuxInt
  2573  		v.reset(OpConstBool)
  2574  		v.AuxInt = b2i(c == d)
  2575  		return true
  2576  	}
  2577  	return false
  2578  }
  2579  func rewriteValuegeneric_OpEq8(v *Value, config *Config) bool {
  2580  	b := v.Block
  2581  	_ = b
  2582  	// match: (Eq8  x x)
  2583  	// cond:
  2584  	// result: (ConstBool [1])
  2585  	for {
  2586  		x := v.Args[0]
  2587  		if x != v.Args[1] {
  2588  			break
  2589  		}
  2590  		v.reset(OpConstBool)
  2591  		v.AuxInt = 1
  2592  		return true
  2593  	}
  2594  	// match: (Eq8  (Const8  <t> [c]) (Add8  (Const8  <t> [d]) x))
  2595  	// cond:
  2596  	// result: (Eq8  (Const8 <t> [int64(int8(c-d))]) x)
  2597  	for {
  2598  		v_0 := v.Args[0]
  2599  		if v_0.Op != OpConst8 {
  2600  			break
  2601  		}
  2602  		t := v_0.Type
  2603  		c := v_0.AuxInt
  2604  		v_1 := v.Args[1]
  2605  		if v_1.Op != OpAdd8 {
  2606  			break
  2607  		}
  2608  		v_1_0 := v_1.Args[0]
  2609  		if v_1_0.Op != OpConst8 {
  2610  			break
  2611  		}
  2612  		if v_1_0.Type != t {
  2613  			break
  2614  		}
  2615  		d := v_1_0.AuxInt
  2616  		x := v_1.Args[1]
  2617  		v.reset(OpEq8)
  2618  		v0 := b.NewValue0(v.Pos, OpConst8, t)
  2619  		v0.AuxInt = int64(int8(c - d))
  2620  		v.AddArg(v0)
  2621  		v.AddArg(x)
  2622  		return true
  2623  	}
  2624  	// match: (Eq8  x (Const8  <t> [c]))
  2625  	// cond: x.Op != OpConst8
  2626  	// result: (Eq8  (Const8  <t> [c]) x)
  2627  	for {
  2628  		x := v.Args[0]
  2629  		v_1 := v.Args[1]
  2630  		if v_1.Op != OpConst8 {
  2631  			break
  2632  		}
  2633  		t := v_1.Type
  2634  		c := v_1.AuxInt
  2635  		if !(x.Op != OpConst8) {
  2636  			break
  2637  		}
  2638  		v.reset(OpEq8)
  2639  		v0 := b.NewValue0(v.Pos, OpConst8, t)
  2640  		v0.AuxInt = c
  2641  		v.AddArg(v0)
  2642  		v.AddArg(x)
  2643  		return true
  2644  	}
  2645  	// match: (Eq8  (Const8  [c]) (Const8  [d]))
  2646  	// cond:
  2647  	// result: (ConstBool [b2i(c == d)])
  2648  	for {
  2649  		v_0 := v.Args[0]
  2650  		if v_0.Op != OpConst8 {
  2651  			break
  2652  		}
  2653  		c := v_0.AuxInt
  2654  		v_1 := v.Args[1]
  2655  		if v_1.Op != OpConst8 {
  2656  			break
  2657  		}
  2658  		d := v_1.AuxInt
  2659  		v.reset(OpConstBool)
  2660  		v.AuxInt = b2i(c == d)
  2661  		return true
  2662  	}
  2663  	return false
  2664  }
  2665  func rewriteValuegeneric_OpEqB(v *Value, config *Config) bool {
  2666  	b := v.Block
  2667  	_ = b
  2668  	// match: (EqB (ConstBool [c]) (ConstBool [d]))
  2669  	// cond:
  2670  	// result: (ConstBool [b2i(c == d)])
  2671  	for {
  2672  		v_0 := v.Args[0]
  2673  		if v_0.Op != OpConstBool {
  2674  			break
  2675  		}
  2676  		c := v_0.AuxInt
  2677  		v_1 := v.Args[1]
  2678  		if v_1.Op != OpConstBool {
  2679  			break
  2680  		}
  2681  		d := v_1.AuxInt
  2682  		v.reset(OpConstBool)
  2683  		v.AuxInt = b2i(c == d)
  2684  		return true
  2685  	}
  2686  	// match: (EqB (ConstBool [0]) x)
  2687  	// cond:
  2688  	// result: (Not x)
  2689  	for {
  2690  		v_0 := v.Args[0]
  2691  		if v_0.Op != OpConstBool {
  2692  			break
  2693  		}
  2694  		if v_0.AuxInt != 0 {
  2695  			break
  2696  		}
  2697  		x := v.Args[1]
  2698  		v.reset(OpNot)
  2699  		v.AddArg(x)
  2700  		return true
  2701  	}
  2702  	// match: (EqB (ConstBool [1]) x)
  2703  	// cond:
  2704  	// result: x
  2705  	for {
  2706  		v_0 := v.Args[0]
  2707  		if v_0.Op != OpConstBool {
  2708  			break
  2709  		}
  2710  		if v_0.AuxInt != 1 {
  2711  			break
  2712  		}
  2713  		x := v.Args[1]
  2714  		v.reset(OpCopy)
  2715  		v.Type = x.Type
  2716  		v.AddArg(x)
  2717  		return true
  2718  	}
  2719  	return false
  2720  }
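        // rewriteValuegeneric_OpEqInter and rewriteValuegeneric_OpEqSlice reduce
        // interface and slice equality to a pointer comparison of the itab and the
        // slice data pointer, respectively.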
  2721  func rewriteValuegeneric_OpEqInter(v *Value, config *Config) bool {
  2722  	b := v.Block
  2723  	_ = b
  2724  	// match: (EqInter x y)
  2725  	// cond:
  2726  	// result: (EqPtr  (ITab x) (ITab y))
  2727  	for {
  2728  		x := v.Args[0]
  2729  		y := v.Args[1]
  2730  		v.reset(OpEqPtr)
  2731  		v0 := b.NewValue0(v.Pos, OpITab, config.fe.TypeBytePtr())
  2732  		v0.AddArg(x)
  2733  		v.AddArg(v0)
  2734  		v1 := b.NewValue0(v.Pos, OpITab, config.fe.TypeBytePtr())
  2735  		v1.AddArg(y)
  2736  		v.AddArg(v1)
  2737  		return true
  2738  	}
  2739  }
  2740  func rewriteValuegeneric_OpEqPtr(v *Value, config *Config) bool {
  2741  	b := v.Block
  2742  	_ = b
  2743  	// match: (EqPtr p (ConstNil))
  2744  	// cond:
  2745  	// result: (Not (IsNonNil p))
  2746  	for {
  2747  		p := v.Args[0]
  2748  		v_1 := v.Args[1]
  2749  		if v_1.Op != OpConstNil {
  2750  			break
  2751  		}
  2752  		v.reset(OpNot)
  2753  		v0 := b.NewValue0(v.Pos, OpIsNonNil, config.fe.TypeBool())
  2754  		v0.AddArg(p)
  2755  		v.AddArg(v0)
  2756  		return true
  2757  	}
  2758  	// match: (EqPtr (ConstNil) p)
  2759  	// cond:
  2760  	// result: (Not (IsNonNil p))
  2761  	for {
  2762  		v_0 := v.Args[0]
  2763  		if v_0.Op != OpConstNil {
  2764  			break
  2765  		}
  2766  		p := v.Args[1]
  2767  		v.reset(OpNot)
  2768  		v0 := b.NewValue0(v.Pos, OpIsNonNil, config.fe.TypeBool())
  2769  		v0.AddArg(p)
  2770  		v.AddArg(v0)
  2771  		return true
  2772  	}
  2773  	return false
  2774  }
  2775  func rewriteValuegeneric_OpEqSlice(v *Value, config *Config) bool {
  2776  	b := v.Block
  2777  	_ = b
  2778  	// match: (EqSlice x y)
  2779  	// cond:
  2780  	// result: (EqPtr  (SlicePtr x) (SlicePtr y))
  2781  	for {
  2782  		x := v.Args[0]
  2783  		y := v.Args[1]
  2784  		v.reset(OpEqPtr)
  2785  		v0 := b.NewValue0(v.Pos, OpSlicePtr, config.fe.TypeBytePtr())
  2786  		v0.AddArg(x)
  2787  		v.AddArg(v0)
  2788  		v1 := b.NewValue0(v.Pos, OpSlicePtr, config.fe.TypeBytePtr())
  2789  		v1.AddArg(y)
  2790  		v.AddArg(v1)
  2791  		return true
  2792  	}
  2793  }
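        // The Geq/Greater/Leq/Less families below constant-fold comparisons of two
        // constants into a ConstBool via b2i, using the signed values directly and
        // reinterpreting them as uintN for the unsigned (U-suffixed) variants.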
  2794  func rewriteValuegeneric_OpGeq16(v *Value, config *Config) bool {
  2795  	b := v.Block
  2796  	_ = b
  2797  	// match: (Geq16 (Const16 [c]) (Const16 [d]))
  2798  	// cond:
  2799  	// result: (ConstBool [b2i(c >= d)])
  2800  	for {
  2801  		v_0 := v.Args[0]
  2802  		if v_0.Op != OpConst16 {
  2803  			break
  2804  		}
  2805  		c := v_0.AuxInt
  2806  		v_1 := v.Args[1]
  2807  		if v_1.Op != OpConst16 {
  2808  			break
  2809  		}
  2810  		d := v_1.AuxInt
  2811  		v.reset(OpConstBool)
  2812  		v.AuxInt = b2i(c >= d)
  2813  		return true
  2814  	}
  2815  	return false
  2816  }
  2817  func rewriteValuegeneric_OpGeq16U(v *Value, config *Config) bool {
  2818  	b := v.Block
  2819  	_ = b
  2820  	// match: (Geq16U (Const16 [c]) (Const16 [d]))
  2821  	// cond:
  2822  	// result: (ConstBool [b2i(uint16(c) >= uint16(d))])
  2823  	for {
  2824  		v_0 := v.Args[0]
  2825  		if v_0.Op != OpConst16 {
  2826  			break
  2827  		}
  2828  		c := v_0.AuxInt
  2829  		v_1 := v.Args[1]
  2830  		if v_1.Op != OpConst16 {
  2831  			break
  2832  		}
  2833  		d := v_1.AuxInt
  2834  		v.reset(OpConstBool)
  2835  		v.AuxInt = b2i(uint16(c) >= uint16(d))
  2836  		return true
  2837  	}
  2838  	return false
  2839  }
  2840  func rewriteValuegeneric_OpGeq32(v *Value, config *Config) bool {
  2841  	b := v.Block
  2842  	_ = b
  2843  	// match: (Geq32 (Const32 [c]) (Const32 [d]))
  2844  	// cond:
  2845  	// result: (ConstBool [b2i(c >= d)])
  2846  	for {
  2847  		v_0 := v.Args[0]
  2848  		if v_0.Op != OpConst32 {
  2849  			break
  2850  		}
  2851  		c := v_0.AuxInt
  2852  		v_1 := v.Args[1]
  2853  		if v_1.Op != OpConst32 {
  2854  			break
  2855  		}
  2856  		d := v_1.AuxInt
  2857  		v.reset(OpConstBool)
  2858  		v.AuxInt = b2i(c >= d)
  2859  		return true
  2860  	}
  2861  	return false
  2862  }
  2863  func rewriteValuegeneric_OpGeq32U(v *Value, config *Config) bool {
  2864  	b := v.Block
  2865  	_ = b
  2866  	// match: (Geq32U (Const32 [c]) (Const32 [d]))
  2867  	// cond:
  2868  	// result: (ConstBool [b2i(uint32(c) >= uint32(d))])
  2869  	for {
  2870  		v_0 := v.Args[0]
  2871  		if v_0.Op != OpConst32 {
  2872  			break
  2873  		}
  2874  		c := v_0.AuxInt
  2875  		v_1 := v.Args[1]
  2876  		if v_1.Op != OpConst32 {
  2877  			break
  2878  		}
  2879  		d := v_1.AuxInt
  2880  		v.reset(OpConstBool)
  2881  		v.AuxInt = b2i(uint32(c) >= uint32(d))
  2882  		return true
  2883  	}
  2884  	return false
  2885  }
  2886  func rewriteValuegeneric_OpGeq64(v *Value, config *Config) bool {
  2887  	b := v.Block
  2888  	_ = b
  2889  	// match: (Geq64 (Const64 [c]) (Const64 [d]))
  2890  	// cond:
  2891  	// result: (ConstBool [b2i(c >= d)])
  2892  	for {
  2893  		v_0 := v.Args[0]
  2894  		if v_0.Op != OpConst64 {
  2895  			break
  2896  		}
  2897  		c := v_0.AuxInt
  2898  		v_1 := v.Args[1]
  2899  		if v_1.Op != OpConst64 {
  2900  			break
  2901  		}
  2902  		d := v_1.AuxInt
  2903  		v.reset(OpConstBool)
  2904  		v.AuxInt = b2i(c >= d)
  2905  		return true
  2906  	}
  2907  	return false
  2908  }
  2909  func rewriteValuegeneric_OpGeq64U(v *Value, config *Config) bool {
  2910  	b := v.Block
  2911  	_ = b
  2912  	// match: (Geq64U (Const64 [c]) (Const64 [d]))
  2913  	// cond:
  2914  	// result: (ConstBool [b2i(uint64(c) >= uint64(d))])
  2915  	for {
  2916  		v_0 := v.Args[0]
  2917  		if v_0.Op != OpConst64 {
  2918  			break
  2919  		}
  2920  		c := v_0.AuxInt
  2921  		v_1 := v.Args[1]
  2922  		if v_1.Op != OpConst64 {
  2923  			break
  2924  		}
  2925  		d := v_1.AuxInt
  2926  		v.reset(OpConstBool)
  2927  		v.AuxInt = b2i(uint64(c) >= uint64(d))
  2928  		return true
  2929  	}
  2930  	return false
  2931  }
  2932  func rewriteValuegeneric_OpGeq8(v *Value, config *Config) bool {
  2933  	b := v.Block
  2934  	_ = b
  2935  	// match: (Geq8  (Const8  [c]) (Const8  [d]))
  2936  	// cond:
  2937  	// result: (ConstBool [b2i(c >= d)])
  2938  	for {
  2939  		v_0 := v.Args[0]
  2940  		if v_0.Op != OpConst8 {
  2941  			break
  2942  		}
  2943  		c := v_0.AuxInt
  2944  		v_1 := v.Args[1]
  2945  		if v_1.Op != OpConst8 {
  2946  			break
  2947  		}
  2948  		d := v_1.AuxInt
  2949  		v.reset(OpConstBool)
  2950  		v.AuxInt = b2i(c >= d)
  2951  		return true
  2952  	}
  2953  	return false
  2954  }
  2955  func rewriteValuegeneric_OpGeq8U(v *Value, config *Config) bool {
  2956  	b := v.Block
  2957  	_ = b
  2958  	// match: (Geq8U  (Const8  [c]) (Const8  [d]))
  2959  	// cond:
  2960  	// result: (ConstBool [b2i(uint8(c)  >= uint8(d))])
  2961  	for {
  2962  		v_0 := v.Args[0]
  2963  		if v_0.Op != OpConst8 {
  2964  			break
  2965  		}
  2966  		c := v_0.AuxInt
  2967  		v_1 := v.Args[1]
  2968  		if v_1.Op != OpConst8 {
  2969  			break
  2970  		}
  2971  		d := v_1.AuxInt
  2972  		v.reset(OpConstBool)
  2973  		v.AuxInt = b2i(uint8(c) >= uint8(d))
  2974  		return true
  2975  	}
  2976  	return false
  2977  }
  2978  func rewriteValuegeneric_OpGreater16(v *Value, config *Config) bool {
  2979  	b := v.Block
  2980  	_ = b
  2981  	// match: (Greater16 (Const16 [c]) (Const16 [d]))
  2982  	// cond:
  2983  	// result: (ConstBool [b2i(c > d)])
  2984  	for {
  2985  		v_0 := v.Args[0]
  2986  		if v_0.Op != OpConst16 {
  2987  			break
  2988  		}
  2989  		c := v_0.AuxInt
  2990  		v_1 := v.Args[1]
  2991  		if v_1.Op != OpConst16 {
  2992  			break
  2993  		}
  2994  		d := v_1.AuxInt
  2995  		v.reset(OpConstBool)
  2996  		v.AuxInt = b2i(c > d)
  2997  		return true
  2998  	}
  2999  	return false
  3000  }
  3001  func rewriteValuegeneric_OpGreater16U(v *Value, config *Config) bool {
  3002  	b := v.Block
  3003  	_ = b
  3004  	// match: (Greater16U (Const16 [c]) (Const16 [d]))
  3005  	// cond:
  3006  	// result: (ConstBool [b2i(uint16(c) > uint16(d))])
  3007  	for {
  3008  		v_0 := v.Args[0]
  3009  		if v_0.Op != OpConst16 {
  3010  			break
  3011  		}
  3012  		c := v_0.AuxInt
  3013  		v_1 := v.Args[1]
  3014  		if v_1.Op != OpConst16 {
  3015  			break
  3016  		}
  3017  		d := v_1.AuxInt
  3018  		v.reset(OpConstBool)
  3019  		v.AuxInt = b2i(uint16(c) > uint16(d))
  3020  		return true
  3021  	}
  3022  	return false
  3023  }
  3024  func rewriteValuegeneric_OpGreater32(v *Value, config *Config) bool {
  3025  	b := v.Block
  3026  	_ = b
  3027  	// match: (Greater32 (Const32 [c]) (Const32 [d]))
  3028  	// cond:
  3029  	// result: (ConstBool [b2i(c > d)])
  3030  	for {
  3031  		v_0 := v.Args[0]
  3032  		if v_0.Op != OpConst32 {
  3033  			break
  3034  		}
  3035  		c := v_0.AuxInt
  3036  		v_1 := v.Args[1]
  3037  		if v_1.Op != OpConst32 {
  3038  			break
  3039  		}
  3040  		d := v_1.AuxInt
  3041  		v.reset(OpConstBool)
  3042  		v.AuxInt = b2i(c > d)
  3043  		return true
  3044  	}
  3045  	return false
  3046  }
  3047  func rewriteValuegeneric_OpGreater32U(v *Value, config *Config) bool {
  3048  	b := v.Block
  3049  	_ = b
  3050  	// match: (Greater32U (Const32 [c]) (Const32 [d]))
  3051  	// cond:
  3052  	// result: (ConstBool [b2i(uint32(c) > uint32(d))])
  3053  	for {
  3054  		v_0 := v.Args[0]
  3055  		if v_0.Op != OpConst32 {
  3056  			break
  3057  		}
  3058  		c := v_0.AuxInt
  3059  		v_1 := v.Args[1]
  3060  		if v_1.Op != OpConst32 {
  3061  			break
  3062  		}
  3063  		d := v_1.AuxInt
  3064  		v.reset(OpConstBool)
  3065  		v.AuxInt = b2i(uint32(c) > uint32(d))
  3066  		return true
  3067  	}
  3068  	return false
  3069  }
  3070  func rewriteValuegeneric_OpGreater64(v *Value, config *Config) bool {
  3071  	b := v.Block
  3072  	_ = b
  3073  	// match: (Greater64 (Const64 [c]) (Const64 [d]))
  3074  	// cond:
  3075  	// result: (ConstBool [b2i(c > d)])
  3076  	for {
  3077  		v_0 := v.Args[0]
  3078  		if v_0.Op != OpConst64 {
  3079  			break
  3080  		}
  3081  		c := v_0.AuxInt
  3082  		v_1 := v.Args[1]
  3083  		if v_1.Op != OpConst64 {
  3084  			break
  3085  		}
  3086  		d := v_1.AuxInt
  3087  		v.reset(OpConstBool)
  3088  		v.AuxInt = b2i(c > d)
  3089  		return true
  3090  	}
  3091  	return false
  3092  }
  3093  func rewriteValuegeneric_OpGreater64U(v *Value, config *Config) bool {
  3094  	b := v.Block
  3095  	_ = b
  3096  	// match: (Greater64U (Const64 [c]) (Const64 [d]))
  3097  	// cond:
  3098  	// result: (ConstBool [b2i(uint64(c) > uint64(d))])
  3099  	for {
  3100  		v_0 := v.Args[0]
  3101  		if v_0.Op != OpConst64 {
  3102  			break
  3103  		}
  3104  		c := v_0.AuxInt
  3105  		v_1 := v.Args[1]
  3106  		if v_1.Op != OpConst64 {
  3107  			break
  3108  		}
  3109  		d := v_1.AuxInt
  3110  		v.reset(OpConstBool)
  3111  		v.AuxInt = b2i(uint64(c) > uint64(d))
  3112  		return true
  3113  	}
  3114  	return false
  3115  }
  3116  func rewriteValuegeneric_OpGreater8(v *Value, config *Config) bool {
  3117  	b := v.Block
  3118  	_ = b
  3119  	// match: (Greater8  (Const8  [c]) (Const8  [d]))
  3120  	// cond:
  3121  	// result: (ConstBool [b2i(c > d)])
  3122  	for {
  3123  		v_0 := v.Args[0]
  3124  		if v_0.Op != OpConst8 {
  3125  			break
  3126  		}
  3127  		c := v_0.AuxInt
  3128  		v_1 := v.Args[1]
  3129  		if v_1.Op != OpConst8 {
  3130  			break
  3131  		}
  3132  		d := v_1.AuxInt
  3133  		v.reset(OpConstBool)
  3134  		v.AuxInt = b2i(c > d)
  3135  		return true
  3136  	}
  3137  	return false
  3138  }
  3139  func rewriteValuegeneric_OpGreater8U(v *Value, config *Config) bool {
  3140  	b := v.Block
  3141  	_ = b
  3142  	// match: (Greater8U  (Const8  [c]) (Const8  [d]))
  3143  	// cond:
  3144  	// result: (ConstBool [b2i(uint8(c)  > uint8(d))])
  3145  	for {
  3146  		v_0 := v.Args[0]
  3147  		if v_0.Op != OpConst8 {
  3148  			break
  3149  		}
  3150  		c := v_0.AuxInt
  3151  		v_1 := v.Args[1]
  3152  		if v_1.Op != OpConst8 {
  3153  			break
  3154  		}
  3155  		d := v_1.AuxInt
  3156  		v.reset(OpConstBool)
  3157  		v.AuxInt = b2i(uint8(c) > uint8(d))
  3158  		return true
  3159  	}
  3160  	return false
  3161  }
  3162  func rewriteValuegeneric_OpIMake(v *Value, config *Config) bool {
  3163  	b := v.Block
  3164  	_ = b
  3165  	// match: (IMake typ (StructMake1 val))
  3166  	// cond:
  3167  	// result: (IMake typ val)
  3168  	for {
  3169  		typ := v.Args[0]
  3170  		v_1 := v.Args[1]
  3171  		if v_1.Op != OpStructMake1 {
  3172  			break
  3173  		}
  3174  		val := v_1.Args[0]
  3175  		v.reset(OpIMake)
  3176  		v.AddArg(typ)
  3177  		v.AddArg(val)
  3178  		return true
  3179  	}
  3180  	// match: (IMake typ (ArrayMake1 val))
  3181  	// cond:
  3182  	// result: (IMake typ val)
  3183  	for {
  3184  		typ := v.Args[0]
  3185  		v_1 := v.Args[1]
  3186  		if v_1.Op != OpArrayMake1 {
  3187  			break
  3188  		}
  3189  		val := v_1.Args[0]
  3190  		v.reset(OpIMake)
  3191  		v.AddArg(typ)
  3192  		v.AddArg(val)
  3193  		return true
  3194  	}
  3195  	return false
  3196  }
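        // rewriteValuegeneric_OpIsInBounds proves bounds checks statically: an index
        // that is a zero-extended small integer, a value masked by a small constant,
        // a constant below the bound, or the remainder of an unsigned modulus by the
        // bound is always in range, while an index compared against itself never is.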
  3197  func rewriteValuegeneric_OpIsInBounds(v *Value, config *Config) bool {
  3198  	b := v.Block
  3199  	_ = b
  3200  	// match: (IsInBounds (ZeroExt8to32  _) (Const32 [c]))
  3201  	// cond: (1 << 8)  <= c
  3202  	// result: (ConstBool [1])
  3203  	for {
  3204  		v_0 := v.Args[0]
  3205  		if v_0.Op != OpZeroExt8to32 {
  3206  			break
  3207  		}
  3208  		v_1 := v.Args[1]
  3209  		if v_1.Op != OpConst32 {
  3210  			break
  3211  		}
  3212  		c := v_1.AuxInt
  3213  		if !((1 << 8) <= c) {
  3214  			break
  3215  		}
  3216  		v.reset(OpConstBool)
  3217  		v.AuxInt = 1
  3218  		return true
  3219  	}
  3220  	// match: (IsInBounds (ZeroExt8to64  _) (Const64 [c]))
  3221  	// cond: (1 << 8)  <= c
  3222  	// result: (ConstBool [1])
  3223  	for {
  3224  		v_0 := v.Args[0]
  3225  		if v_0.Op != OpZeroExt8to64 {
  3226  			break
  3227  		}
  3228  		v_1 := v.Args[1]
  3229  		if v_1.Op != OpConst64 {
  3230  			break
  3231  		}
  3232  		c := v_1.AuxInt
  3233  		if !((1 << 8) <= c) {
  3234  			break
  3235  		}
  3236  		v.reset(OpConstBool)
  3237  		v.AuxInt = 1
  3238  		return true
  3239  	}
  3240  	// match: (IsInBounds (ZeroExt16to32 _) (Const32 [c]))
  3241  	// cond: (1 << 16) <= c
  3242  	// result: (ConstBool [1])
  3243  	for {
  3244  		v_0 := v.Args[0]
  3245  		if v_0.Op != OpZeroExt16to32 {
  3246  			break
  3247  		}
  3248  		v_1 := v.Args[1]
  3249  		if v_1.Op != OpConst32 {
  3250  			break
  3251  		}
  3252  		c := v_1.AuxInt
  3253  		if !((1 << 16) <= c) {
  3254  			break
  3255  		}
  3256  		v.reset(OpConstBool)
  3257  		v.AuxInt = 1
  3258  		return true
  3259  	}
  3260  	// match: (IsInBounds (ZeroExt16to64 _) (Const64 [c]))
  3261  	// cond: (1 << 16) <= c
  3262  	// result: (ConstBool [1])
  3263  	for {
  3264  		v_0 := v.Args[0]
  3265  		if v_0.Op != OpZeroExt16to64 {
  3266  			break
  3267  		}
  3268  		v_1 := v.Args[1]
  3269  		if v_1.Op != OpConst64 {
  3270  			break
  3271  		}
  3272  		c := v_1.AuxInt
  3273  		if !((1 << 16) <= c) {
  3274  			break
  3275  		}
  3276  		v.reset(OpConstBool)
  3277  		v.AuxInt = 1
  3278  		return true
  3279  	}
  3280  	// match: (IsInBounds x x)
  3281  	// cond:
  3282  	// result: (ConstBool [0])
  3283  	for {
  3284  		x := v.Args[0]
  3285  		if x != v.Args[1] {
  3286  			break
  3287  		}
  3288  		v.reset(OpConstBool)
  3289  		v.AuxInt = 0
  3290  		return true
  3291  	}
  3292  	// match: (IsInBounds (And32 (Const32 [c]) _) (Const32 [d]))
  3293  	// cond: 0 <= c && c < d
  3294  	// result: (ConstBool [1])
  3295  	for {
  3296  		v_0 := v.Args[0]
  3297  		if v_0.Op != OpAnd32 {
  3298  			break
  3299  		}
  3300  		v_0_0 := v_0.Args[0]
  3301  		if v_0_0.Op != OpConst32 {
  3302  			break
  3303  		}
  3304  		c := v_0_0.AuxInt
  3305  		v_1 := v.Args[1]
  3306  		if v_1.Op != OpConst32 {
  3307  			break
  3308  		}
  3309  		d := v_1.AuxInt
  3310  		if !(0 <= c && c < d) {
  3311  			break
  3312  		}
  3313  		v.reset(OpConstBool)
  3314  		v.AuxInt = 1
  3315  		return true
  3316  	}
  3317  	// match: (IsInBounds (And64 (Const64 [c]) _) (Const64 [d]))
  3318  	// cond: 0 <= c && c < d
  3319  	// result: (ConstBool [1])
  3320  	for {
  3321  		v_0 := v.Args[0]
  3322  		if v_0.Op != OpAnd64 {
  3323  			break
  3324  		}
  3325  		v_0_0 := v_0.Args[0]
  3326  		if v_0_0.Op != OpConst64 {
  3327  			break
  3328  		}
  3329  		c := v_0_0.AuxInt
  3330  		v_1 := v.Args[1]
  3331  		if v_1.Op != OpConst64 {
  3332  			break
  3333  		}
  3334  		d := v_1.AuxInt
  3335  		if !(0 <= c && c < d) {
  3336  			break
  3337  		}
  3338  		v.reset(OpConstBool)
  3339  		v.AuxInt = 1
  3340  		return true
  3341  	}
  3342  	// match: (IsInBounds (Const32 [c]) (Const32 [d]))
  3343  	// cond:
  3344  	// result: (ConstBool [b2i(0 <= c && c < d)])
  3345  	for {
  3346  		v_0 := v.Args[0]
  3347  		if v_0.Op != OpConst32 {
  3348  			break
  3349  		}
  3350  		c := v_0.AuxInt
  3351  		v_1 := v.Args[1]
  3352  		if v_1.Op != OpConst32 {
  3353  			break
  3354  		}
  3355  		d := v_1.AuxInt
  3356  		v.reset(OpConstBool)
  3357  		v.AuxInt = b2i(0 <= c && c < d)
  3358  		return true
  3359  	}
  3360  	// match: (IsInBounds (Const64 [c]) (Const64 [d]))
  3361  	// cond:
  3362  	// result: (ConstBool [b2i(0 <= c && c < d)])
  3363  	for {
  3364  		v_0 := v.Args[0]
  3365  		if v_0.Op != OpConst64 {
  3366  			break
  3367  		}
  3368  		c := v_0.AuxInt
  3369  		v_1 := v.Args[1]
  3370  		if v_1.Op != OpConst64 {
  3371  			break
  3372  		}
  3373  		d := v_1.AuxInt
  3374  		v.reset(OpConstBool)
  3375  		v.AuxInt = b2i(0 <= c && c < d)
  3376  		return true
  3377  	}
  3378  	// match: (IsInBounds (Mod32u _ y) y)
  3379  	// cond:
  3380  	// result: (ConstBool [1])
  3381  	for {
  3382  		v_0 := v.Args[0]
  3383  		if v_0.Op != OpMod32u {
  3384  			break
  3385  		}
  3386  		y := v_0.Args[1]
  3387  		if y != v.Args[1] {
  3388  			break
  3389  		}
  3390  		v.reset(OpConstBool)
  3391  		v.AuxInt = 1
  3392  		return true
  3393  	}
  3394  	// match: (IsInBounds (Mod64u _ y) y)
  3395  	// cond:
  3396  	// result: (ConstBool [1])
  3397  	for {
  3398  		v_0 := v.Args[0]
  3399  		if v_0.Op != OpMod64u {
  3400  			break
  3401  		}
  3402  		y := v_0.Args[1]
  3403  		if y != v.Args[1] {
  3404  			break
  3405  		}
  3406  		v.reset(OpConstBool)
  3407  		v.AuxInt = 1
  3408  		return true
  3409  	}
  3410  	return false
  3411  }
  3412  func rewriteValuegeneric_OpIsNonNil(v *Value, config *Config) bool {
  3413  	b := v.Block
  3414  	_ = b
  3415  	// match: (IsNonNil (ConstNil))
  3416  	// cond:
  3417  	// result: (ConstBool [0])
  3418  	for {
  3419  		v_0 := v.Args[0]
  3420  		if v_0.Op != OpConstNil {
  3421  			break
  3422  		}
  3423  		v.reset(OpConstBool)
  3424  		v.AuxInt = 0
  3425  		return true
  3426  	}
  3427  	return false
  3428  }
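        // rewriteValuegeneric_OpIsSliceInBounds is the analogue of IsInBounds for slice
        // bounds, where the index may equal the limit (<= instead of <); it also treats
        // a constant zero index and a slice length checked against its own capacity as
        // always in bounds.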
  3429  func rewriteValuegeneric_OpIsSliceInBounds(v *Value, config *Config) bool {
  3430  	b := v.Block
  3431  	_ = b
  3432  	// match: (IsSliceInBounds x x)
  3433  	// cond:
  3434  	// result: (ConstBool [1])
  3435  	for {
  3436  		x := v.Args[0]
  3437  		if x != v.Args[1] {
  3438  			break
  3439  		}
  3440  		v.reset(OpConstBool)
  3441  		v.AuxInt = 1
  3442  		return true
  3443  	}
  3444  	// match: (IsSliceInBounds (And32 (Const32 [c]) _) (Const32 [d]))
  3445  	// cond: 0 <= c && c <= d
  3446  	// result: (ConstBool [1])
  3447  	for {
  3448  		v_0 := v.Args[0]
  3449  		if v_0.Op != OpAnd32 {
  3450  			break
  3451  		}
  3452  		v_0_0 := v_0.Args[0]
  3453  		if v_0_0.Op != OpConst32 {
  3454  			break
  3455  		}
  3456  		c := v_0_0.AuxInt
  3457  		v_1 := v.Args[1]
  3458  		if v_1.Op != OpConst32 {
  3459  			break
  3460  		}
  3461  		d := v_1.AuxInt
  3462  		if !(0 <= c && c <= d) {
  3463  			break
  3464  		}
  3465  		v.reset(OpConstBool)
  3466  		v.AuxInt = 1
  3467  		return true
  3468  	}
  3469  	// match: (IsSliceInBounds (And64 (Const64 [c]) _) (Const64 [d]))
  3470  	// cond: 0 <= c && c <= d
  3471  	// result: (ConstBool [1])
  3472  	for {
  3473  		v_0 := v.Args[0]
  3474  		if v_0.Op != OpAnd64 {
  3475  			break
  3476  		}
  3477  		v_0_0 := v_0.Args[0]
  3478  		if v_0_0.Op != OpConst64 {
  3479  			break
  3480  		}
  3481  		c := v_0_0.AuxInt
  3482  		v_1 := v.Args[1]
  3483  		if v_1.Op != OpConst64 {
  3484  			break
  3485  		}
  3486  		d := v_1.AuxInt
  3487  		if !(0 <= c && c <= d) {
  3488  			break
  3489  		}
  3490  		v.reset(OpConstBool)
  3491  		v.AuxInt = 1
  3492  		return true
  3493  	}
  3494  	// match: (IsSliceInBounds (Const32 [0]) _)
  3495  	// cond:
  3496  	// result: (ConstBool [1])
  3497  	for {
  3498  		v_0 := v.Args[0]
  3499  		if v_0.Op != OpConst32 {
  3500  			break
  3501  		}
  3502  		if v_0.AuxInt != 0 {
  3503  			break
  3504  		}
  3505  		v.reset(OpConstBool)
  3506  		v.AuxInt = 1
  3507  		return true
  3508  	}
  3509  	// match: (IsSliceInBounds (Const64 [0]) _)
  3510  	// cond:
  3511  	// result: (ConstBool [1])
  3512  	for {
  3513  		v_0 := v.Args[0]
  3514  		if v_0.Op != OpConst64 {
  3515  			break
  3516  		}
  3517  		if v_0.AuxInt != 0 {
  3518  			break
  3519  		}
  3520  		v.reset(OpConstBool)
  3521  		v.AuxInt = 1
  3522  		return true
  3523  	}
  3524  	// match: (IsSliceInBounds (Const32 [c]) (Const32 [d]))
  3525  	// cond:
  3526  	// result: (ConstBool [b2i(0 <= c && c <= d)])
  3527  	for {
  3528  		v_0 := v.Args[0]
  3529  		if v_0.Op != OpConst32 {
  3530  			break
  3531  		}
  3532  		c := v_0.AuxInt
  3533  		v_1 := v.Args[1]
  3534  		if v_1.Op != OpConst32 {
  3535  			break
  3536  		}
  3537  		d := v_1.AuxInt
  3538  		v.reset(OpConstBool)
  3539  		v.AuxInt = b2i(0 <= c && c <= d)
  3540  		return true
  3541  	}
  3542  	// match: (IsSliceInBounds (Const64 [c]) (Const64 [d]))
  3543  	// cond:
  3544  	// result: (ConstBool [b2i(0 <= c && c <= d)])
  3545  	for {
  3546  		v_0 := v.Args[0]
  3547  		if v_0.Op != OpConst64 {
  3548  			break
  3549  		}
  3550  		c := v_0.AuxInt
  3551  		v_1 := v.Args[1]
  3552  		if v_1.Op != OpConst64 {
  3553  			break
  3554  		}
  3555  		d := v_1.AuxInt
  3556  		v.reset(OpConstBool)
  3557  		v.AuxInt = b2i(0 <= c && c <= d)
  3558  		return true
  3559  	}
  3560  	// match: (IsSliceInBounds (SliceLen x) (SliceCap x))
  3561  	// cond:
  3562  	// result: (ConstBool [1])
  3563  	for {
  3564  		v_0 := v.Args[0]
  3565  		if v_0.Op != OpSliceLen {
  3566  			break
  3567  		}
  3568  		x := v_0.Args[0]
  3569  		v_1 := v.Args[1]
  3570  		if v_1.Op != OpSliceCap {
  3571  			break
  3572  		}
  3573  		if x != v_1.Args[0] {
  3574  			break
  3575  		}
  3576  		v.reset(OpConstBool)
  3577  		v.AuxInt = 1
  3578  		return true
  3579  	}
  3580  	return false
  3581  }
  3582  func rewriteValuegeneric_OpLeq16(v *Value, config *Config) bool {
  3583  	b := v.Block
  3584  	_ = b
  3585  	// match: (Leq16 (Const16 [c]) (Const16 [d]))
  3586  	// cond:
  3587  	// result: (ConstBool [b2i(c <= d)])
  3588  	for {
  3589  		v_0 := v.Args[0]
  3590  		if v_0.Op != OpConst16 {
  3591  			break
  3592  		}
  3593  		c := v_0.AuxInt
  3594  		v_1 := v.Args[1]
  3595  		if v_1.Op != OpConst16 {
  3596  			break
  3597  		}
  3598  		d := v_1.AuxInt
  3599  		v.reset(OpConstBool)
  3600  		v.AuxInt = b2i(c <= d)
  3601  		return true
  3602  	}
  3603  	return false
  3604  }
  3605  func rewriteValuegeneric_OpLeq16U(v *Value, config *Config) bool {
  3606  	b := v.Block
  3607  	_ = b
  3608  	// match: (Leq16U (Const16 [c]) (Const16 [d]))
  3609  	// cond:
  3610  	// result: (ConstBool [b2i(uint16(c) <= uint16(d))])
  3611  	for {
  3612  		v_0 := v.Args[0]
  3613  		if v_0.Op != OpConst16 {
  3614  			break
  3615  		}
  3616  		c := v_0.AuxInt
  3617  		v_1 := v.Args[1]
  3618  		if v_1.Op != OpConst16 {
  3619  			break
  3620  		}
  3621  		d := v_1.AuxInt
  3622  		v.reset(OpConstBool)
  3623  		v.AuxInt = b2i(uint16(c) <= uint16(d))
  3624  		return true
  3625  	}
  3626  	return false
  3627  }
  3628  func rewriteValuegeneric_OpLeq32(v *Value, config *Config) bool {
  3629  	b := v.Block
  3630  	_ = b
  3631  	// match: (Leq32 (Const32 [c]) (Const32 [d]))
  3632  	// cond:
  3633  	// result: (ConstBool [b2i(c <= d)])
  3634  	for {
  3635  		v_0 := v.Args[0]
  3636  		if v_0.Op != OpConst32 {
  3637  			break
  3638  		}
  3639  		c := v_0.AuxInt
  3640  		v_1 := v.Args[1]
  3641  		if v_1.Op != OpConst32 {
  3642  			break
  3643  		}
  3644  		d := v_1.AuxInt
  3645  		v.reset(OpConstBool)
  3646  		v.AuxInt = b2i(c <= d)
  3647  		return true
  3648  	}
  3649  	return false
  3650  }
  3651  func rewriteValuegeneric_OpLeq32U(v *Value, config *Config) bool {
  3652  	b := v.Block
  3653  	_ = b
  3654  	// match: (Leq32U (Const32 [c]) (Const32 [d]))
  3655  	// cond:
  3656  	// result: (ConstBool [b2i(uint32(c) <= uint32(d))])
  3657  	for {
  3658  		v_0 := v.Args[0]
  3659  		if v_0.Op != OpConst32 {
  3660  			break
  3661  		}
  3662  		c := v_0.AuxInt
  3663  		v_1 := v.Args[1]
  3664  		if v_1.Op != OpConst32 {
  3665  			break
  3666  		}
  3667  		d := v_1.AuxInt
  3668  		v.reset(OpConstBool)
  3669  		v.AuxInt = b2i(uint32(c) <= uint32(d))
  3670  		return true
  3671  	}
  3672  	return false
  3673  }
  3674  func rewriteValuegeneric_OpLeq64(v *Value, config *Config) bool {
  3675  	b := v.Block
  3676  	_ = b
  3677  	// match: (Leq64 (Const64 [c]) (Const64 [d]))
  3678  	// cond:
  3679  	// result: (ConstBool [b2i(c <= d)])
  3680  	for {
  3681  		v_0 := v.Args[0]
  3682  		if v_0.Op != OpConst64 {
  3683  			break
  3684  		}
  3685  		c := v_0.AuxInt
  3686  		v_1 := v.Args[1]
  3687  		if v_1.Op != OpConst64 {
  3688  			break
  3689  		}
  3690  		d := v_1.AuxInt
  3691  		v.reset(OpConstBool)
  3692  		v.AuxInt = b2i(c <= d)
  3693  		return true
  3694  	}
  3695  	return false
  3696  }
  3697  func rewriteValuegeneric_OpLeq64U(v *Value, config *Config) bool {
  3698  	b := v.Block
  3699  	_ = b
  3700  	// match: (Leq64U (Const64 [c]) (Const64 [d]))
  3701  	// cond:
  3702  	// result: (ConstBool [b2i(uint64(c) <= uint64(d))])
  3703  	for {
  3704  		v_0 := v.Args[0]
  3705  		if v_0.Op != OpConst64 {
  3706  			break
  3707  		}
  3708  		c := v_0.AuxInt
  3709  		v_1 := v.Args[1]
  3710  		if v_1.Op != OpConst64 {
  3711  			break
  3712  		}
  3713  		d := v_1.AuxInt
  3714  		v.reset(OpConstBool)
  3715  		v.AuxInt = b2i(uint64(c) <= uint64(d))
  3716  		return true
  3717  	}
  3718  	return false
  3719  }
  3720  func rewriteValuegeneric_OpLeq8(v *Value, config *Config) bool {
  3721  	b := v.Block
  3722  	_ = b
  3723  	// match: (Leq8  (Const8  [c]) (Const8  [d]))
  3724  	// cond:
  3725  	// result: (ConstBool [b2i(c <= d)])
  3726  	for {
  3727  		v_0 := v.Args[0]
  3728  		if v_0.Op != OpConst8 {
  3729  			break
  3730  		}
  3731  		c := v_0.AuxInt
  3732  		v_1 := v.Args[1]
  3733  		if v_1.Op != OpConst8 {
  3734  			break
  3735  		}
  3736  		d := v_1.AuxInt
  3737  		v.reset(OpConstBool)
  3738  		v.AuxInt = b2i(c <= d)
  3739  		return true
  3740  	}
  3741  	return false
  3742  }
  3743  func rewriteValuegeneric_OpLeq8U(v *Value, config *Config) bool {
  3744  	b := v.Block
  3745  	_ = b
  3746  	// match: (Leq8U  (Const8  [c]) (Const8  [d]))
  3747  	// cond:
  3748  	// result: (ConstBool [b2i(uint8(c)  <= uint8(d))])
  3749  	for {
  3750  		v_0 := v.Args[0]
  3751  		if v_0.Op != OpConst8 {
  3752  			break
  3753  		}
  3754  		c := v_0.AuxInt
  3755  		v_1 := v.Args[1]
  3756  		if v_1.Op != OpConst8 {
  3757  			break
  3758  		}
  3759  		d := v_1.AuxInt
  3760  		v.reset(OpConstBool)
  3761  		v.AuxInt = b2i(uint8(c) <= uint8(d))
  3762  		return true
  3763  	}
  3764  	return false
  3765  }
  3766  func rewriteValuegeneric_OpLess16(v *Value, config *Config) bool {
  3767  	b := v.Block
  3768  	_ = b
  3769  	// match: (Less16 (Const16 [c]) (Const16 [d]))
  3770  	// cond:
  3771  	// result: (ConstBool [b2i(c < d)])
  3772  	for {
  3773  		v_0 := v.Args[0]
  3774  		if v_0.Op != OpConst16 {
  3775  			break
  3776  		}
  3777  		c := v_0.AuxInt
  3778  		v_1 := v.Args[1]
  3779  		if v_1.Op != OpConst16 {
  3780  			break
  3781  		}
  3782  		d := v_1.AuxInt
  3783  		v.reset(OpConstBool)
  3784  		v.AuxInt = b2i(c < d)
  3785  		return true
  3786  	}
  3787  	return false
  3788  }
  3789  func rewriteValuegeneric_OpLess16U(v *Value, config *Config) bool {
  3790  	b := v.Block
  3791  	_ = b
  3792  	// match: (Less16U (Const16 [c]) (Const16 [d]))
  3793  	// cond:
  3794  	// result: (ConstBool [b2i(uint16(c) < uint16(d))])
  3795  	for {
  3796  		v_0 := v.Args[0]
  3797  		if v_0.Op != OpConst16 {
  3798  			break
  3799  		}
  3800  		c := v_0.AuxInt
  3801  		v_1 := v.Args[1]
  3802  		if v_1.Op != OpConst16 {
  3803  			break
  3804  		}
  3805  		d := v_1.AuxInt
  3806  		v.reset(OpConstBool)
  3807  		v.AuxInt = b2i(uint16(c) < uint16(d))
  3808  		return true
  3809  	}
  3810  	return false
  3811  }
  3812  func rewriteValuegeneric_OpLess32(v *Value, config *Config) bool {
  3813  	b := v.Block
  3814  	_ = b
  3815  	// match: (Less32 (Const32 [c]) (Const32 [d]))
  3816  	// cond:
  3817  	// result: (ConstBool [b2i(c < d)])
  3818  	for {
  3819  		v_0 := v.Args[0]
  3820  		if v_0.Op != OpConst32 {
  3821  			break
  3822  		}
  3823  		c := v_0.AuxInt
  3824  		v_1 := v.Args[1]
  3825  		if v_1.Op != OpConst32 {
  3826  			break
  3827  		}
  3828  		d := v_1.AuxInt
  3829  		v.reset(OpConstBool)
  3830  		v.AuxInt = b2i(c < d)
  3831  		return true
  3832  	}
  3833  	return false
  3834  }
  3835  func rewriteValuegeneric_OpLess32U(v *Value, config *Config) bool {
  3836  	b := v.Block
  3837  	_ = b
  3838  	// match: (Less32U (Const32 [c]) (Const32 [d]))
  3839  	// cond:
  3840  	// result: (ConstBool [b2i(uint32(c) < uint32(d))])
  3841  	for {
  3842  		v_0 := v.Args[0]
  3843  		if v_0.Op != OpConst32 {
  3844  			break
  3845  		}
  3846  		c := v_0.AuxInt
  3847  		v_1 := v.Args[1]
  3848  		if v_1.Op != OpConst32 {
  3849  			break
  3850  		}
  3851  		d := v_1.AuxInt
  3852  		v.reset(OpConstBool)
  3853  		v.AuxInt = b2i(uint32(c) < uint32(d))
  3854  		return true
  3855  	}
  3856  	return false
  3857  }
  3858  func rewriteValuegeneric_OpLess64(v *Value, config *Config) bool {
  3859  	b := v.Block
  3860  	_ = b
  3861  	// match: (Less64 (Const64 [c]) (Const64 [d]))
  3862  	// cond:
  3863  	// result: (ConstBool [b2i(c < d)])
  3864  	for {
  3865  		v_0 := v.Args[0]
  3866  		if v_0.Op != OpConst64 {
  3867  			break
  3868  		}
  3869  		c := v_0.AuxInt
  3870  		v_1 := v.Args[1]
  3871  		if v_1.Op != OpConst64 {
  3872  			break
  3873  		}
  3874  		d := v_1.AuxInt
  3875  		v.reset(OpConstBool)
  3876  		v.AuxInt = b2i(c < d)
  3877  		return true
  3878  	}
  3879  	return false
  3880  }
  3881  func rewriteValuegeneric_OpLess64U(v *Value, config *Config) bool {
  3882  	b := v.Block
  3883  	_ = b
  3884  	// match: (Less64U (Const64 [c]) (Const64 [d]))
  3885  	// cond:
  3886  	// result: (ConstBool [b2i(uint64(c) < uint64(d))])
  3887  	for {
  3888  		v_0 := v.Args[0]
  3889  		if v_0.Op != OpConst64 {
  3890  			break
  3891  		}
  3892  		c := v_0.AuxInt
  3893  		v_1 := v.Args[1]
  3894  		if v_1.Op != OpConst64 {
  3895  			break
  3896  		}
  3897  		d := v_1.AuxInt
  3898  		v.reset(OpConstBool)
  3899  		v.AuxInt = b2i(uint64(c) < uint64(d))
  3900  		return true
  3901  	}
  3902  	return false
  3903  }
  3904  func rewriteValuegeneric_OpLess8(v *Value, config *Config) bool {
  3905  	b := v.Block
  3906  	_ = b
  3907  	// match: (Less8  (Const8  [c]) (Const8  [d]))
  3908  	// cond:
  3909  	// result: (ConstBool [b2i(c < d)])
  3910  	for {
  3911  		v_0 := v.Args[0]
  3912  		if v_0.Op != OpConst8 {
  3913  			break
  3914  		}
  3915  		c := v_0.AuxInt
  3916  		v_1 := v.Args[1]
  3917  		if v_1.Op != OpConst8 {
  3918  			break
  3919  		}
  3920  		d := v_1.AuxInt
  3921  		v.reset(OpConstBool)
  3922  		v.AuxInt = b2i(c < d)
  3923  		return true
  3924  	}
  3925  	return false
  3926  }
  3927  func rewriteValuegeneric_OpLess8U(v *Value, config *Config) bool {
  3928  	b := v.Block
  3929  	_ = b
  3930  	// match: (Less8U  (Const8  [c]) (Const8  [d]))
  3931  	// cond:
  3932  	// result: (ConstBool [b2i(uint8(c)  < uint8(d))])
  3933  	for {
  3934  		v_0 := v.Args[0]
  3935  		if v_0.Op != OpConst8 {
  3936  			break
  3937  		}
  3938  		c := v_0.AuxInt
  3939  		v_1 := v.Args[1]
  3940  		if v_1.Op != OpConst8 {
  3941  			break
  3942  		}
  3943  		d := v_1.AuxInt
  3944  		v.reset(OpConstBool)
  3945  		v.AuxInt = b2i(uint8(c) < uint8(d))
  3946  		return true
  3947  	}
  3948  	return false
  3949  }
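        // rewriteValuegeneric_OpLoad forwards a load from a store of the same size to
        // the same address (store-to-load forwarding) and decomposes loads of small
        // SSA-able structs into per-field loads assembled with StructMakeN.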
  3950  func rewriteValuegeneric_OpLoad(v *Value, config *Config) bool {
  3951  	b := v.Block
  3952  	_ = b
  3953  	// match: (Load <t1> p1 (Store [w] p2 x _))
  3954  	// cond: isSamePtr(p1,p2) && t1.Compare(x.Type)==CMPeq && w == t1.Size()
  3955  	// result: x
  3956  	for {
  3957  		t1 := v.Type
  3958  		p1 := v.Args[0]
  3959  		v_1 := v.Args[1]
  3960  		if v_1.Op != OpStore {
  3961  			break
  3962  		}
  3963  		w := v_1.AuxInt
  3964  		p2 := v_1.Args[0]
  3965  		x := v_1.Args[1]
  3966  		if !(isSamePtr(p1, p2) && t1.Compare(x.Type) == CMPeq && w == t1.Size()) {
  3967  			break
  3968  		}
  3969  		v.reset(OpCopy)
  3970  		v.Type = x.Type
  3971  		v.AddArg(x)
  3972  		return true
  3973  	}
  3974  	// match: (Load <t> _ _)
  3975  	// cond: t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t)
  3976  	// result: (StructMake0)
  3977  	for {
  3978  		t := v.Type
  3979  		if !(t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t)) {
  3980  			break
  3981  		}
  3982  		v.reset(OpStructMake0)
  3983  		return true
  3984  	}
  3985  	// match: (Load <t> ptr mem)
  3986  	// cond: t.IsStruct() && t.NumFields() == 1 && config.fe.CanSSA(t)
  3987  	// result: (StructMake1     (Load <t.FieldType(0)> ptr mem))
  3988  	for {
  3989  		t := v.Type
  3990  		ptr := v.Args[0]
  3991  		mem := v.Args[1]
  3992  		if !(t.IsStruct() && t.NumFields() == 1 && config.fe.CanSSA(t)) {
  3993  			break
  3994  		}
  3995  		v.reset(OpStructMake1)
  3996  		v0 := b.NewValue0(v.Pos, OpLoad, t.FieldType(0))
  3997  		v0.AddArg(ptr)
  3998  		v0.AddArg(mem)
  3999  		v.AddArg(v0)
  4000  		return true
  4001  	}
  4002  	// match: (Load <t> ptr mem)
  4003  	// cond: t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t)
  4004  	// result: (StructMake2     (Load <t.FieldType(0)> ptr mem)     (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem))
  4005  	for {
  4006  		t := v.Type
  4007  		ptr := v.Args[0]
  4008  		mem := v.Args[1]
  4009  		if !(t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t)) {
  4010  			break
  4011  		}
  4012  		v.reset(OpStructMake2)
  4013  		v0 := b.NewValue0(v.Pos, OpLoad, t.FieldType(0))
  4014  		v0.AddArg(ptr)
  4015  		v0.AddArg(mem)
  4016  		v.AddArg(v0)
  4017  		v1 := b.NewValue0(v.Pos, OpLoad, t.FieldType(1))
  4018  		v2 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo())
  4019  		v2.AuxInt = t.FieldOff(1)
  4020  		v2.AddArg(ptr)
  4021  		v1.AddArg(v2)
  4022  		v1.AddArg(mem)
  4023  		v.AddArg(v1)
  4024  		return true
  4025  	}
  4026  	// match: (Load <t> ptr mem)
  4027  	// cond: t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t)
  4028  	// result: (StructMake3     (Load <t.FieldType(0)> ptr mem)     (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem)     (Load <t.FieldType(2)> (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] ptr) mem))
  4029  	for {
  4030  		t := v.Type
  4031  		ptr := v.Args[0]
  4032  		mem := v.Args[1]
  4033  		if !(t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t)) {
  4034  			break
  4035  		}
  4036  		v.reset(OpStructMake3)
  4037  		v0 := b.NewValue0(v.Pos, OpLoad, t.FieldType(0))
  4038  		v0.AddArg(ptr)
  4039  		v0.AddArg(mem)
  4040  		v.AddArg(v0)
  4041  		v1 := b.NewValue0(v.Pos, OpLoad, t.FieldType(1))
  4042  		v2 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo())
  4043  		v2.AuxInt = t.FieldOff(1)
  4044  		v2.AddArg(ptr)
  4045  		v1.AddArg(v2)
  4046  		v1.AddArg(mem)
  4047  		v.AddArg(v1)
  4048  		v3 := b.NewValue0(v.Pos, OpLoad, t.FieldType(2))
  4049  		v4 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(2).PtrTo())
  4050  		v4.AuxInt = t.FieldOff(2)
  4051  		v4.AddArg(ptr)
  4052  		v3.AddArg(v4)
  4053  		v3.AddArg(mem)
  4054  		v.AddArg(v3)
  4055  		return true
  4056  	}
  4057  	// match: (Load <t> ptr mem)
  4058  	// cond: t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t)
  4059  	// result: (StructMake4     (Load <t.FieldType(0)> ptr mem)     (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem)     (Load <t.FieldType(2)> (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] ptr) mem)     (Load <t.FieldType(3)> (OffPtr <t.FieldType(3).PtrTo()> [t.FieldOff(3)] ptr) mem))
  4060  	for {
  4061  		t := v.Type
  4062  		ptr := v.Args[0]
  4063  		mem := v.Args[1]
  4064  		if !(t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t)) {
  4065  			break
  4066  		}
  4067  		v.reset(OpStructMake4)
  4068  		v0 := b.NewValue0(v.Pos, OpLoad, t.FieldType(0))
  4069  		v0.AddArg(ptr)
  4070  		v0.AddArg(mem)
  4071  		v.AddArg(v0)
  4072  		v1 := b.NewValue0(v.Pos, OpLoad, t.FieldType(1))
  4073  		v2 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo())
  4074  		v2.AuxInt = t.FieldOff(1)
  4075  		v2.AddArg(ptr)
  4076  		v1.AddArg(v2)
  4077  		v1.AddArg(mem)
  4078  		v.AddArg(v1)
  4079  		v3 := b.NewValue0(v.Pos, OpLoad, t.FieldType(2))
  4080  		v4 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(2).PtrTo())
  4081  		v4.AuxInt = t.FieldOff(2)
  4082  		v4.AddArg(ptr)
  4083  		v3.AddArg(v4)
  4084  		v3.AddArg(mem)
  4085  		v.AddArg(v3)
  4086  		v5 := b.NewValue0(v.Pos, OpLoad, t.FieldType(3))
  4087  		v6 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(3).PtrTo())
  4088  		v6.AuxInt = t.FieldOff(3)
  4089  		v6.AddArg(ptr)
  4090  		v5.AddArg(v6)
  4091  		v5.AddArg(mem)
  4092  		v.AddArg(v5)
  4093  		return true
  4094  	}
  4095  	// match: (Load <t> _ _)
  4096  	// cond: t.IsArray() && t.NumElem() == 0
  4097  	// result: (ArrayMake0)
  4098  	for {
  4099  		t := v.Type
  4100  		if !(t.IsArray() && t.NumElem() == 0) {
  4101  			break
  4102  		}
  4103  		v.reset(OpArrayMake0)
  4104  		return true
  4105  	}
  4106  	// match: (Load <t> ptr mem)
  4107  	// cond: t.IsArray() && t.NumElem() == 1 && config.fe.CanSSA(t)
  4108  	// result: (ArrayMake1 (Load <t.ElemType()> ptr mem))
  4109  	for {
  4110  		t := v.Type
  4111  		ptr := v.Args[0]
  4112  		mem := v.Args[1]
  4113  		if !(t.IsArray() && t.NumElem() == 1 && config.fe.CanSSA(t)) {
  4114  			break
  4115  		}
  4116  		v.reset(OpArrayMake1)
  4117  		v0 := b.NewValue0(v.Pos, OpLoad, t.ElemType())
  4118  		v0.AddArg(ptr)
  4119  		v0.AddArg(mem)
  4120  		v.AddArg(v0)
  4121  		return true
  4122  	}
  4123  	return false
  4124  }
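// The Load rules above do two jobs. The first rule forwards a store to a
// following load: when the load and the store address the same pointer
// (isSamePtr), the loaded type equals the stored value's type, and the store
// width w matches that type's size, the Load is replaced by a Copy of the
// stored value, eliminating the memory round trip. The remaining rules
// decompose loads of small SSA-able aggregates: a struct of 0-4 fields (or an
// array of 0-1 elements) whose type the frontend reports as CanSSA is loaded
// field by field, with OffPtr computing each field's address from its offset,
// and the pieces are reassembled with StructMakeN/ArrayMake1 so later passes
// can keep the individual fields in registers.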
  4125  func rewriteValuegeneric_OpLsh16x16(v *Value, config *Config) bool {
  4126  	b := v.Block
  4127  	_ = b
  4128  	// match: (Lsh16x16  <t> x (Const16 [c]))
  4129  	// cond:
  4130  	// result: (Lsh16x64  x (Const64 <t> [int64(uint16(c))]))
  4131  	for {
  4132  		t := v.Type
  4133  		x := v.Args[0]
  4134  		v_1 := v.Args[1]
  4135  		if v_1.Op != OpConst16 {
  4136  			break
  4137  		}
  4138  		c := v_1.AuxInt
  4139  		v.reset(OpLsh16x64)
  4140  		v.AddArg(x)
  4141  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  4142  		v0.AuxInt = int64(uint16(c))
  4143  		v.AddArg(v0)
  4144  		return true
  4145  	}
  4146  	// match: (Lsh16x16  (Const16 [0]) _)
  4147  	// cond:
  4148  	// result: (Const16 [0])
  4149  	for {
  4150  		v_0 := v.Args[0]
  4151  		if v_0.Op != OpConst16 {
  4152  			break
  4153  		}
  4154  		if v_0.AuxInt != 0 {
  4155  			break
  4156  		}
  4157  		v.reset(OpConst16)
  4158  		v.AuxInt = 0
  4159  		return true
  4160  	}
  4161  	return false
  4162  }
  4163  func rewriteValuegeneric_OpLsh16x32(v *Value, config *Config) bool {
  4164  	b := v.Block
  4165  	_ = b
  4166  	// match: (Lsh16x32  <t> x (Const32 [c]))
  4167  	// cond:
  4168  	// result: (Lsh16x64  x (Const64 <t> [int64(uint32(c))]))
  4169  	for {
  4170  		t := v.Type
  4171  		x := v.Args[0]
  4172  		v_1 := v.Args[1]
  4173  		if v_1.Op != OpConst32 {
  4174  			break
  4175  		}
  4176  		c := v_1.AuxInt
  4177  		v.reset(OpLsh16x64)
  4178  		v.AddArg(x)
  4179  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  4180  		v0.AuxInt = int64(uint32(c))
  4181  		v.AddArg(v0)
  4182  		return true
  4183  	}
  4184  	// match: (Lsh16x32  (Const16 [0]) _)
  4185  	// cond:
  4186  	// result: (Const16 [0])
  4187  	for {
  4188  		v_0 := v.Args[0]
  4189  		if v_0.Op != OpConst16 {
  4190  			break
  4191  		}
  4192  		if v_0.AuxInt != 0 {
  4193  			break
  4194  		}
  4195  		v.reset(OpConst16)
  4196  		v.AuxInt = 0
  4197  		return true
  4198  	}
  4199  	return false
  4200  }
  4201  func rewriteValuegeneric_OpLsh16x64(v *Value, config *Config) bool {
  4202  	b := v.Block
  4203  	_ = b
  4204  	// match: (Lsh16x64  (Const16 [c]) (Const64 [d]))
  4205  	// cond:
  4206  	// result: (Const16 [int64(int16(c) << uint64(d))])
  4207  	for {
  4208  		v_0 := v.Args[0]
  4209  		if v_0.Op != OpConst16 {
  4210  			break
  4211  		}
  4212  		c := v_0.AuxInt
  4213  		v_1 := v.Args[1]
  4214  		if v_1.Op != OpConst64 {
  4215  			break
  4216  		}
  4217  		d := v_1.AuxInt
  4218  		v.reset(OpConst16)
  4219  		v.AuxInt = int64(int16(c) << uint64(d))
  4220  		return true
  4221  	}
  4222  	// match: (Lsh16x64  x (Const64 [0]))
  4223  	// cond:
  4224  	// result: x
  4225  	for {
  4226  		x := v.Args[0]
  4227  		v_1 := v.Args[1]
  4228  		if v_1.Op != OpConst64 {
  4229  			break
  4230  		}
  4231  		if v_1.AuxInt != 0 {
  4232  			break
  4233  		}
  4234  		v.reset(OpCopy)
  4235  		v.Type = x.Type
  4236  		v.AddArg(x)
  4237  		return true
  4238  	}
  4239  	// match: (Lsh16x64  (Const16 [0]) _)
  4240  	// cond:
  4241  	// result: (Const16 [0])
  4242  	for {
  4243  		v_0 := v.Args[0]
  4244  		if v_0.Op != OpConst16 {
  4245  			break
  4246  		}
  4247  		if v_0.AuxInt != 0 {
  4248  			break
  4249  		}
  4250  		v.reset(OpConst16)
  4251  		v.AuxInt = 0
  4252  		return true
  4253  	}
  4254  	// match: (Lsh16x64  _ (Const64 [c]))
  4255  	// cond: uint64(c) >= 16
  4256  	// result: (Const16 [0])
  4257  	for {
  4258  		v_1 := v.Args[1]
  4259  		if v_1.Op != OpConst64 {
  4260  			break
  4261  		}
  4262  		c := v_1.AuxInt
  4263  		if !(uint64(c) >= 16) {
  4264  			break
  4265  		}
  4266  		v.reset(OpConst16)
  4267  		v.AuxInt = 0
  4268  		return true
  4269  	}
  4270  	// match: (Lsh16x64 <t> (Lsh16x64 x (Const64 [c])) (Const64 [d]))
  4271  	// cond: !uaddOvf(c,d)
  4272  	// result: (Lsh16x64 x (Const64 <t> [c+d]))
  4273  	for {
  4274  		t := v.Type
  4275  		v_0 := v.Args[0]
  4276  		if v_0.Op != OpLsh16x64 {
  4277  			break
  4278  		}
  4279  		x := v_0.Args[0]
  4280  		v_0_1 := v_0.Args[1]
  4281  		if v_0_1.Op != OpConst64 {
  4282  			break
  4283  		}
  4284  		c := v_0_1.AuxInt
  4285  		v_1 := v.Args[1]
  4286  		if v_1.Op != OpConst64 {
  4287  			break
  4288  		}
  4289  		d := v_1.AuxInt
  4290  		if !(!uaddOvf(c, d)) {
  4291  			break
  4292  		}
  4293  		v.reset(OpLsh16x64)
  4294  		v.AddArg(x)
  4295  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  4296  		v0.AuxInt = c + d
  4297  		v.AddArg(v0)
  4298  		return true
  4299  	}
  4300  	// match: (Lsh16x64 (Rsh16Ux64 (Lsh16x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
  4301  	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
  4302  	// result: (Lsh16x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
  4303  	for {
  4304  		v_0 := v.Args[0]
  4305  		if v_0.Op != OpRsh16Ux64 {
  4306  			break
  4307  		}
  4308  		v_0_0 := v_0.Args[0]
  4309  		if v_0_0.Op != OpLsh16x64 {
  4310  			break
  4311  		}
  4312  		x := v_0_0.Args[0]
  4313  		v_0_0_1 := v_0_0.Args[1]
  4314  		if v_0_0_1.Op != OpConst64 {
  4315  			break
  4316  		}
  4317  		c1 := v_0_0_1.AuxInt
  4318  		v_0_1 := v_0.Args[1]
  4319  		if v_0_1.Op != OpConst64 {
  4320  			break
  4321  		}
  4322  		c2 := v_0_1.AuxInt
  4323  		v_1 := v.Args[1]
  4324  		if v_1.Op != OpConst64 {
  4325  			break
  4326  		}
  4327  		c3 := v_1.AuxInt
  4328  		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
  4329  			break
  4330  		}
  4331  		v.reset(OpLsh16x64)
  4332  		v.AddArg(x)
  4333  		v0 := b.NewValue0(v.Pos, OpConst64, config.fe.TypeUInt64())
  4334  		v0.AuxInt = c1 - c2 + c3
  4335  		v.AddArg(v0)
  4336  		return true
  4337  	}
  4338  	return false
  4339  }
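// The Lsh16x64 rules above show the full left-shift rule set; the same shapes
// repeat below for the 8-, 32- and 64-bit widths and for the 8/16/32-bit
// shift-count types (whose counts are first widened into a Const64). In order:
// fold a shift of two constants, drop a shift by 0, keep a shifted constant 0
// as 0, turn any shift by a count >= the operand width into the constant 0,
// merge two stacked left shifts into one when uaddOvf reports the counts can
// be added without overflow, and collapse ((x << c1) >>u c2) << c3 into
// x << (c1-c2+c3) when c1 >= c2 and c3 >= c2.
//
// exampleLsh16Fold is a hand-written sketch of the constant-fold arithmetic
// (an illustration only, not produced by the rule generator).
func exampleLsh16Fold(c, d int64) int64 {
	// Shift the 16-bit view of c and store the result sign-extended, exactly
	// as int64(int16(c) << uint64(d)) does above; counts >= 16 yield 0.
	return int64(int16(c) << uint64(d))
}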
  4340  func rewriteValuegeneric_OpLsh16x8(v *Value, config *Config) bool {
  4341  	b := v.Block
  4342  	_ = b
  4343  	// match: (Lsh16x8   <t> x (Const8  [c]))
  4344  	// cond:
  4345  	// result: (Lsh16x64  x (Const64 <t> [int64(uint8(c))]))
  4346  	for {
  4347  		t := v.Type
  4348  		x := v.Args[0]
  4349  		v_1 := v.Args[1]
  4350  		if v_1.Op != OpConst8 {
  4351  			break
  4352  		}
  4353  		c := v_1.AuxInt
  4354  		v.reset(OpLsh16x64)
  4355  		v.AddArg(x)
  4356  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  4357  		v0.AuxInt = int64(uint8(c))
  4358  		v.AddArg(v0)
  4359  		return true
  4360  	}
  4361  	// match: (Lsh16x8  (Const16 [0]) _)
  4362  	// cond:
  4363  	// result: (Const16 [0])
  4364  	for {
  4365  		v_0 := v.Args[0]
  4366  		if v_0.Op != OpConst16 {
  4367  			break
  4368  		}
  4369  		if v_0.AuxInt != 0 {
  4370  			break
  4371  		}
  4372  		v.reset(OpConst16)
  4373  		v.AuxInt = 0
  4374  		return true
  4375  	}
  4376  	return false
  4377  }
  4378  func rewriteValuegeneric_OpLsh32x16(v *Value, config *Config) bool {
  4379  	b := v.Block
  4380  	_ = b
  4381  	// match: (Lsh32x16  <t> x (Const16 [c]))
  4382  	// cond:
  4383  	// result: (Lsh32x64  x (Const64 <t> [int64(uint16(c))]))
  4384  	for {
  4385  		t := v.Type
  4386  		x := v.Args[0]
  4387  		v_1 := v.Args[1]
  4388  		if v_1.Op != OpConst16 {
  4389  			break
  4390  		}
  4391  		c := v_1.AuxInt
  4392  		v.reset(OpLsh32x64)
  4393  		v.AddArg(x)
  4394  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  4395  		v0.AuxInt = int64(uint16(c))
  4396  		v.AddArg(v0)
  4397  		return true
  4398  	}
  4399  	// match: (Lsh32x16  (Const32 [0]) _)
  4400  	// cond:
  4401  	// result: (Const32 [0])
  4402  	for {
  4403  		v_0 := v.Args[0]
  4404  		if v_0.Op != OpConst32 {
  4405  			break
  4406  		}
  4407  		if v_0.AuxInt != 0 {
  4408  			break
  4409  		}
  4410  		v.reset(OpConst32)
  4411  		v.AuxInt = 0
  4412  		return true
  4413  	}
  4414  	return false
  4415  }
  4416  func rewriteValuegeneric_OpLsh32x32(v *Value, config *Config) bool {
  4417  	b := v.Block
  4418  	_ = b
  4419  	// match: (Lsh32x32  <t> x (Const32 [c]))
  4420  	// cond:
  4421  	// result: (Lsh32x64  x (Const64 <t> [int64(uint32(c))]))
  4422  	for {
  4423  		t := v.Type
  4424  		x := v.Args[0]
  4425  		v_1 := v.Args[1]
  4426  		if v_1.Op != OpConst32 {
  4427  			break
  4428  		}
  4429  		c := v_1.AuxInt
  4430  		v.reset(OpLsh32x64)
  4431  		v.AddArg(x)
  4432  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  4433  		v0.AuxInt = int64(uint32(c))
  4434  		v.AddArg(v0)
  4435  		return true
  4436  	}
  4437  	// match: (Lsh32x32  (Const32 [0]) _)
  4438  	// cond:
  4439  	// result: (Const32 [0])
  4440  	for {
  4441  		v_0 := v.Args[0]
  4442  		if v_0.Op != OpConst32 {
  4443  			break
  4444  		}
  4445  		if v_0.AuxInt != 0 {
  4446  			break
  4447  		}
  4448  		v.reset(OpConst32)
  4449  		v.AuxInt = 0
  4450  		return true
  4451  	}
  4452  	return false
  4453  }
  4454  func rewriteValuegeneric_OpLsh32x64(v *Value, config *Config) bool {
  4455  	b := v.Block
  4456  	_ = b
  4457  	// match: (Lsh32x64  (Const32 [c]) (Const64 [d]))
  4458  	// cond:
  4459  	// result: (Const32 [int64(int32(c) << uint64(d))])
  4460  	for {
  4461  		v_0 := v.Args[0]
  4462  		if v_0.Op != OpConst32 {
  4463  			break
  4464  		}
  4465  		c := v_0.AuxInt
  4466  		v_1 := v.Args[1]
  4467  		if v_1.Op != OpConst64 {
  4468  			break
  4469  		}
  4470  		d := v_1.AuxInt
  4471  		v.reset(OpConst32)
  4472  		v.AuxInt = int64(int32(c) << uint64(d))
  4473  		return true
  4474  	}
  4475  	// match: (Lsh32x64  x (Const64 [0]))
  4476  	// cond:
  4477  	// result: x
  4478  	for {
  4479  		x := v.Args[0]
  4480  		v_1 := v.Args[1]
  4481  		if v_1.Op != OpConst64 {
  4482  			break
  4483  		}
  4484  		if v_1.AuxInt != 0 {
  4485  			break
  4486  		}
  4487  		v.reset(OpCopy)
  4488  		v.Type = x.Type
  4489  		v.AddArg(x)
  4490  		return true
  4491  	}
  4492  	// match: (Lsh32x64  (Const32 [0]) _)
  4493  	// cond:
  4494  	// result: (Const32 [0])
  4495  	for {
  4496  		v_0 := v.Args[0]
  4497  		if v_0.Op != OpConst32 {
  4498  			break
  4499  		}
  4500  		if v_0.AuxInt != 0 {
  4501  			break
  4502  		}
  4503  		v.reset(OpConst32)
  4504  		v.AuxInt = 0
  4505  		return true
  4506  	}
  4507  	// match: (Lsh32x64  _ (Const64 [c]))
  4508  	// cond: uint64(c) >= 32
  4509  	// result: (Const32 [0])
  4510  	for {
  4511  		v_1 := v.Args[1]
  4512  		if v_1.Op != OpConst64 {
  4513  			break
  4514  		}
  4515  		c := v_1.AuxInt
  4516  		if !(uint64(c) >= 32) {
  4517  			break
  4518  		}
  4519  		v.reset(OpConst32)
  4520  		v.AuxInt = 0
  4521  		return true
  4522  	}
  4523  	// match: (Lsh32x64 <t> (Lsh32x64 x (Const64 [c])) (Const64 [d]))
  4524  	// cond: !uaddOvf(c,d)
  4525  	// result: (Lsh32x64 x (Const64 <t> [c+d]))
  4526  	for {
  4527  		t := v.Type
  4528  		v_0 := v.Args[0]
  4529  		if v_0.Op != OpLsh32x64 {
  4530  			break
  4531  		}
  4532  		x := v_0.Args[0]
  4533  		v_0_1 := v_0.Args[1]
  4534  		if v_0_1.Op != OpConst64 {
  4535  			break
  4536  		}
  4537  		c := v_0_1.AuxInt
  4538  		v_1 := v.Args[1]
  4539  		if v_1.Op != OpConst64 {
  4540  			break
  4541  		}
  4542  		d := v_1.AuxInt
  4543  		if !(!uaddOvf(c, d)) {
  4544  			break
  4545  		}
  4546  		v.reset(OpLsh32x64)
  4547  		v.AddArg(x)
  4548  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  4549  		v0.AuxInt = c + d
  4550  		v.AddArg(v0)
  4551  		return true
  4552  	}
  4553  	// match: (Lsh32x64 (Rsh32Ux64 (Lsh32x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
  4554  	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
  4555  	// result: (Lsh32x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
  4556  	for {
  4557  		v_0 := v.Args[0]
  4558  		if v_0.Op != OpRsh32Ux64 {
  4559  			break
  4560  		}
  4561  		v_0_0 := v_0.Args[0]
  4562  		if v_0_0.Op != OpLsh32x64 {
  4563  			break
  4564  		}
  4565  		x := v_0_0.Args[0]
  4566  		v_0_0_1 := v_0_0.Args[1]
  4567  		if v_0_0_1.Op != OpConst64 {
  4568  			break
  4569  		}
  4570  		c1 := v_0_0_1.AuxInt
  4571  		v_0_1 := v_0.Args[1]
  4572  		if v_0_1.Op != OpConst64 {
  4573  			break
  4574  		}
  4575  		c2 := v_0_1.AuxInt
  4576  		v_1 := v.Args[1]
  4577  		if v_1.Op != OpConst64 {
  4578  			break
  4579  		}
  4580  		c3 := v_1.AuxInt
  4581  		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
  4582  			break
  4583  		}
  4584  		v.reset(OpLsh32x64)
  4585  		v.AddArg(x)
  4586  		v0 := b.NewValue0(v.Pos, OpConst64, config.fe.TypeUInt64())
  4587  		v0.AuxInt = c1 - c2 + c3
  4588  		v.AddArg(v0)
  4589  		return true
  4590  	}
  4591  	return false
  4592  }
  4593  func rewriteValuegeneric_OpLsh32x8(v *Value, config *Config) bool {
  4594  	b := v.Block
  4595  	_ = b
  4596  	// match: (Lsh32x8   <t> x (Const8  [c]))
  4597  	// cond:
  4598  	// result: (Lsh32x64  x (Const64 <t> [int64(uint8(c))]))
  4599  	for {
  4600  		t := v.Type
  4601  		x := v.Args[0]
  4602  		v_1 := v.Args[1]
  4603  		if v_1.Op != OpConst8 {
  4604  			break
  4605  		}
  4606  		c := v_1.AuxInt
  4607  		v.reset(OpLsh32x64)
  4608  		v.AddArg(x)
  4609  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  4610  		v0.AuxInt = int64(uint8(c))
  4611  		v.AddArg(v0)
  4612  		return true
  4613  	}
  4614  	// match: (Lsh32x8  (Const32 [0]) _)
  4615  	// cond:
  4616  	// result: (Const32 [0])
  4617  	for {
  4618  		v_0 := v.Args[0]
  4619  		if v_0.Op != OpConst32 {
  4620  			break
  4621  		}
  4622  		if v_0.AuxInt != 0 {
  4623  			break
  4624  		}
  4625  		v.reset(OpConst32)
  4626  		v.AuxInt = 0
  4627  		return true
  4628  	}
  4629  	return false
  4630  }
  4631  func rewriteValuegeneric_OpLsh64x16(v *Value, config *Config) bool {
  4632  	b := v.Block
  4633  	_ = b
  4634  	// match: (Lsh64x16  <t> x (Const16 [c]))
  4635  	// cond:
  4636  	// result: (Lsh64x64  x (Const64 <t> [int64(uint16(c))]))
  4637  	for {
  4638  		t := v.Type
  4639  		x := v.Args[0]
  4640  		v_1 := v.Args[1]
  4641  		if v_1.Op != OpConst16 {
  4642  			break
  4643  		}
  4644  		c := v_1.AuxInt
  4645  		v.reset(OpLsh64x64)
  4646  		v.AddArg(x)
  4647  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  4648  		v0.AuxInt = int64(uint16(c))
  4649  		v.AddArg(v0)
  4650  		return true
  4651  	}
  4652  	// match: (Lsh64x16  (Const64 [0]) _)
  4653  	// cond:
  4654  	// result: (Const64 [0])
  4655  	for {
  4656  		v_0 := v.Args[0]
  4657  		if v_0.Op != OpConst64 {
  4658  			break
  4659  		}
  4660  		if v_0.AuxInt != 0 {
  4661  			break
  4662  		}
  4663  		v.reset(OpConst64)
  4664  		v.AuxInt = 0
  4665  		return true
  4666  	}
  4667  	return false
  4668  }
  4669  func rewriteValuegeneric_OpLsh64x32(v *Value, config *Config) bool {
  4670  	b := v.Block
  4671  	_ = b
  4672  	// match: (Lsh64x32  <t> x (Const32 [c]))
  4673  	// cond:
  4674  	// result: (Lsh64x64  x (Const64 <t> [int64(uint32(c))]))
  4675  	for {
  4676  		t := v.Type
  4677  		x := v.Args[0]
  4678  		v_1 := v.Args[1]
  4679  		if v_1.Op != OpConst32 {
  4680  			break
  4681  		}
  4682  		c := v_1.AuxInt
  4683  		v.reset(OpLsh64x64)
  4684  		v.AddArg(x)
  4685  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  4686  		v0.AuxInt = int64(uint32(c))
  4687  		v.AddArg(v0)
  4688  		return true
  4689  	}
  4690  	// match: (Lsh64x32  (Const64 [0]) _)
  4691  	// cond:
  4692  	// result: (Const64 [0])
  4693  	for {
  4694  		v_0 := v.Args[0]
  4695  		if v_0.Op != OpConst64 {
  4696  			break
  4697  		}
  4698  		if v_0.AuxInt != 0 {
  4699  			break
  4700  		}
  4701  		v.reset(OpConst64)
  4702  		v.AuxInt = 0
  4703  		return true
  4704  	}
  4705  	return false
  4706  }
  4707  func rewriteValuegeneric_OpLsh64x64(v *Value, config *Config) bool {
  4708  	b := v.Block
  4709  	_ = b
  4710  	// match: (Lsh64x64  (Const64 [c]) (Const64 [d]))
  4711  	// cond:
  4712  	// result: (Const64 [c << uint64(d)])
  4713  	for {
  4714  		v_0 := v.Args[0]
  4715  		if v_0.Op != OpConst64 {
  4716  			break
  4717  		}
  4718  		c := v_0.AuxInt
  4719  		v_1 := v.Args[1]
  4720  		if v_1.Op != OpConst64 {
  4721  			break
  4722  		}
  4723  		d := v_1.AuxInt
  4724  		v.reset(OpConst64)
  4725  		v.AuxInt = c << uint64(d)
  4726  		return true
  4727  	}
  4728  	// match: (Lsh64x64  x (Const64 [0]))
  4729  	// cond:
  4730  	// result: x
  4731  	for {
  4732  		x := v.Args[0]
  4733  		v_1 := v.Args[1]
  4734  		if v_1.Op != OpConst64 {
  4735  			break
  4736  		}
  4737  		if v_1.AuxInt != 0 {
  4738  			break
  4739  		}
  4740  		v.reset(OpCopy)
  4741  		v.Type = x.Type
  4742  		v.AddArg(x)
  4743  		return true
  4744  	}
  4745  	// match: (Lsh64x64  (Const64 [0]) _)
  4746  	// cond:
  4747  	// result: (Const64 [0])
  4748  	for {
  4749  		v_0 := v.Args[0]
  4750  		if v_0.Op != OpConst64 {
  4751  			break
  4752  		}
  4753  		if v_0.AuxInt != 0 {
  4754  			break
  4755  		}
  4756  		v.reset(OpConst64)
  4757  		v.AuxInt = 0
  4758  		return true
  4759  	}
  4760  	// match: (Lsh64x64  _ (Const64 [c]))
  4761  	// cond: uint64(c) >= 64
  4762  	// result: (Const64 [0])
  4763  	for {
  4764  		v_1 := v.Args[1]
  4765  		if v_1.Op != OpConst64 {
  4766  			break
  4767  		}
  4768  		c := v_1.AuxInt
  4769  		if !(uint64(c) >= 64) {
  4770  			break
  4771  		}
  4772  		v.reset(OpConst64)
  4773  		v.AuxInt = 0
  4774  		return true
  4775  	}
  4776  	// match: (Lsh64x64 <t> (Lsh64x64 x (Const64 [c])) (Const64 [d]))
  4777  	// cond: !uaddOvf(c,d)
  4778  	// result: (Lsh64x64 x (Const64 <t> [c+d]))
  4779  	for {
  4780  		t := v.Type
  4781  		v_0 := v.Args[0]
  4782  		if v_0.Op != OpLsh64x64 {
  4783  			break
  4784  		}
  4785  		x := v_0.Args[0]
  4786  		v_0_1 := v_0.Args[1]
  4787  		if v_0_1.Op != OpConst64 {
  4788  			break
  4789  		}
  4790  		c := v_0_1.AuxInt
  4791  		v_1 := v.Args[1]
  4792  		if v_1.Op != OpConst64 {
  4793  			break
  4794  		}
  4795  		d := v_1.AuxInt
  4796  		if !(!uaddOvf(c, d)) {
  4797  			break
  4798  		}
  4799  		v.reset(OpLsh64x64)
  4800  		v.AddArg(x)
  4801  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  4802  		v0.AuxInt = c + d
  4803  		v.AddArg(v0)
  4804  		return true
  4805  	}
  4806  	// match: (Lsh64x64 (Rsh64Ux64 (Lsh64x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
  4807  	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
  4808  	// result: (Lsh64x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
  4809  	for {
  4810  		v_0 := v.Args[0]
  4811  		if v_0.Op != OpRsh64Ux64 {
  4812  			break
  4813  		}
  4814  		v_0_0 := v_0.Args[0]
  4815  		if v_0_0.Op != OpLsh64x64 {
  4816  			break
  4817  		}
  4818  		x := v_0_0.Args[0]
  4819  		v_0_0_1 := v_0_0.Args[1]
  4820  		if v_0_0_1.Op != OpConst64 {
  4821  			break
  4822  		}
  4823  		c1 := v_0_0_1.AuxInt
  4824  		v_0_1 := v_0.Args[1]
  4825  		if v_0_1.Op != OpConst64 {
  4826  			break
  4827  		}
  4828  		c2 := v_0_1.AuxInt
  4829  		v_1 := v.Args[1]
  4830  		if v_1.Op != OpConst64 {
  4831  			break
  4832  		}
  4833  		c3 := v_1.AuxInt
  4834  		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
  4835  			break
  4836  		}
  4837  		v.reset(OpLsh64x64)
  4838  		v.AddArg(x)
  4839  		v0 := b.NewValue0(v.Pos, OpConst64, config.fe.TypeUInt64())
  4840  		v0.AuxInt = c1 - c2 + c3
  4841  		v.AddArg(v0)
  4842  		return true
  4843  	}
  4844  	return false
  4845  }
  4846  func rewriteValuegeneric_OpLsh64x8(v *Value, config *Config) bool {
  4847  	b := v.Block
  4848  	_ = b
  4849  	// match: (Lsh64x8   <t> x (Const8  [c]))
  4850  	// cond:
  4851  	// result: (Lsh64x64  x (Const64 <t> [int64(uint8(c))]))
  4852  	for {
  4853  		t := v.Type
  4854  		x := v.Args[0]
  4855  		v_1 := v.Args[1]
  4856  		if v_1.Op != OpConst8 {
  4857  			break
  4858  		}
  4859  		c := v_1.AuxInt
  4860  		v.reset(OpLsh64x64)
  4861  		v.AddArg(x)
  4862  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  4863  		v0.AuxInt = int64(uint8(c))
  4864  		v.AddArg(v0)
  4865  		return true
  4866  	}
  4867  	// match: (Lsh64x8  (Const64 [0]) _)
  4868  	// cond:
  4869  	// result: (Const64 [0])
  4870  	for {
  4871  		v_0 := v.Args[0]
  4872  		if v_0.Op != OpConst64 {
  4873  			break
  4874  		}
  4875  		if v_0.AuxInt != 0 {
  4876  			break
  4877  		}
  4878  		v.reset(OpConst64)
  4879  		v.AuxInt = 0
  4880  		return true
  4881  	}
  4882  	return false
  4883  }
  4884  func rewriteValuegeneric_OpLsh8x16(v *Value, config *Config) bool {
  4885  	b := v.Block
  4886  	_ = b
  4887  	// match: (Lsh8x16  <t> x (Const16 [c]))
  4888  	// cond:
  4889  	// result: (Lsh8x64  x (Const64 <t> [int64(uint16(c))]))
  4890  	for {
  4891  		t := v.Type
  4892  		x := v.Args[0]
  4893  		v_1 := v.Args[1]
  4894  		if v_1.Op != OpConst16 {
  4895  			break
  4896  		}
  4897  		c := v_1.AuxInt
  4898  		v.reset(OpLsh8x64)
  4899  		v.AddArg(x)
  4900  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  4901  		v0.AuxInt = int64(uint16(c))
  4902  		v.AddArg(v0)
  4903  		return true
  4904  	}
  4905  	// match: (Lsh8x16   (Const8 [0]) _)
  4906  	// cond:
  4907  	// result: (Const8  [0])
  4908  	for {
  4909  		v_0 := v.Args[0]
  4910  		if v_0.Op != OpConst8 {
  4911  			break
  4912  		}
  4913  		if v_0.AuxInt != 0 {
  4914  			break
  4915  		}
  4916  		v.reset(OpConst8)
  4917  		v.AuxInt = 0
  4918  		return true
  4919  	}
  4920  	return false
  4921  }
  4922  func rewriteValuegeneric_OpLsh8x32(v *Value, config *Config) bool {
  4923  	b := v.Block
  4924  	_ = b
  4925  	// match: (Lsh8x32  <t> x (Const32 [c]))
  4926  	// cond:
  4927  	// result: (Lsh8x64  x (Const64 <t> [int64(uint32(c))]))
  4928  	for {
  4929  		t := v.Type
  4930  		x := v.Args[0]
  4931  		v_1 := v.Args[1]
  4932  		if v_1.Op != OpConst32 {
  4933  			break
  4934  		}
  4935  		c := v_1.AuxInt
  4936  		v.reset(OpLsh8x64)
  4937  		v.AddArg(x)
  4938  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  4939  		v0.AuxInt = int64(uint32(c))
  4940  		v.AddArg(v0)
  4941  		return true
  4942  	}
  4943  	// match: (Lsh8x32   (Const8 [0]) _)
  4944  	// cond:
  4945  	// result: (Const8  [0])
  4946  	for {
  4947  		v_0 := v.Args[0]
  4948  		if v_0.Op != OpConst8 {
  4949  			break
  4950  		}
  4951  		if v_0.AuxInt != 0 {
  4952  			break
  4953  		}
  4954  		v.reset(OpConst8)
  4955  		v.AuxInt = 0
  4956  		return true
  4957  	}
  4958  	return false
  4959  }
  4960  func rewriteValuegeneric_OpLsh8x64(v *Value, config *Config) bool {
  4961  	b := v.Block
  4962  	_ = b
  4963  	// match: (Lsh8x64   (Const8  [c]) (Const64 [d]))
  4964  	// cond:
  4965  	// result: (Const8  [int64(int8(c) << uint64(d))])
  4966  	for {
  4967  		v_0 := v.Args[0]
  4968  		if v_0.Op != OpConst8 {
  4969  			break
  4970  		}
  4971  		c := v_0.AuxInt
  4972  		v_1 := v.Args[1]
  4973  		if v_1.Op != OpConst64 {
  4974  			break
  4975  		}
  4976  		d := v_1.AuxInt
  4977  		v.reset(OpConst8)
  4978  		v.AuxInt = int64(int8(c) << uint64(d))
  4979  		return true
  4980  	}
  4981  	// match: (Lsh8x64   x (Const64 [0]))
  4982  	// cond:
  4983  	// result: x
  4984  	for {
  4985  		x := v.Args[0]
  4986  		v_1 := v.Args[1]
  4987  		if v_1.Op != OpConst64 {
  4988  			break
  4989  		}
  4990  		if v_1.AuxInt != 0 {
  4991  			break
  4992  		}
  4993  		v.reset(OpCopy)
  4994  		v.Type = x.Type
  4995  		v.AddArg(x)
  4996  		return true
  4997  	}
  4998  	// match: (Lsh8x64   (Const8 [0]) _)
  4999  	// cond:
  5000  	// result: (Const8  [0])
  5001  	for {
  5002  		v_0 := v.Args[0]
  5003  		if v_0.Op != OpConst8 {
  5004  			break
  5005  		}
  5006  		if v_0.AuxInt != 0 {
  5007  			break
  5008  		}
  5009  		v.reset(OpConst8)
  5010  		v.AuxInt = 0
  5011  		return true
  5012  	}
  5013  	// match: (Lsh8x64   _ (Const64 [c]))
  5014  	// cond: uint64(c) >= 8
  5015  	// result: (Const8  [0])
  5016  	for {
  5017  		v_1 := v.Args[1]
  5018  		if v_1.Op != OpConst64 {
  5019  			break
  5020  		}
  5021  		c := v_1.AuxInt
  5022  		if !(uint64(c) >= 8) {
  5023  			break
  5024  		}
  5025  		v.reset(OpConst8)
  5026  		v.AuxInt = 0
  5027  		return true
  5028  	}
  5029  	// match: (Lsh8x64  <t> (Lsh8x64  x (Const64 [c])) (Const64 [d]))
  5030  	// cond: !uaddOvf(c,d)
  5031  	// result: (Lsh8x64  x (Const64 <t> [c+d]))
  5032  	for {
  5033  		t := v.Type
  5034  		v_0 := v.Args[0]
  5035  		if v_0.Op != OpLsh8x64 {
  5036  			break
  5037  		}
  5038  		x := v_0.Args[0]
  5039  		v_0_1 := v_0.Args[1]
  5040  		if v_0_1.Op != OpConst64 {
  5041  			break
  5042  		}
  5043  		c := v_0_1.AuxInt
  5044  		v_1 := v.Args[1]
  5045  		if v_1.Op != OpConst64 {
  5046  			break
  5047  		}
  5048  		d := v_1.AuxInt
  5049  		if !(!uaddOvf(c, d)) {
  5050  			break
  5051  		}
  5052  		v.reset(OpLsh8x64)
  5053  		v.AddArg(x)
  5054  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  5055  		v0.AuxInt = c + d
  5056  		v.AddArg(v0)
  5057  		return true
  5058  	}
  5059  	// match: (Lsh8x64 (Rsh8Ux64 (Lsh8x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
  5060  	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
  5061  	// result: (Lsh8x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
  5062  	for {
  5063  		v_0 := v.Args[0]
  5064  		if v_0.Op != OpRsh8Ux64 {
  5065  			break
  5066  		}
  5067  		v_0_0 := v_0.Args[0]
  5068  		if v_0_0.Op != OpLsh8x64 {
  5069  			break
  5070  		}
  5071  		x := v_0_0.Args[0]
  5072  		v_0_0_1 := v_0_0.Args[1]
  5073  		if v_0_0_1.Op != OpConst64 {
  5074  			break
  5075  		}
  5076  		c1 := v_0_0_1.AuxInt
  5077  		v_0_1 := v_0.Args[1]
  5078  		if v_0_1.Op != OpConst64 {
  5079  			break
  5080  		}
  5081  		c2 := v_0_1.AuxInt
  5082  		v_1 := v.Args[1]
  5083  		if v_1.Op != OpConst64 {
  5084  			break
  5085  		}
  5086  		c3 := v_1.AuxInt
  5087  		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
  5088  			break
  5089  		}
  5090  		v.reset(OpLsh8x64)
  5091  		v.AddArg(x)
  5092  		v0 := b.NewValue0(v.Pos, OpConst64, config.fe.TypeUInt64())
  5093  		v0.AuxInt = c1 - c2 + c3
  5094  		v.AddArg(v0)
  5095  		return true
  5096  	}
  5097  	return false
  5098  }
  5099  func rewriteValuegeneric_OpLsh8x8(v *Value, config *Config) bool {
  5100  	b := v.Block
  5101  	_ = b
  5102  	// match: (Lsh8x8   <t> x (Const8  [c]))
  5103  	// cond:
  5104  	// result: (Lsh8x64  x (Const64 <t> [int64(uint8(c))]))
  5105  	for {
  5106  		t := v.Type
  5107  		x := v.Args[0]
  5108  		v_1 := v.Args[1]
  5109  		if v_1.Op != OpConst8 {
  5110  			break
  5111  		}
  5112  		c := v_1.AuxInt
  5113  		v.reset(OpLsh8x64)
  5114  		v.AddArg(x)
  5115  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  5116  		v0.AuxInt = int64(uint8(c))
  5117  		v.AddArg(v0)
  5118  		return true
  5119  	}
  5120  	// match: (Lsh8x8   (Const8 [0]) _)
  5121  	// cond:
  5122  	// result: (Const8  [0])
  5123  	for {
  5124  		v_0 := v.Args[0]
  5125  		if v_0.Op != OpConst8 {
  5126  			break
  5127  		}
  5128  		if v_0.AuxInt != 0 {
  5129  			break
  5130  		}
  5131  		v.reset(OpConst8)
  5132  		v.AuxInt = 0
  5133  		return true
  5134  	}
  5135  	return false
  5136  }
  5137  func rewriteValuegeneric_OpMod16(v *Value, config *Config) bool {
  5138  	b := v.Block
  5139  	_ = b
  5140  	// match: (Mod16 (Const16 [c]) (Const16 [d]))
  5141  	// cond: d != 0
  5142  	// result: (Const16 [int64(int16(c % d))])
  5143  	for {
  5144  		v_0 := v.Args[0]
  5145  		if v_0.Op != OpConst16 {
  5146  			break
  5147  		}
  5148  		c := v_0.AuxInt
  5149  		v_1 := v.Args[1]
  5150  		if v_1.Op != OpConst16 {
  5151  			break
  5152  		}
  5153  		d := v_1.AuxInt
  5154  		if !(d != 0) {
  5155  			break
  5156  		}
  5157  		v.reset(OpConst16)
  5158  		v.AuxInt = int64(int16(c % d))
  5159  		return true
  5160  	}
  5161  	return false
  5162  }
  5163  func rewriteValuegeneric_OpMod16u(v *Value, config *Config) bool {
  5164  	b := v.Block
  5165  	_ = b
  5166  	// match: (Mod16u (Const16 [c]) (Const16 [d]))
  5167  	// cond: d != 0
  5168  	// result: (Const16 [int64(uint16(c) % uint16(d))])
  5169  	for {
  5170  		v_0 := v.Args[0]
  5171  		if v_0.Op != OpConst16 {
  5172  			break
  5173  		}
  5174  		c := v_0.AuxInt
  5175  		v_1 := v.Args[1]
  5176  		if v_1.Op != OpConst16 {
  5177  			break
  5178  		}
  5179  		d := v_1.AuxInt
  5180  		if !(d != 0) {
  5181  			break
  5182  		}
  5183  		v.reset(OpConst16)
  5184  		v.AuxInt = int64(uint16(c) % uint16(d))
  5185  		return true
  5186  	}
  5187  	return false
  5188  }
  5189  func rewriteValuegeneric_OpMod32(v *Value, config *Config) bool {
  5190  	b := v.Block
  5191  	_ = b
  5192  	// match: (Mod32 (Const32 [c]) (Const32 [d]))
  5193  	// cond: d != 0
  5194  	// result: (Const32 [int64(int32(c % d))])
  5195  	for {
  5196  		v_0 := v.Args[0]
  5197  		if v_0.Op != OpConst32 {
  5198  			break
  5199  		}
  5200  		c := v_0.AuxInt
  5201  		v_1 := v.Args[1]
  5202  		if v_1.Op != OpConst32 {
  5203  			break
  5204  		}
  5205  		d := v_1.AuxInt
  5206  		if !(d != 0) {
  5207  			break
  5208  		}
  5209  		v.reset(OpConst32)
  5210  		v.AuxInt = int64(int32(c % d))
  5211  		return true
  5212  	}
  5213  	return false
  5214  }
  5215  func rewriteValuegeneric_OpMod32u(v *Value, config *Config) bool {
  5216  	b := v.Block
  5217  	_ = b
  5218  	// match: (Mod32u (Const32 [c]) (Const32 [d]))
  5219  	// cond: d != 0
  5220  	// result: (Const32 [int64(uint32(c) % uint32(d))])
  5221  	for {
  5222  		v_0 := v.Args[0]
  5223  		if v_0.Op != OpConst32 {
  5224  			break
  5225  		}
  5226  		c := v_0.AuxInt
  5227  		v_1 := v.Args[1]
  5228  		if v_1.Op != OpConst32 {
  5229  			break
  5230  		}
  5231  		d := v_1.AuxInt
  5232  		if !(d != 0) {
  5233  			break
  5234  		}
  5235  		v.reset(OpConst32)
  5236  		v.AuxInt = int64(uint32(c) % uint32(d))
  5237  		return true
  5238  	}
  5239  	return false
  5240  }
  5241  func rewriteValuegeneric_OpMod64(v *Value, config *Config) bool {
  5242  	b := v.Block
  5243  	_ = b
  5244  	// match: (Mod64 (Const64 [c]) (Const64 [d]))
  5245  	// cond: d != 0
  5246  	// result: (Const64 [c % d])
  5247  	for {
  5248  		v_0 := v.Args[0]
  5249  		if v_0.Op != OpConst64 {
  5250  			break
  5251  		}
  5252  		c := v_0.AuxInt
  5253  		v_1 := v.Args[1]
  5254  		if v_1.Op != OpConst64 {
  5255  			break
  5256  		}
  5257  		d := v_1.AuxInt
  5258  		if !(d != 0) {
  5259  			break
  5260  		}
  5261  		v.reset(OpConst64)
  5262  		v.AuxInt = c % d
  5263  		return true
  5264  	}
  5265  	// match: (Mod64  <t> x (Const64 [c]))
  5266  	// cond: x.Op != OpConst64 && smagic64ok(c)
  5267  	// result: (Sub64 x (Mul64 <t> (Div64  <t> x (Const64 <t> [c])) (Const64 <t> [c])))
  5268  	for {
  5269  		t := v.Type
  5270  		x := v.Args[0]
  5271  		v_1 := v.Args[1]
  5272  		if v_1.Op != OpConst64 {
  5273  			break
  5274  		}
  5275  		c := v_1.AuxInt
  5276  		if !(x.Op != OpConst64 && smagic64ok(c)) {
  5277  			break
  5278  		}
  5279  		v.reset(OpSub64)
  5280  		v.AddArg(x)
  5281  		v0 := b.NewValue0(v.Pos, OpMul64, t)
  5282  		v1 := b.NewValue0(v.Pos, OpDiv64, t)
  5283  		v1.AddArg(x)
  5284  		v2 := b.NewValue0(v.Pos, OpConst64, t)
  5285  		v2.AuxInt = c
  5286  		v1.AddArg(v2)
  5287  		v0.AddArg(v1)
  5288  		v3 := b.NewValue0(v.Pos, OpConst64, t)
  5289  		v3.AuxInt = c
  5290  		v0.AddArg(v3)
  5291  		v.AddArg(v0)
  5292  		return true
  5293  	}
  5294  	return false
  5295  }
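// Besides folding a modulus of two constants (guarded by d != 0 so the
// compiler never divides by zero), the Mod64 rule above rewrites x % c as
// x - (x/c)*c whenever smagic64ok reports that a magic-number strength
// reduction exists for c; the Div64 this introduces is then reduced further
// by the Div64 rules.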
  5296  func rewriteValuegeneric_OpMod64u(v *Value, config *Config) bool {
  5297  	b := v.Block
  5298  	_ = b
  5299  	// match: (Mod64u (Const64 [c]) (Const64 [d]))
  5300  	// cond: d != 0
  5301  	// result: (Const64 [int64(uint64(c) % uint64(d))])
  5302  	for {
  5303  		v_0 := v.Args[0]
  5304  		if v_0.Op != OpConst64 {
  5305  			break
  5306  		}
  5307  		c := v_0.AuxInt
  5308  		v_1 := v.Args[1]
  5309  		if v_1.Op != OpConst64 {
  5310  			break
  5311  		}
  5312  		d := v_1.AuxInt
  5313  		if !(d != 0) {
  5314  			break
  5315  		}
  5316  		v.reset(OpConst64)
  5317  		v.AuxInt = int64(uint64(c) % uint64(d))
  5318  		return true
  5319  	}
  5320  	// match: (Mod64u <t> n (Const64 [c]))
  5321  	// cond: isPowerOfTwo(c)
  5322  	// result: (And64 n (Const64 <t> [c-1]))
  5323  	for {
  5324  		t := v.Type
  5325  		n := v.Args[0]
  5326  		v_1 := v.Args[1]
  5327  		if v_1.Op != OpConst64 {
  5328  			break
  5329  		}
  5330  		c := v_1.AuxInt
  5331  		if !(isPowerOfTwo(c)) {
  5332  			break
  5333  		}
  5334  		v.reset(OpAnd64)
  5335  		v.AddArg(n)
  5336  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  5337  		v0.AuxInt = c - 1
  5338  		v.AddArg(v0)
  5339  		return true
  5340  	}
  5341  	// match: (Mod64u <t> x (Const64 [c]))
  5342  	// cond: x.Op != OpConst64 && umagic64ok(c)
  5343  	// result: (Sub64 x (Mul64 <t> (Div64u <t> x (Const64 <t> [c])) (Const64 <t> [c])))
  5344  	for {
  5345  		t := v.Type
  5346  		x := v.Args[0]
  5347  		v_1 := v.Args[1]
  5348  		if v_1.Op != OpConst64 {
  5349  			break
  5350  		}
  5351  		c := v_1.AuxInt
  5352  		if !(x.Op != OpConst64 && umagic64ok(c)) {
  5353  			break
  5354  		}
  5355  		v.reset(OpSub64)
  5356  		v.AddArg(x)
  5357  		v0 := b.NewValue0(v.Pos, OpMul64, t)
  5358  		v1 := b.NewValue0(v.Pos, OpDiv64u, t)
  5359  		v1.AddArg(x)
  5360  		v2 := b.NewValue0(v.Pos, OpConst64, t)
  5361  		v2.AuxInt = c
  5362  		v1.AddArg(v2)
  5363  		v0.AddArg(v1)
  5364  		v3 := b.NewValue0(v.Pos, OpConst64, t)
  5365  		v3.AuxInt = c
  5366  		v0.AddArg(v3)
  5367  		v.AddArg(v0)
  5368  		return true
  5369  	}
  5370  	return false
  5371  }
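// Mod64u gets one extra rule: a modulus by a power of two becomes a simple
// AND with c-1, which needs no division at all. The remaining rules mirror
// Mod64: constant folding, and x % c rewritten as x - (x/c)*c when umagic64ok
// holds.
//
// exampleMod64uPow2 is a hand-written sketch of the power-of-two identity
// (an illustration only, not produced by the rule generator); the divisor 8
// stands in for any power of two.
func exampleMod64uPow2(n uint64) uint64 {
	const c = 8        // assumed power-of-two divisor
	return n & (c - 1) // same value as n % c
}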
  5372  func rewriteValuegeneric_OpMod8(v *Value, config *Config) bool {
  5373  	b := v.Block
  5374  	_ = b
  5375  	// match: (Mod8  (Const8  [c]) (Const8  [d]))
  5376  	// cond: d != 0
  5377  	// result: (Const8  [int64(int8(c % d))])
  5378  	for {
  5379  		v_0 := v.Args[0]
  5380  		if v_0.Op != OpConst8 {
  5381  			break
  5382  		}
  5383  		c := v_0.AuxInt
  5384  		v_1 := v.Args[1]
  5385  		if v_1.Op != OpConst8 {
  5386  			break
  5387  		}
  5388  		d := v_1.AuxInt
  5389  		if !(d != 0) {
  5390  			break
  5391  		}
  5392  		v.reset(OpConst8)
  5393  		v.AuxInt = int64(int8(c % d))
  5394  		return true
  5395  	}
  5396  	return false
  5397  }
  5398  func rewriteValuegeneric_OpMod8u(v *Value, config *Config) bool {
  5399  	b := v.Block
  5400  	_ = b
  5401  	// match: (Mod8u  (Const8 [c])  (Const8  [d]))
  5402  	// cond: d != 0
  5403  	// result: (Const8  [int64(uint8(c) % uint8(d))])
  5404  	for {
  5405  		v_0 := v.Args[0]
  5406  		if v_0.Op != OpConst8 {
  5407  			break
  5408  		}
  5409  		c := v_0.AuxInt
  5410  		v_1 := v.Args[1]
  5411  		if v_1.Op != OpConst8 {
  5412  			break
  5413  		}
  5414  		d := v_1.AuxInt
  5415  		if !(d != 0) {
  5416  			break
  5417  		}
  5418  		v.reset(OpConst8)
  5419  		v.AuxInt = int64(uint8(c) % uint8(d))
  5420  		return true
  5421  	}
  5422  	return false
  5423  }
  5424  func rewriteValuegeneric_OpMul16(v *Value, config *Config) bool {
  5425  	b := v.Block
  5426  	_ = b
  5427  	// match: (Mul16  (Const16 [c])  (Const16 [d]))
  5428  	// cond:
  5429  	// result: (Const16 [int64(int16(c*d))])
  5430  	for {
  5431  		v_0 := v.Args[0]
  5432  		if v_0.Op != OpConst16 {
  5433  			break
  5434  		}
  5435  		c := v_0.AuxInt
  5436  		v_1 := v.Args[1]
  5437  		if v_1.Op != OpConst16 {
  5438  			break
  5439  		}
  5440  		d := v_1.AuxInt
  5441  		v.reset(OpConst16)
  5442  		v.AuxInt = int64(int16(c * d))
  5443  		return true
  5444  	}
  5445  	// match: (Mul16 (Const16 [-1]) x)
  5446  	// cond:
  5447  	// result: (Neg16 x)
  5448  	for {
  5449  		v_0 := v.Args[0]
  5450  		if v_0.Op != OpConst16 {
  5451  			break
  5452  		}
  5453  		if v_0.AuxInt != -1 {
  5454  			break
  5455  		}
  5456  		x := v.Args[1]
  5457  		v.reset(OpNeg16)
  5458  		v.AddArg(x)
  5459  		return true
  5460  	}
  5461  	// match: (Mul16 x (Const16 <t> [c]))
  5462  	// cond: x.Op != OpConst16
  5463  	// result: (Mul16 (Const16 <t> [c]) x)
  5464  	for {
  5465  		x := v.Args[0]
  5466  		v_1 := v.Args[1]
  5467  		if v_1.Op != OpConst16 {
  5468  			break
  5469  		}
  5470  		t := v_1.Type
  5471  		c := v_1.AuxInt
  5472  		if !(x.Op != OpConst16) {
  5473  			break
  5474  		}
  5475  		v.reset(OpMul16)
  5476  		v0 := b.NewValue0(v.Pos, OpConst16, t)
  5477  		v0.AuxInt = c
  5478  		v.AddArg(v0)
  5479  		v.AddArg(x)
  5480  		return true
  5481  	}
  5482  	// match: (Mul16 (Const16 [0]) _)
  5483  	// cond:
  5484  	// result: (Const16 [0])
  5485  	for {
  5486  		v_0 := v.Args[0]
  5487  		if v_0.Op != OpConst16 {
  5488  			break
  5489  		}
  5490  		if v_0.AuxInt != 0 {
  5491  			break
  5492  		}
  5493  		v.reset(OpConst16)
  5494  		v.AuxInt = 0
  5495  		return true
  5496  	}
  5497  	return false
  5498  }
  5499  func rewriteValuegeneric_OpMul32(v *Value, config *Config) bool {
  5500  	b := v.Block
  5501  	_ = b
  5502  	// match: (Mul32  (Const32 [c])  (Const32 [d]))
  5503  	// cond:
  5504  	// result: (Const32 [int64(int32(c*d))])
  5505  	for {
  5506  		v_0 := v.Args[0]
  5507  		if v_0.Op != OpConst32 {
  5508  			break
  5509  		}
  5510  		c := v_0.AuxInt
  5511  		v_1 := v.Args[1]
  5512  		if v_1.Op != OpConst32 {
  5513  			break
  5514  		}
  5515  		d := v_1.AuxInt
  5516  		v.reset(OpConst32)
  5517  		v.AuxInt = int64(int32(c * d))
  5518  		return true
  5519  	}
  5520  	// match: (Mul32 (Const32 [-1]) x)
  5521  	// cond:
  5522  	// result: (Neg32 x)
  5523  	for {
  5524  		v_0 := v.Args[0]
  5525  		if v_0.Op != OpConst32 {
  5526  			break
  5527  		}
  5528  		if v_0.AuxInt != -1 {
  5529  			break
  5530  		}
  5531  		x := v.Args[1]
  5532  		v.reset(OpNeg32)
  5533  		v.AddArg(x)
  5534  		return true
  5535  	}
  5536  	// match: (Mul32 x (Const32 <t> [c]))
  5537  	// cond: x.Op != OpConst32
  5538  	// result: (Mul32 (Const32 <t> [c]) x)
  5539  	for {
  5540  		x := v.Args[0]
  5541  		v_1 := v.Args[1]
  5542  		if v_1.Op != OpConst32 {
  5543  			break
  5544  		}
  5545  		t := v_1.Type
  5546  		c := v_1.AuxInt
  5547  		if !(x.Op != OpConst32) {
  5548  			break
  5549  		}
  5550  		v.reset(OpMul32)
  5551  		v0 := b.NewValue0(v.Pos, OpConst32, t)
  5552  		v0.AuxInt = c
  5553  		v.AddArg(v0)
  5554  		v.AddArg(x)
  5555  		return true
  5556  	}
  5557  	// match: (Mul32 (Const32 <t> [c]) (Add32 <t> (Const32 <t> [d]) x))
  5558  	// cond:
  5559  	// result: (Add32 (Const32 <t> [int64(int32(c*d))]) (Mul32 <t> (Const32 <t> [c]) x))
  5560  	for {
  5561  		v_0 := v.Args[0]
  5562  		if v_0.Op != OpConst32 {
  5563  			break
  5564  		}
  5565  		t := v_0.Type
  5566  		c := v_0.AuxInt
  5567  		v_1 := v.Args[1]
  5568  		if v_1.Op != OpAdd32 {
  5569  			break
  5570  		}
  5571  		if v_1.Type != t {
  5572  			break
  5573  		}
  5574  		v_1_0 := v_1.Args[0]
  5575  		if v_1_0.Op != OpConst32 {
  5576  			break
  5577  		}
  5578  		if v_1_0.Type != t {
  5579  			break
  5580  		}
  5581  		d := v_1_0.AuxInt
  5582  		x := v_1.Args[1]
  5583  		v.reset(OpAdd32)
  5584  		v0 := b.NewValue0(v.Pos, OpConst32, t)
  5585  		v0.AuxInt = int64(int32(c * d))
  5586  		v.AddArg(v0)
  5587  		v1 := b.NewValue0(v.Pos, OpMul32, t)
  5588  		v2 := b.NewValue0(v.Pos, OpConst32, t)
  5589  		v2.AuxInt = c
  5590  		v1.AddArg(v2)
  5591  		v1.AddArg(x)
  5592  		v.AddArg(v1)
  5593  		return true
  5594  	}
  5595  	// match: (Mul32 (Const32 [0]) _)
  5596  	// cond:
  5597  	// result: (Const32 [0])
  5598  	for {
  5599  		v_0 := v.Args[0]
  5600  		if v_0.Op != OpConst32 {
  5601  			break
  5602  		}
  5603  		if v_0.AuxInt != 0 {
  5604  			break
  5605  		}
  5606  		v.reset(OpConst32)
  5607  		v.AuxInt = 0
  5608  		return true
  5609  	}
  5610  	return false
  5611  }
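// The integer Mul rules (Mul16 above, Mul32 here, and Mul64 and Mul8 below)
// fold a product of two constants with the arithmetic truncated to the
// operand width, turn multiplication by -1 into a negation, move a constant
// operand to the left (the x.Op != OpConst32 guard keeps two constants from
// swapping forever), and reduce multiplication by the constant 0 to 0. The
// 32- and 64-bit variants additionally distribute a constant factor over an
// addition that contains a constant, so c*(d+x) becomes c*d + c*x and the
// constant product folds.
//
// exampleMul32Distribute is a hand-written sketch of that identity (an
// illustration only, not produced by the rule generator).
func exampleMul32Distribute(c, d, x int32) int32 {
	// Two's-complement multiplication distributes over addition, so this
	// equals c*(d+x) modulo 2^32, matching the rewrite above.
	return c*d + c*x
}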
  5612  func rewriteValuegeneric_OpMul32F(v *Value, config *Config) bool {
  5613  	b := v.Block
  5614  	_ = b
  5615  	// match: (Mul32F (Const32F [c]) (Const32F [d]))
  5616  	// cond:
  5617  	// result: (Const32F [f2i(float64(i2f32(c) * i2f32(d)))])
  5618  	for {
  5619  		v_0 := v.Args[0]
  5620  		if v_0.Op != OpConst32F {
  5621  			break
  5622  		}
  5623  		c := v_0.AuxInt
  5624  		v_1 := v.Args[1]
  5625  		if v_1.Op != OpConst32F {
  5626  			break
  5627  		}
  5628  		d := v_1.AuxInt
  5629  		v.reset(OpConst32F)
  5630  		v.AuxInt = f2i(float64(i2f32(c) * i2f32(d)))
  5631  		return true
  5632  	}
  5633  	// match: (Mul32F x (Const32F [f2i(1)]))
  5634  	// cond:
  5635  	// result: x
  5636  	for {
  5637  		x := v.Args[0]
  5638  		v_1 := v.Args[1]
  5639  		if v_1.Op != OpConst32F {
  5640  			break
  5641  		}
  5642  		if v_1.AuxInt != f2i(1) {
  5643  			break
  5644  		}
  5645  		v.reset(OpCopy)
  5646  		v.Type = x.Type
  5647  		v.AddArg(x)
  5648  		return true
  5649  	}
  5650  	// match: (Mul32F (Const32F [f2i(1)]) x)
  5651  	// cond:
  5652  	// result: x
  5653  	for {
  5654  		v_0 := v.Args[0]
  5655  		if v_0.Op != OpConst32F {
  5656  			break
  5657  		}
  5658  		if v_0.AuxInt != f2i(1) {
  5659  			break
  5660  		}
  5661  		x := v.Args[1]
  5662  		v.reset(OpCopy)
  5663  		v.Type = x.Type
  5664  		v.AddArg(x)
  5665  		return true
  5666  	}
  5667  	// match: (Mul32F x (Const32F [f2i(-1)]))
  5668  	// cond:
  5669  	// result: (Neg32F x)
  5670  	for {
  5671  		x := v.Args[0]
  5672  		v_1 := v.Args[1]
  5673  		if v_1.Op != OpConst32F {
  5674  			break
  5675  		}
  5676  		if v_1.AuxInt != f2i(-1) {
  5677  			break
  5678  		}
  5679  		v.reset(OpNeg32F)
  5680  		v.AddArg(x)
  5681  		return true
  5682  	}
  5683  	// match: (Mul32F (Const32F [f2i(-1)]) x)
  5684  	// cond:
  5685  	// result: (Neg32F x)
  5686  	for {
  5687  		v_0 := v.Args[0]
  5688  		if v_0.Op != OpConst32F {
  5689  			break
  5690  		}
  5691  		if v_0.AuxInt != f2i(-1) {
  5692  			break
  5693  		}
  5694  		x := v.Args[1]
  5695  		v.reset(OpNeg32F)
  5696  		v.AddArg(x)
  5697  		return true
  5698  	}
  5699  	return false
  5700  }
  5701  func rewriteValuegeneric_OpMul64(v *Value, config *Config) bool {
  5702  	b := v.Block
  5703  	_ = b
  5704  	// match: (Mul64  (Const64 [c])  (Const64 [d]))
  5705  	// cond:
  5706  	// result: (Const64 [c*d])
  5707  	for {
  5708  		v_0 := v.Args[0]
  5709  		if v_0.Op != OpConst64 {
  5710  			break
  5711  		}
  5712  		c := v_0.AuxInt
  5713  		v_1 := v.Args[1]
  5714  		if v_1.Op != OpConst64 {
  5715  			break
  5716  		}
  5717  		d := v_1.AuxInt
  5718  		v.reset(OpConst64)
  5719  		v.AuxInt = c * d
  5720  		return true
  5721  	}
  5722  	// match: (Mul64 (Const64 [-1]) x)
  5723  	// cond:
  5724  	// result: (Neg64 x)
  5725  	for {
  5726  		v_0 := v.Args[0]
  5727  		if v_0.Op != OpConst64 {
  5728  			break
  5729  		}
  5730  		if v_0.AuxInt != -1 {
  5731  			break
  5732  		}
  5733  		x := v.Args[1]
  5734  		v.reset(OpNeg64)
  5735  		v.AddArg(x)
  5736  		return true
  5737  	}
  5738  	// match: (Mul64 x (Const64 <t> [c]))
  5739  	// cond: x.Op != OpConst64
  5740  	// result: (Mul64 (Const64 <t> [c]) x)
  5741  	for {
  5742  		x := v.Args[0]
  5743  		v_1 := v.Args[1]
  5744  		if v_1.Op != OpConst64 {
  5745  			break
  5746  		}
  5747  		t := v_1.Type
  5748  		c := v_1.AuxInt
  5749  		if !(x.Op != OpConst64) {
  5750  			break
  5751  		}
  5752  		v.reset(OpMul64)
  5753  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  5754  		v0.AuxInt = c
  5755  		v.AddArg(v0)
  5756  		v.AddArg(x)
  5757  		return true
  5758  	}
  5759  	// match: (Mul64 (Const64 <t> [c]) (Add64 <t> (Const64 <t> [d]) x))
  5760  	// cond:
  5761  	// result: (Add64 (Const64 <t> [c*d]) (Mul64 <t> (Const64 <t> [c]) x))
  5762  	for {
  5763  		v_0 := v.Args[0]
  5764  		if v_0.Op != OpConst64 {
  5765  			break
  5766  		}
  5767  		t := v_0.Type
  5768  		c := v_0.AuxInt
  5769  		v_1 := v.Args[1]
  5770  		if v_1.Op != OpAdd64 {
  5771  			break
  5772  		}
  5773  		if v_1.Type != t {
  5774  			break
  5775  		}
  5776  		v_1_0 := v_1.Args[0]
  5777  		if v_1_0.Op != OpConst64 {
  5778  			break
  5779  		}
  5780  		if v_1_0.Type != t {
  5781  			break
  5782  		}
  5783  		d := v_1_0.AuxInt
  5784  		x := v_1.Args[1]
  5785  		v.reset(OpAdd64)
  5786  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  5787  		v0.AuxInt = c * d
  5788  		v.AddArg(v0)
  5789  		v1 := b.NewValue0(v.Pos, OpMul64, t)
  5790  		v2 := b.NewValue0(v.Pos, OpConst64, t)
  5791  		v2.AuxInt = c
  5792  		v1.AddArg(v2)
  5793  		v1.AddArg(x)
  5794  		v.AddArg(v1)
  5795  		return true
  5796  	}
  5797  	// match: (Mul64 (Const64 [0]) _)
  5798  	// cond:
  5799  	// result: (Const64 [0])
  5800  	for {
  5801  		v_0 := v.Args[0]
  5802  		if v_0.Op != OpConst64 {
  5803  			break
  5804  		}
  5805  		if v_0.AuxInt != 0 {
  5806  			break
  5807  		}
  5808  		v.reset(OpConst64)
  5809  		v.AuxInt = 0
  5810  		return true
  5811  	}
  5812  	return false
  5813  }
  5814  func rewriteValuegeneric_OpMul64F(v *Value, config *Config) bool {
  5815  	b := v.Block
  5816  	_ = b
  5817  	// match: (Mul64F (Const64F [c]) (Const64F [d]))
  5818  	// cond:
  5819  	// result: (Const64F [f2i(i2f(c) * i2f(d))])
  5820  	for {
  5821  		v_0 := v.Args[0]
  5822  		if v_0.Op != OpConst64F {
  5823  			break
  5824  		}
  5825  		c := v_0.AuxInt
  5826  		v_1 := v.Args[1]
  5827  		if v_1.Op != OpConst64F {
  5828  			break
  5829  		}
  5830  		d := v_1.AuxInt
  5831  		v.reset(OpConst64F)
  5832  		v.AuxInt = f2i(i2f(c) * i2f(d))
  5833  		return true
  5834  	}
  5835  	// match: (Mul64F x (Const64F [f2i(1)]))
  5836  	// cond:
  5837  	// result: x
  5838  	for {
  5839  		x := v.Args[0]
  5840  		v_1 := v.Args[1]
  5841  		if v_1.Op != OpConst64F {
  5842  			break
  5843  		}
  5844  		if v_1.AuxInt != f2i(1) {
  5845  			break
  5846  		}
  5847  		v.reset(OpCopy)
  5848  		v.Type = x.Type
  5849  		v.AddArg(x)
  5850  		return true
  5851  	}
  5852  	// match: (Mul64F (Const64F [f2i(1)]) x)
  5853  	// cond:
  5854  	// result: x
  5855  	for {
  5856  		v_0 := v.Args[0]
  5857  		if v_0.Op != OpConst64F {
  5858  			break
  5859  		}
  5860  		if v_0.AuxInt != f2i(1) {
  5861  			break
  5862  		}
  5863  		x := v.Args[1]
  5864  		v.reset(OpCopy)
  5865  		v.Type = x.Type
  5866  		v.AddArg(x)
  5867  		return true
  5868  	}
  5869  	// match: (Mul64F x (Const64F [f2i(-1)]))
  5870  	// cond:
  5871  	// result: (Neg64F x)
  5872  	for {
  5873  		x := v.Args[0]
  5874  		v_1 := v.Args[1]
  5875  		if v_1.Op != OpConst64F {
  5876  			break
  5877  		}
  5878  		if v_1.AuxInt != f2i(-1) {
  5879  			break
  5880  		}
  5881  		v.reset(OpNeg64F)
  5882  		v.AddArg(x)
  5883  		return true
  5884  	}
  5885  	// match: (Mul64F (Const64F [f2i(-1)]) x)
  5886  	// cond:
  5887  	// result: (Neg64F x)
  5888  	for {
  5889  		v_0 := v.Args[0]
  5890  		if v_0.Op != OpConst64F {
  5891  			break
  5892  		}
  5893  		if v_0.AuxInt != f2i(-1) {
  5894  			break
  5895  		}
  5896  		x := v.Args[1]
  5897  		v.reset(OpNeg64F)
  5898  		v.AddArg(x)
  5899  		return true
  5900  	}
  5901  	return false
  5902  }
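// The Mul32F and Mul64F rules above work on the bit patterns stored in
// AuxInt: i2f32/i2f decode the constant to a float, the product is computed
// at the operand's precision, and f2i re-encodes it. Only exact identities
// are simplified: x*1 reduces to x and x*-1 to a negation. Note that there is
// no x*0 rule here: for floating point, 0*x still depends on x (NaN and ±Inf
// inputs produce NaN, and the sign of a zero result depends on the sign of x).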
  5903  func rewriteValuegeneric_OpMul8(v *Value, config *Config) bool {
  5904  	b := v.Block
  5905  	_ = b
  5906  	// match: (Mul8   (Const8 [c])   (Const8 [d]))
  5907  	// cond:
  5908  	// result: (Const8  [int64(int8(c*d))])
  5909  	for {
  5910  		v_0 := v.Args[0]
  5911  		if v_0.Op != OpConst8 {
  5912  			break
  5913  		}
  5914  		c := v_0.AuxInt
  5915  		v_1 := v.Args[1]
  5916  		if v_1.Op != OpConst8 {
  5917  			break
  5918  		}
  5919  		d := v_1.AuxInt
  5920  		v.reset(OpConst8)
  5921  		v.AuxInt = int64(int8(c * d))
  5922  		return true
  5923  	}
  5924  	// match: (Mul8  (Const8  [-1]) x)
  5925  	// cond:
  5926  	// result: (Neg8  x)
  5927  	for {
  5928  		v_0 := v.Args[0]
  5929  		if v_0.Op != OpConst8 {
  5930  			break
  5931  		}
  5932  		if v_0.AuxInt != -1 {
  5933  			break
  5934  		}
  5935  		x := v.Args[1]
  5936  		v.reset(OpNeg8)
  5937  		v.AddArg(x)
  5938  		return true
  5939  	}
  5940  	// match: (Mul8  x (Const8  <t> [c]))
  5941  	// cond: x.Op != OpConst8
  5942  	// result: (Mul8  (Const8  <t> [c]) x)
  5943  	for {
  5944  		x := v.Args[0]
  5945  		v_1 := v.Args[1]
  5946  		if v_1.Op != OpConst8 {
  5947  			break
  5948  		}
  5949  		t := v_1.Type
  5950  		c := v_1.AuxInt
  5951  		if !(x.Op != OpConst8) {
  5952  			break
  5953  		}
  5954  		v.reset(OpMul8)
  5955  		v0 := b.NewValue0(v.Pos, OpConst8, t)
  5956  		v0.AuxInt = c
  5957  		v.AddArg(v0)
  5958  		v.AddArg(x)
  5959  		return true
  5960  	}
  5961  	// match: (Mul8  (Const8  [0]) _)
  5962  	// cond:
  5963  	// result: (Const8  [0])
  5964  	for {
  5965  		v_0 := v.Args[0]
  5966  		if v_0.Op != OpConst8 {
  5967  			break
  5968  		}
  5969  		if v_0.AuxInt != 0 {
  5970  			break
  5971  		}
  5972  		v.reset(OpConst8)
  5973  		v.AuxInt = 0
  5974  		return true
  5975  	}
  5976  	return false
  5977  }
  5978  func rewriteValuegeneric_OpNeg16(v *Value, config *Config) bool {
  5979  	b := v.Block
  5980  	_ = b
  5981  	// match: (Neg16 (Sub16 x y))
  5982  	// cond:
  5983  	// result: (Sub16 y x)
  5984  	for {
  5985  		v_0 := v.Args[0]
  5986  		if v_0.Op != OpSub16 {
  5987  			break
  5988  		}
  5989  		x := v_0.Args[0]
  5990  		y := v_0.Args[1]
  5991  		v.reset(OpSub16)
  5992  		v.AddArg(y)
  5993  		v.AddArg(x)
  5994  		return true
  5995  	}
  5996  	return false
  5997  }
  5998  func rewriteValuegeneric_OpNeg32(v *Value, config *Config) bool {
  5999  	b := v.Block
  6000  	_ = b
  6001  	// match: (Neg32 (Sub32 x y))
  6002  	// cond:
  6003  	// result: (Sub32 y x)
  6004  	for {
  6005  		v_0 := v.Args[0]
  6006  		if v_0.Op != OpSub32 {
  6007  			break
  6008  		}
  6009  		x := v_0.Args[0]
  6010  		y := v_0.Args[1]
  6011  		v.reset(OpSub32)
  6012  		v.AddArg(y)
  6013  		v.AddArg(x)
  6014  		return true
  6015  	}
  6016  	return false
  6017  }
  6018  func rewriteValuegeneric_OpNeg64(v *Value, config *Config) bool {
  6019  	b := v.Block
  6020  	_ = b
  6021  	// match: (Neg64 (Sub64 x y))
  6022  	// cond:
  6023  	// result: (Sub64 y x)
  6024  	for {
  6025  		v_0 := v.Args[0]
  6026  		if v_0.Op != OpSub64 {
  6027  			break
  6028  		}
  6029  		x := v_0.Args[0]
  6030  		y := v_0.Args[1]
  6031  		v.reset(OpSub64)
  6032  		v.AddArg(y)
  6033  		v.AddArg(x)
  6034  		return true
  6035  	}
  6036  	return false
  6037  }
  6038  func rewriteValuegeneric_OpNeg8(v *Value, config *Config) bool {
  6039  	b := v.Block
  6040  	_ = b
  6041  	// match: (Neg8  (Sub8  x y))
  6042  	// cond:
  6043  	// result: (Sub8  y x)
  6044  	for {
  6045  		v_0 := v.Args[0]
  6046  		if v_0.Op != OpSub8 {
  6047  			break
  6048  		}
  6049  		x := v_0.Args[0]
  6050  		y := v_0.Args[1]
  6051  		v.reset(OpSub8)
  6052  		v.AddArg(y)
  6053  		v.AddArg(x)
  6054  		return true
  6055  	}
  6056  	return false
  6057  }
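// Editorial note on the Neg16/Neg32/Neg64/Neg8 rules above (not generator
// output): negating a difference simply swaps the operands, since -(x-y) == y-x
// holds under two's-complement wraparound at every width, so (NegN (SubN x y))
// is rewritten to (SubN y x) without creating any new values.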
  6058  func rewriteValuegeneric_OpNeq16(v *Value, config *Config) bool {
  6059  	b := v.Block
  6060  	_ = b
  6061  	// match: (Neq16 x x)
  6062  	// cond:
  6063  	// result: (ConstBool [0])
  6064  	for {
  6065  		x := v.Args[0]
  6066  		if x != v.Args[1] {
  6067  			break
  6068  		}
  6069  		v.reset(OpConstBool)
  6070  		v.AuxInt = 0
  6071  		return true
  6072  	}
  6073  	// match: (Neq16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x))
  6074  	// cond:
  6075  	// result: (Neq16 (Const16 <t> [int64(int16(c-d))]) x)
  6076  	for {
  6077  		v_0 := v.Args[0]
  6078  		if v_0.Op != OpConst16 {
  6079  			break
  6080  		}
  6081  		t := v_0.Type
  6082  		c := v_0.AuxInt
  6083  		v_1 := v.Args[1]
  6084  		if v_1.Op != OpAdd16 {
  6085  			break
  6086  		}
  6087  		v_1_0 := v_1.Args[0]
  6088  		if v_1_0.Op != OpConst16 {
  6089  			break
  6090  		}
  6091  		if v_1_0.Type != t {
  6092  			break
  6093  		}
  6094  		d := v_1_0.AuxInt
  6095  		x := v_1.Args[1]
  6096  		v.reset(OpNeq16)
  6097  		v0 := b.NewValue0(v.Pos, OpConst16, t)
  6098  		v0.AuxInt = int64(int16(c - d))
  6099  		v.AddArg(v0)
  6100  		v.AddArg(x)
  6101  		return true
  6102  	}
  6103  	// match: (Neq16 x (Const16 <t> [c]))
  6104  	// cond: x.Op != OpConst16
  6105  	// result: (Neq16 (Const16 <t> [c]) x)
  6106  	for {
  6107  		x := v.Args[0]
  6108  		v_1 := v.Args[1]
  6109  		if v_1.Op != OpConst16 {
  6110  			break
  6111  		}
  6112  		t := v_1.Type
  6113  		c := v_1.AuxInt
  6114  		if !(x.Op != OpConst16) {
  6115  			break
  6116  		}
  6117  		v.reset(OpNeq16)
  6118  		v0 := b.NewValue0(v.Pos, OpConst16, t)
  6119  		v0.AuxInt = c
  6120  		v.AddArg(v0)
  6121  		v.AddArg(x)
  6122  		return true
  6123  	}
  6124  	// match: (Neq16 (Const16 [c]) (Const16 [d]))
  6125  	// cond:
  6126  	// result: (ConstBool [b2i(c != d)])
  6127  	for {
  6128  		v_0 := v.Args[0]
  6129  		if v_0.Op != OpConst16 {
  6130  			break
  6131  		}
  6132  		c := v_0.AuxInt
  6133  		v_1 := v.Args[1]
  6134  		if v_1.Op != OpConst16 {
  6135  			break
  6136  		}
  6137  		d := v_1.AuxInt
  6138  		v.reset(OpConstBool)
  6139  		v.AuxInt = b2i(c != d)
  6140  		return true
  6141  	}
  6142  	return false
  6143  }
  6144  func rewriteValuegeneric_OpNeq32(v *Value, config *Config) bool {
  6145  	b := v.Block
  6146  	_ = b
  6147  	// match: (Neq32 x x)
  6148  	// cond:
  6149  	// result: (ConstBool [0])
  6150  	for {
  6151  		x := v.Args[0]
  6152  		if x != v.Args[1] {
  6153  			break
  6154  		}
  6155  		v.reset(OpConstBool)
  6156  		v.AuxInt = 0
  6157  		return true
  6158  	}
  6159  	// match: (Neq32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x))
  6160  	// cond:
  6161  	// result: (Neq32 (Const32 <t> [int64(int32(c-d))]) x)
  6162  	for {
  6163  		v_0 := v.Args[0]
  6164  		if v_0.Op != OpConst32 {
  6165  			break
  6166  		}
  6167  		t := v_0.Type
  6168  		c := v_0.AuxInt
  6169  		v_1 := v.Args[1]
  6170  		if v_1.Op != OpAdd32 {
  6171  			break
  6172  		}
  6173  		v_1_0 := v_1.Args[0]
  6174  		if v_1_0.Op != OpConst32 {
  6175  			break
  6176  		}
  6177  		if v_1_0.Type != t {
  6178  			break
  6179  		}
  6180  		d := v_1_0.AuxInt
  6181  		x := v_1.Args[1]
  6182  		v.reset(OpNeq32)
  6183  		v0 := b.NewValue0(v.Pos, OpConst32, t)
  6184  		v0.AuxInt = int64(int32(c - d))
  6185  		v.AddArg(v0)
  6186  		v.AddArg(x)
  6187  		return true
  6188  	}
  6189  	// match: (Neq32 x (Const32 <t> [c]))
  6190  	// cond: x.Op != OpConst32
  6191  	// result: (Neq32 (Const32 <t> [c]) x)
  6192  	for {
  6193  		x := v.Args[0]
  6194  		v_1 := v.Args[1]
  6195  		if v_1.Op != OpConst32 {
  6196  			break
  6197  		}
  6198  		t := v_1.Type
  6199  		c := v_1.AuxInt
  6200  		if !(x.Op != OpConst32) {
  6201  			break
  6202  		}
  6203  		v.reset(OpNeq32)
  6204  		v0 := b.NewValue0(v.Pos, OpConst32, t)
  6205  		v0.AuxInt = c
  6206  		v.AddArg(v0)
  6207  		v.AddArg(x)
  6208  		return true
  6209  	}
  6210  	// match: (Neq32 (Const32 [c]) (Const32 [d]))
  6211  	// cond:
  6212  	// result: (ConstBool [b2i(c != d)])
  6213  	for {
  6214  		v_0 := v.Args[0]
  6215  		if v_0.Op != OpConst32 {
  6216  			break
  6217  		}
  6218  		c := v_0.AuxInt
  6219  		v_1 := v.Args[1]
  6220  		if v_1.Op != OpConst32 {
  6221  			break
  6222  		}
  6223  		d := v_1.AuxInt
  6224  		v.reset(OpConstBool)
  6225  		v.AuxInt = b2i(c != d)
  6226  		return true
  6227  	}
  6228  	return false
  6229  }
  6230  func rewriteValuegeneric_OpNeq64(v *Value, config *Config) bool {
  6231  	b := v.Block
  6232  	_ = b
  6233  	// match: (Neq64 x x)
  6234  	// cond:
  6235  	// result: (ConstBool [0])
  6236  	for {
  6237  		x := v.Args[0]
  6238  		if x != v.Args[1] {
  6239  			break
  6240  		}
  6241  		v.reset(OpConstBool)
  6242  		v.AuxInt = 0
  6243  		return true
  6244  	}
  6245  	// match: (Neq64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x))
  6246  	// cond:
  6247  	// result: (Neq64 (Const64 <t> [c-d]) x)
  6248  	for {
  6249  		v_0 := v.Args[0]
  6250  		if v_0.Op != OpConst64 {
  6251  			break
  6252  		}
  6253  		t := v_0.Type
  6254  		c := v_0.AuxInt
  6255  		v_1 := v.Args[1]
  6256  		if v_1.Op != OpAdd64 {
  6257  			break
  6258  		}
  6259  		v_1_0 := v_1.Args[0]
  6260  		if v_1_0.Op != OpConst64 {
  6261  			break
  6262  		}
  6263  		if v_1_0.Type != t {
  6264  			break
  6265  		}
  6266  		d := v_1_0.AuxInt
  6267  		x := v_1.Args[1]
  6268  		v.reset(OpNeq64)
  6269  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  6270  		v0.AuxInt = c - d
  6271  		v.AddArg(v0)
  6272  		v.AddArg(x)
  6273  		return true
  6274  	}
  6275  	// match: (Neq64 x (Const64 <t> [c]))
  6276  	// cond: x.Op != OpConst64
  6277  	// result: (Neq64 (Const64 <t> [c]) x)
  6278  	for {
  6279  		x := v.Args[0]
  6280  		v_1 := v.Args[1]
  6281  		if v_1.Op != OpConst64 {
  6282  			break
  6283  		}
  6284  		t := v_1.Type
  6285  		c := v_1.AuxInt
  6286  		if !(x.Op != OpConst64) {
  6287  			break
  6288  		}
  6289  		v.reset(OpNeq64)
  6290  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  6291  		v0.AuxInt = c
  6292  		v.AddArg(v0)
  6293  		v.AddArg(x)
  6294  		return true
  6295  	}
  6296  	// match: (Neq64 (Const64 [c]) (Const64 [d]))
  6297  	// cond:
  6298  	// result: (ConstBool [b2i(c != d)])
  6299  	for {
  6300  		v_0 := v.Args[0]
  6301  		if v_0.Op != OpConst64 {
  6302  			break
  6303  		}
  6304  		c := v_0.AuxInt
  6305  		v_1 := v.Args[1]
  6306  		if v_1.Op != OpConst64 {
  6307  			break
  6308  		}
  6309  		d := v_1.AuxInt
  6310  		v.reset(OpConstBool)
  6311  		v.AuxInt = b2i(c != d)
  6312  		return true
  6313  	}
  6314  	return false
  6315  }
  6316  func rewriteValuegeneric_OpNeq8(v *Value, config *Config) bool {
  6317  	b := v.Block
  6318  	_ = b
  6319  	// match: (Neq8  x x)
  6320  	// cond:
  6321  	// result: (ConstBool [0])
  6322  	for {
  6323  		x := v.Args[0]
  6324  		if x != v.Args[1] {
  6325  			break
  6326  		}
  6327  		v.reset(OpConstBool)
  6328  		v.AuxInt = 0
  6329  		return true
  6330  	}
  6331  	// match: (Neq8  (Const8  <t> [c]) (Add8  (Const8  <t> [d]) x))
  6332  	// cond:
  6333  	// result: (Neq8 (Const8 <t> [int64(int8(c-d))]) x)
  6334  	for {
  6335  		v_0 := v.Args[0]
  6336  		if v_0.Op != OpConst8 {
  6337  			break
  6338  		}
  6339  		t := v_0.Type
  6340  		c := v_0.AuxInt
  6341  		v_1 := v.Args[1]
  6342  		if v_1.Op != OpAdd8 {
  6343  			break
  6344  		}
  6345  		v_1_0 := v_1.Args[0]
  6346  		if v_1_0.Op != OpConst8 {
  6347  			break
  6348  		}
  6349  		if v_1_0.Type != t {
  6350  			break
  6351  		}
  6352  		d := v_1_0.AuxInt
  6353  		x := v_1.Args[1]
  6354  		v.reset(OpNeq8)
  6355  		v0 := b.NewValue0(v.Pos, OpConst8, t)
  6356  		v0.AuxInt = int64(int8(c - d))
  6357  		v.AddArg(v0)
  6358  		v.AddArg(x)
  6359  		return true
  6360  	}
  6361  	// match: (Neq8  x (Const8 <t>  [c]))
  6362  	// cond: x.Op != OpConst8
  6363  	// result: (Neq8  (Const8  <t> [c]) x)
  6364  	for {
  6365  		x := v.Args[0]
  6366  		v_1 := v.Args[1]
  6367  		if v_1.Op != OpConst8 {
  6368  			break
  6369  		}
  6370  		t := v_1.Type
  6371  		c := v_1.AuxInt
  6372  		if !(x.Op != OpConst8) {
  6373  			break
  6374  		}
  6375  		v.reset(OpNeq8)
  6376  		v0 := b.NewValue0(v.Pos, OpConst8, t)
  6377  		v0.AuxInt = c
  6378  		v.AddArg(v0)
  6379  		v.AddArg(x)
  6380  		return true
  6381  	}
  6382  	// match: (Neq8  (Const8  [c]) (Const8  [d]))
  6383  	// cond:
  6384  	// result: (ConstBool [b2i(c != d)])
  6385  	for {
  6386  		v_0 := v.Args[0]
  6387  		if v_0.Op != OpConst8 {
  6388  			break
  6389  		}
  6390  		c := v_0.AuxInt
  6391  		v_1 := v.Args[1]
  6392  		if v_1.Op != OpConst8 {
  6393  			break
  6394  		}
  6395  		d := v_1.AuxInt
  6396  		v.reset(OpConstBool)
  6397  		v.AuxInt = b2i(c != d)
  6398  		return true
  6399  	}
  6400  	return false
  6401  }
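// Editorial note on the Neq16/Neq32/Neq64/Neq8 rules above (not generator
// output): each width gets the same four rules. x != x folds to false; a
// constant is moved across an addition, e.g.
// (Neq16 (Const16 <t> [5]) (Add16 (Const16 <t> [2]) x)) becomes
// (Neq16 (Const16 <t> [3]) x); a lone constant operand is canonicalized into the
// first argument; and two constants fold to a boolean via b2i. The b2i helper is
// defined elsewhere in this package; it is assumed here to map false/true to
// 0/1, roughly:
//
//	func b2i(b bool) int64 {
//		if b {
//			return 1
//		}
//		return 0
//	}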
  6402  func rewriteValuegeneric_OpNeqB(v *Value, config *Config) bool {
  6403  	b := v.Block
  6404  	_ = b
  6405  	// match: (NeqB (ConstBool [c]) (ConstBool [d]))
  6406  	// cond:
  6407  	// result: (ConstBool [b2i(c != d)])
  6408  	for {
  6409  		v_0 := v.Args[0]
  6410  		if v_0.Op != OpConstBool {
  6411  			break
  6412  		}
  6413  		c := v_0.AuxInt
  6414  		v_1 := v.Args[1]
  6415  		if v_1.Op != OpConstBool {
  6416  			break
  6417  		}
  6418  		d := v_1.AuxInt
  6419  		v.reset(OpConstBool)
  6420  		v.AuxInt = b2i(c != d)
  6421  		return true
  6422  	}
  6423  	// match: (NeqB (ConstBool [0]) x)
  6424  	// cond:
  6425  	// result: x
  6426  	for {
  6427  		v_0 := v.Args[0]
  6428  		if v_0.Op != OpConstBool {
  6429  			break
  6430  		}
  6431  		if v_0.AuxInt != 0 {
  6432  			break
  6433  		}
  6434  		x := v.Args[1]
  6435  		v.reset(OpCopy)
  6436  		v.Type = x.Type
  6437  		v.AddArg(x)
  6438  		return true
  6439  	}
  6440  	// match: (NeqB (ConstBool [1]) x)
  6441  	// cond:
  6442  	// result: (Not x)
  6443  	for {
  6444  		v_0 := v.Args[0]
  6445  		if v_0.Op != OpConstBool {
  6446  			break
  6447  		}
  6448  		if v_0.AuxInt != 1 {
  6449  			break
  6450  		}
  6451  		x := v.Args[1]
  6452  		v.reset(OpNot)
  6453  		v.AddArg(x)
  6454  		return true
  6455  	}
  6456  	return false
  6457  }
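// Editorial note on rewriteValuegeneric_OpNeqB above (not generator output): two
// boolean constants fold directly via b2i; comparing against false
// (ConstBool [0]) is the identity, so the result is a copy of x; and comparing
// against true (ConstBool [1]) is logical negation, so the result is (Not x).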
  6458  func rewriteValuegeneric_OpNeqInter(v *Value, config *Config) bool {
  6459  	b := v.Block
  6460  	_ = b
  6461  	// match: (NeqInter x y)
  6462  	// cond:
  6463  	// result: (NeqPtr (ITab x) (ITab y))
  6464  	for {
  6465  		x := v.Args[0]
  6466  		y := v.Args[1]
  6467  		v.reset(OpNeqPtr)
  6468  		v0 := b.NewValue0(v.Pos, OpITab, config.fe.TypeBytePtr())
  6469  		v0.AddArg(x)
  6470  		v.AddArg(v0)
  6471  		v1 := b.NewValue0(v.Pos, OpITab, config.fe.TypeBytePtr())
  6472  		v1.AddArg(y)
  6473  		v.AddArg(v1)
  6474  		return true
  6475  	}
  6476  }
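// Editorial note on rewriteValuegeneric_OpNeqInter above (not generator output):
// interface inequality is lowered to a pointer comparison of the two interfaces'
// first words, built as (NeqPtr (ITab x) (ITab y)) with byte-pointer typed ITab
// values; the NeqPtr rules below can then simplify that comparison further.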
  6477  func rewriteValuegeneric_OpNeqPtr(v *Value, config *Config) bool {
  6478  	b := v.Block
  6479  	_ = b
  6480  	// match: (NeqPtr p (ConstNil))
  6481  	// cond:
  6482  	// result: (IsNonNil p)
  6483  	for {
  6484  		p := v.Args[0]
  6485  		v_1 := v.Args[1]
  6486  		if v_1.Op != OpConstNil {
  6487  			break
  6488  		}
  6489  		v.reset(OpIsNonNil)
  6490  		v.AddArg(p)
  6491  		return true
  6492  	}
  6493  	// match: (NeqPtr (ConstNil) p)
  6494  	// cond:
  6495  	// result: (IsNonNil p)
  6496  	for {
  6497  		v_0 := v.Args[0]
  6498  		if v_0.Op != OpConstNil {
  6499  			break
  6500  		}
  6501  		p := v.Args[1]
  6502  		v.reset(OpIsNonNil)
  6503  		v.AddArg(p)
  6504  		return true
  6505  	}
  6506  	return false
  6507  }
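// Editorial note on rewriteValuegeneric_OpNeqPtr above (not generator output):
// comparing a pointer against ConstNil, with the constant on either side, is
// replaced by (IsNonNil p), the canonical non-nil test in SSA form.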
  6508  func rewriteValuegeneric_OpNeqSlice(v *Value, config *Config) bool {
  6509  	b := v.Block
  6510  	_ = b
  6511  	// match: (NeqSlice x y)
  6512  	// cond:
  6513  	// result: (NeqPtr (SlicePtr x) (SlicePtr y))
  6514  	for {
  6515  		x := v.Args[0]
  6516  		y := v.Args[1]
  6517  		v.reset(OpNeqPtr)
  6518  		v0 := b.NewValue0(v.Pos, OpSlicePtr, config.fe.TypeBytePtr())
  6519  		v0.AddArg(x)
  6520  		v.AddArg(v0)
  6521  		v1 := b.NewValue0(v.Pos, OpSlicePtr, config.fe.TypeBytePtr())
  6522  		v1.AddArg(y)
  6523  		v.AddArg(v1)
  6524  		return true
  6525  	}
  6526  }
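// Editorial note on rewriteValuegeneric_OpNeqSlice above (not generator output):
// mirroring the NeqInter lowering, slice inequality is reduced to a pointer
// comparison of the two slices' data pointers via
// (NeqPtr (SlicePtr x) (SlicePtr y)); Go only permits comparing a slice against
// nil, so comparing the data pointers is sufficient for the cases that reach
// this rule.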
  6527  func rewriteValuegeneric_OpNilCheck(v *Value, config *Config) bool {
  6528  	b := v.Block
  6529  	_ = b
  6530  	// match: (NilCheck (GetG mem) mem)
  6531  	// cond:
  6532  	// result: mem
  6533  	for {
  6534  		v_0 := v.Args[0]
  6535  		if v_0.Op != OpGetG {
  6536  			break
  6537  		}
  6538  		mem := v_0.Args[0]
  6539  		if mem != v.Args[1] {
  6540  			break
  6541  		}
  6542  		v.reset(OpCopy)
  6543  		v.Type = mem.Type
  6544  		v.AddArg(mem)
  6545  		return true
  6546  	}
  6547  	// match: (NilCheck (Load (OffPtr [c] (SP)) mem) mem)
  6548  	// cond: mem.Op == OpStaticCall && isSameSym(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize() + config.RegSize && warnRule(config.Debug_checknil() && v.Pos.Line() > 1, v, "removed nil check")
  6549  	// result: (Invalid)
  6550  	for {
  6551  		v_0 := v.Args[0]
  6552  		if v_0.Op != OpLoad {
  6553  			break
  6554  		}
  6555  		v_0_0 := v_0.Args[0]
  6556  		if v_0_0.Op != OpOffPtr {
  6557  			break
  6558  		}
  6559  		c := v_0_0.AuxInt
  6560  		v_0_0_0 := v_0_0.Args[0]
  6561  		if v_0_0_0.Op != OpSP {
  6562  			break
  6563  		}
  6564  		mem := v_0.Args[1]
  6565  		if mem != v.Args[1] {
  6566  			break
  6567  		}
  6568  		if !(mem.Op == OpStaticCall && isSameSym(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize()+config.RegSize && warnRule(config.Debug_checknil() && v.Pos.Line() > 1, v, "removed nil check")) {
  6569  			break
  6570  		}
  6571  		v.reset(OpInvalid)
  6572  		return true
  6573  	}
  6574  	// match: (NilCheck (OffPtr (Load (OffPtr [c] (SP)) mem)) mem)
  6575  	// cond: mem.Op == OpStaticCall && isSameSym(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize() + config.RegSize && warnRule(config.Debug_checknil() && v.Pos.Line() > 1, v, "removed nil check")
  6576  	// result: (Invalid)
  6577  	for {
  6578  		v_0 := v.Args[0]
  6579  		if v_0.Op != OpOffPtr {
  6580  			break
  6581  		}
  6582  		v_0_0 := v_0.Args[0]
  6583  		if v_0_0.Op != OpLoad {
  6584  			break
  6585  		}
  6586  		v_0_0_0 := v_0_0.Args[0]
  6587  		if v_0_0_0.Op != OpOffPtr {
  6588  			break
  6589  		}
  6590  		c := v_0_0_0.AuxInt
  6591  		v_0_0_0_0 := v_0_0_0.Args[0]
  6592  		if v_0_0_0_0.Op != OpSP {
  6593  			break
  6594  		}
  6595  		mem := v_0_0.Args[1]
  6596  		if mem != v.Args[1] {
  6597  			break
  6598  		}
  6599  		if !(mem.Op == OpStaticCall && isSameSym(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize()+config.RegSize && warnRule(config.Debug_checknil() && v.Pos.Line() > 1, v, "removed nil check")) {
  6600  			break
  6601  		}
  6602  		v.reset(OpInvalid)
  6603  		return true
  6604  	}
  6605  	return false
  6606  }
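// Editorial note on rewriteValuegeneric_OpNilCheck above (not generator output):
// the first rule drops nil checks of the g pointer, which the runtime guarantees
// is non-nil, leaving only the memory argument. The other two rules remove nil
// checks of a pointer that was just returned by runtime.newobject: the pointer
// is recognized as a load from the call's result slot at offset
// config.ctxt.FixedFrameSize() + config.RegSize from SP in the memory state
// produced by that StaticCall, and warnRule reports "removed nil check" when
// nil-check debugging is enabled.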
  6607  func rewriteValuegeneric_OpNot(v *Value, config *Config) bool {
  6608  	b := v.Block
  6609  	_ = b
  6610  	// match: (Not (Eq64 x y))
  6611  	// cond:
  6612  	// result: (Neq64 x y)
  6613  	for {
  6614  		v_0 := v.Args[0]
  6615  		if v_0.Op != OpEq64 {
  6616  			break
  6617  		}
  6618  		x := v_0.Args[0]
  6619  		y := v_0.Args[1]
  6620  		v.reset(OpNeq64)
  6621  		v.AddArg(x)
  6622  		v.AddArg(y)
  6623  		return true
  6624  	}
  6625  	// match: (Not (Eq32 x y))
  6626  	// cond:
  6627  	// result: (Neq32 x y)
  6628  	for {
  6629  		v_0 := v.Args[0]
  6630  		if v_0.Op != OpEq32 {
  6631  			break
  6632  		}
  6633  		x := v_0.Args[0]
  6634  		y := v_0.Args[1]
  6635  		v.reset(OpNeq32)
  6636  		v.AddArg(x)
  6637  		v.AddArg(y)
  6638  		return true
  6639  	}
  6640  	// match: (Not (Eq16 x y))
  6641  	// cond:
  6642  	// result: (Neq16 x y)
  6643  	for {
  6644  		v_0 := v.Args[0]
  6645  		if v_0.Op != OpEq16 {
  6646  			break
  6647  		}
  6648  		x := v_0.Args[0]
  6649  		y := v_0.Args[1]
  6650  		v.reset(OpNeq16)
  6651  		v.AddArg(x)
  6652  		v.AddArg(y)
  6653  		return true
  6654  	}
  6655  	// match: (Not (Eq8  x y))
  6656  	// cond:
  6657  	// result: (Neq8  x y)
  6658  	for {
  6659  		v_0 := v.Args[0]
  6660  		if v_0.Op != OpEq8 {
  6661  			break
  6662  		}
  6663  		x := v_0.Args[0]
  6664  		y := v_0.Args[1]
  6665  		v.reset(OpNeq8)
  6666  		v.AddArg(x)
  6667  		v.AddArg(y)
  6668  		return true
  6669  	}
  6670  	// match: (Not (EqB  x y))
  6671  	// cond:
  6672  	// result: (NeqB  x y)
  6673  	for {
  6674  		v_0 := v.Args[0]
  6675  		if v_0.Op != OpEqB {
  6676  			break
  6677  		}
  6678  		x := v_0.Args[0]
  6679  		y := v_0.Args[1]
  6680  		v.reset(OpNeqB)
  6681  		v.AddArg(x)
  6682  		v.AddArg(y)
  6683  		return true
  6684  	}
  6685  	// match: (Not (Neq64 x y))
  6686  	// cond:
  6687  	// result: (Eq64 x y)
  6688  	for {
  6689  		v_0 := v.Args[0]
  6690  		if v_0.Op != OpNeq64 {
  6691  			break
  6692  		}
  6693  		x := v_0.Args[0]
  6694  		y := v_0.Args[1]
  6695  		v.reset(OpEq64)
  6696  		v.AddArg(x)
  6697  		v.AddArg(y)
  6698  		return true
  6699  	}
  6700  	// match: (Not (Neq32 x y))
  6701  	// cond:
  6702  	// result: (Eq32 x y)
  6703  	for {
  6704  		v_0 := v.Args[0]
  6705  		if v_0.Op != OpNeq32 {
  6706  			break
  6707  		}
  6708  		x := v_0.Args[0]
  6709  		y := v_0.Args[1]
  6710  		v.reset(OpEq32)
  6711  		v.AddArg(x)
  6712  		v.AddArg(y)
  6713  		return true
  6714  	}
  6715  	// match: (Not (Neq16 x y))
  6716  	// cond:
  6717  	// result: (Eq16 x y)
  6718  	for {
  6719  		v_0 := v.Args[0]
  6720  		if v_0.Op != OpNeq16 {
  6721  			break
  6722  		}
  6723  		x := v_0.Args[0]
  6724  		y := v_0.Args[1]
  6725  		v.reset(OpEq16)
  6726  		v.AddArg(x)
  6727  		v.AddArg(y)
  6728  		return true
  6729  	}
  6730  	// match: (Not (Neq8  x y))
  6731  	// cond:
  6732  	// result: (Eq8  x y)
  6733  	for {
  6734  		v_0 := v.Args[0]
  6735  		if v_0.Op != OpNeq8 {
  6736  			break
  6737  		}
  6738  		x := v_0.Args[0]
  6739  		y := v_0.Args[1]
  6740  		v.reset(OpEq8)
  6741  		v.AddArg(x)
  6742  		v.AddArg(y)
  6743  		return true
  6744  	}
  6745  	// match: (Not (NeqB  x y))
  6746  	// cond:
  6747  	// result: (EqB  x y)
  6748  	for {
  6749  		v_0 := v.Args[0]
  6750  		if v_0.Op != OpNeqB {
  6751  			break
  6752  		}
  6753  		x := v_0.Args[0]
  6754  		y := v_0.Args[1]
  6755  		v.reset(OpEqB)
  6756  		v.AddArg(x)
  6757  		v.AddArg(y)
  6758  		return true
  6759  	}
  6760  	// match: (Not (Greater64 x y))
  6761  	// cond:
  6762  	// result: (Leq64 x y)
  6763  	for {
  6764  		v_0 := v.Args[0]
  6765  		if v_0.Op != OpGreater64 {
  6766  			break
  6767  		}
  6768  		x := v_0.Args[0]
  6769  		y := v_0.Args[1]
  6770  		v.reset(OpLeq64)
  6771  		v.AddArg(x)
  6772  		v.AddArg(y)
  6773  		return true
  6774  	}
  6775  	// match: (Not (Greater32 x y))
  6776  	// cond:
  6777  	// result: (Leq32 x y)
  6778  	for {
  6779  		v_0 := v.Args[0]
  6780  		if v_0.Op != OpGreater32 {
  6781  			break
  6782  		}
  6783  		x := v_0.Args[0]
  6784  		y := v_0.Args[1]
  6785  		v.reset(OpLeq32)
  6786  		v.AddArg(x)
  6787  		v.AddArg(y)
  6788  		return true
  6789  	}
  6790  	// match: (Not (Greater16 x y))
  6791  	// cond:
  6792  	// result: (Leq16 x y)
  6793  	for {
  6794  		v_0 := v.Args[0]
  6795  		if v_0.Op != OpGreater16 {
  6796  			break
  6797  		}
  6798  		x := v_0.Args[0]
  6799  		y := v_0.Args[1]
  6800  		v.reset(OpLeq16)
  6801  		v.AddArg(x)
  6802  		v.AddArg(y)
  6803  		return true
  6804  	}
  6805  	// match: (Not (Greater8  x y))
  6806  	// cond:
  6807  	// result: (Leq8  x y)
  6808  	for {
  6809  		v_0 := v.Args[0]
  6810  		if v_0.Op != OpGreater8 {
  6811  			break
  6812  		}
  6813  		x := v_0.Args[0]
  6814  		y := v_0.Args[1]
  6815  		v.reset(OpLeq8)
  6816  		v.AddArg(x)
  6817  		v.AddArg(y)
  6818  		return true
  6819  	}
  6820  	// match: (Not (Greater64U x y))
  6821  	// cond:
  6822  	// result: (Leq64U x y)
  6823  	for {
  6824  		v_0 := v.Args[0]
  6825  		if v_0.Op != OpGreater64U {
  6826  			break
  6827  		}
  6828  		x := v_0.Args[0]
  6829  		y := v_0.Args[1]
  6830  		v.reset(OpLeq64U)
  6831  		v.AddArg(x)
  6832  		v.AddArg(y)
  6833  		return true
  6834  	}
  6835  	// match: (Not (Greater32U x y))
  6836  	// cond:
  6837  	// result: (Leq32U x y)
  6838  	for {
  6839  		v_0 := v.Args[0]
  6840  		if v_0.Op != OpGreater32U {
  6841  			break
  6842  		}
  6843  		x := v_0.Args[0]
  6844  		y := v_0.Args[1]
  6845  		v.reset(OpLeq32U)
  6846  		v.AddArg(x)
  6847  		v.AddArg(y)
  6848  		return true
  6849  	}
  6850  	// match: (Not (Greater16U x y))
  6851  	// cond:
  6852  	// result: (Leq16U x y)
  6853  	for {
  6854  		v_0 := v.Args[0]
  6855  		if v_0.Op != OpGreater16U {
  6856  			break
  6857  		}
  6858  		x := v_0.Args[0]
  6859  		y := v_0.Args[1]
  6860  		v.reset(OpLeq16U)
  6861  		v.AddArg(x)
  6862  		v.AddArg(y)
  6863  		return true
  6864  	}
  6865  	// match: (Not (Greater8U  x y))
  6866  	// cond:
  6867  	// result: (Leq8U  x y)
  6868  	for {
  6869  		v_0 := v.Args[0]
  6870  		if v_0.Op != OpGreater8U {
  6871  			break
  6872  		}
  6873  		x := v_0.Args[0]
  6874  		y := v_0.Args[1]
  6875  		v.reset(OpLeq8U)
  6876  		v.AddArg(x)
  6877  		v.AddArg(y)
  6878  		return true
  6879  	}
  6880  	// match: (Not (Geq64 x y))
  6881  	// cond:
  6882  	// result: (Less64 x y)
  6883  	for {
  6884  		v_0 := v.Args[0]
  6885  		if v_0.Op != OpGeq64 {
  6886  			break
  6887  		}
  6888  		x := v_0.Args[0]
  6889  		y := v_0.Args[1]
  6890  		v.reset(OpLess64)
  6891  		v.AddArg(x)
  6892  		v.AddArg(y)
  6893  		return true
  6894  	}
  6895  	// match: (Not (Geq32 x y))
  6896  	// cond:
  6897  	// result: (Less32 x y)
  6898  	for {
  6899  		v_0 := v.Args[0]
  6900  		if v_0.Op != OpGeq32 {
  6901  			break
  6902  		}
  6903  		x := v_0.Args[0]
  6904  		y := v_0.Args[1]
  6905  		v.reset(OpLess32)
  6906  		v.AddArg(x)
  6907  		v.AddArg(y)
  6908  		return true
  6909  	}
  6910  	// match: (Not (Geq16 x y))
  6911  	// cond:
  6912  	// result: (Less16 x y)
  6913  	for {
  6914  		v_0 := v.Args[0]
  6915  		if v_0.Op != OpGeq16 {
  6916  			break
  6917  		}
  6918  		x := v_0.Args[0]
  6919  		y := v_0.Args[1]
  6920  		v.reset(OpLess16)
  6921  		v.AddArg(x)
  6922  		v.AddArg(y)
  6923  		return true
  6924  	}
  6925  	// match: (Not (Geq8  x y))
  6926  	// cond:
  6927  	// result: (Less8  x y)
  6928  	for {
  6929  		v_0 := v.Args[0]
  6930  		if v_0.Op != OpGeq8 {
  6931  			break
  6932  		}
  6933  		x := v_0.Args[0]
  6934  		y := v_0.Args[1]
  6935  		v.reset(OpLess8)
  6936  		v.AddArg(x)
  6937  		v.AddArg(y)
  6938  		return true
  6939  	}
  6940  	// match: (Not (Geq64U x y))
  6941  	// cond:
  6942  	// result: (Less64U x y)
  6943  	for {
  6944  		v_0 := v.Args[0]
  6945  		if v_0.Op != OpGeq64U {
  6946  			break
  6947  		}
  6948  		x := v_0.Args[0]
  6949  		y := v_0.Args[1]
  6950  		v.reset(OpLess64U)
  6951  		v.AddArg(x)
  6952  		v.AddArg(y)
  6953  		return true
  6954  	}
  6955  	// match: (Not (Geq32U x y))
  6956  	// cond:
  6957  	// result: (Less32U x y)
  6958  	for {
  6959  		v_0 := v.Args[0]
  6960  		if v_0.Op != OpGeq32U {
  6961  			break
  6962  		}
  6963  		x := v_0.Args[0]
  6964  		y := v_0.Args[1]
  6965  		v.reset(OpLess32U)
  6966  		v.AddArg(x)
  6967  		v.AddArg(y)
  6968  		return true
  6969  	}
  6970  	// match: (Not (Geq16U x y))
  6971  	// cond:
  6972  	// result: (Less16U x y)
  6973  	for {
  6974  		v_0 := v.Args[0]
  6975  		if v_0.Op != OpGeq16U {
  6976  			break
  6977  		}
  6978  		x := v_0.Args[0]
  6979  		y := v_0.Args[1]
  6980  		v.reset(OpLess16U)
  6981  		v.AddArg(x)
  6982  		v.AddArg(y)
  6983  		return true
  6984  	}
  6985  	// match: (Not (Geq8U  x y))
  6986  	// cond:
  6987  	// result: (Less8U  x y)
  6988  	for {
  6989  		v_0 := v.Args[0]
  6990  		if v_0.Op != OpGeq8U {
  6991  			break
  6992  		}
  6993  		x := v_0.Args[0]
  6994  		y := v_0.Args[1]
  6995  		v.reset(OpLess8U)
  6996  		v.AddArg(x)
  6997  		v.AddArg(y)
  6998  		return true
  6999  	}
  7000  	// match: (Not (Less64 x y))
  7001  	// cond:
  7002  	// result: (Geq64 x y)
  7003  	for {
  7004  		v_0 := v.Args[0]
  7005  		if v_0.Op != OpLess64 {
  7006  			break
  7007  		}
  7008  		x := v_0.Args[0]
  7009  		y := v_0.Args[1]
  7010  		v.reset(OpGeq64)
  7011  		v.AddArg(x)
  7012  		v.AddArg(y)
  7013  		return true
  7014  	}
  7015  	// match: (Not (Less32 x y))
  7016  	// cond:
  7017  	// result: (Geq32 x y)
  7018  	for {
  7019  		v_0 := v.Args[0]
  7020  		if v_0.Op != OpLess32 {
  7021  			break
  7022  		}
  7023  		x := v_0.Args[0]
  7024  		y := v_0.Args[1]
  7025  		v.reset(OpGeq32)
  7026  		v.AddArg(x)
  7027  		v.AddArg(y)
  7028  		return true
  7029  	}
  7030  	// match: (Not (Less16 x y))
  7031  	// cond:
  7032  	// result: (Geq16 x y)
  7033  	for {
  7034  		v_0 := v.Args[0]
  7035  		if v_0.Op != OpLess16 {
  7036  			break
  7037  		}
  7038  		x := v_0.Args[0]
  7039  		y := v_0.Args[1]
  7040  		v.reset(OpGeq16)
  7041  		v.AddArg(x)
  7042  		v.AddArg(y)
  7043  		return true
  7044  	}
  7045  	// match: (Not (Less8  x y))
  7046  	// cond:
  7047  	// result: (Geq8  x y)
  7048  	for {
  7049  		v_0 := v.Args[0]
  7050  		if v_0.Op != OpLess8 {
  7051  			break
  7052  		}
  7053  		x := v_0.Args[0]
  7054  		y := v_0.Args[1]
  7055  		v.reset(OpGeq8)
  7056  		v.AddArg(x)
  7057  		v.AddArg(y)
  7058  		return true
  7059  	}
  7060  	// match: (Not (Less64U x y))
  7061  	// cond:
  7062  	// result: (Geq64U x y)
  7063  	for {
  7064  		v_0 := v.Args[0]
  7065  		if v_0.Op != OpLess64U {
  7066  			break
  7067  		}
  7068  		x := v_0.Args[0]
  7069  		y := v_0.Args[1]
  7070  		v.reset(OpGeq64U)
  7071  		v.AddArg(x)
  7072  		v.AddArg(y)
  7073  		return true
  7074  	}
  7075  	// match: (Not (Less32U x y))
  7076  	// cond:
  7077  	// result: (Geq32U x y)
  7078  	for {
  7079  		v_0 := v.Args[0]
  7080  		if v_0.Op != OpLess32U {
  7081  			break
  7082  		}
  7083  		x := v_0.Args[0]
  7084  		y := v_0.Args[1]
  7085  		v.reset(OpGeq32U)
  7086  		v.AddArg(x)
  7087  		v.AddArg(y)
  7088  		return true
  7089  	}
  7090  	// match: (Not (Less16U x y))
  7091  	// cond:
  7092  	// result: (Geq16U x y)
  7093  	for {
  7094  		v_0 := v.Args[0]
  7095  		if v_0.Op != OpLess16U {
  7096  			break
  7097  		}
  7098  		x := v_0.Args[0]
  7099  		y := v_0.Args[1]
  7100  		v.reset(OpGeq16U)
  7101  		v.AddArg(x)
  7102  		v.AddArg(y)
  7103  		return true
  7104  	}
  7105  	// match: (Not (Less8U  x y))
  7106  	// cond:
  7107  	// result: (Geq8U  x y)
  7108  	for {
  7109  		v_0 := v.Args[0]
  7110  		if v_0.Op != OpLess8U {
  7111  			break
  7112  		}
  7113  		x := v_0.Args[0]
  7114  		y := v_0.Args[1]
  7115  		v.reset(OpGeq8U)
  7116  		v.AddArg(x)
  7117  		v.AddArg(y)
  7118  		return true
  7119  	}
  7120  	// match: (Not (Leq64 x y))
  7121  	// cond:
  7122  	// result: (Greater64 x y)
  7123  	for {
  7124  		v_0 := v.Args[0]
  7125  		if v_0.Op != OpLeq64 {
  7126  			break
  7127  		}
  7128  		x := v_0.Args[0]
  7129  		y := v_0.Args[1]
  7130  		v.reset(OpGreater64)
  7131  		v.AddArg(x)
  7132  		v.AddArg(y)
  7133  		return true
  7134  	}
  7135  	// match: (Not (Leq32 x y))
  7136  	// cond:
  7137  	// result: (Greater32 x y)
  7138  	for {
  7139  		v_0 := v.Args[0]
  7140  		if v_0.Op != OpLeq32 {
  7141  			break
  7142  		}
  7143  		x := v_0.Args[0]
  7144  		y := v_0.Args[1]
  7145  		v.reset(OpGreater32)
  7146  		v.AddArg(x)
  7147  		v.AddArg(y)
  7148  		return true
  7149  	}
  7150  	// match: (Not (Leq16 x y))
  7151  	// cond:
  7152  	// result: (Greater16 x y)
  7153  	for {
  7154  		v_0 := v.Args[0]
  7155  		if v_0.Op != OpLeq16 {
  7156  			break
  7157  		}
  7158  		x := v_0.Args[0]
  7159  		y := v_0.Args[1]
  7160  		v.reset(OpGreater16)
  7161  		v.AddArg(x)
  7162  		v.AddArg(y)
  7163  		return true
  7164  	}
  7165  	// match: (Not (Leq8  x y))
  7166  	// cond:
  7167  	// result: (Greater8 x y)
  7168  	for {
  7169  		v_0 := v.Args[0]
  7170  		if v_0.Op != OpLeq8 {
  7171  			break
  7172  		}
  7173  		x := v_0.Args[0]
  7174  		y := v_0.Args[1]
  7175  		v.reset(OpGreater8)
  7176  		v.AddArg(x)
  7177  		v.AddArg(y)
  7178  		return true
  7179  	}
  7180  	// match: (Not (Leq64U x y))
  7181  	// cond:
  7182  	// result: (Greater64U x y)
  7183  	for {
  7184  		v_0 := v.Args[0]
  7185  		if v_0.Op != OpLeq64U {
  7186  			break
  7187  		}
  7188  		x := v_0.Args[0]
  7189  		y := v_0.Args[1]
  7190  		v.reset(OpGreater64U)
  7191  		v.AddArg(x)
  7192  		v.AddArg(y)
  7193  		return true
  7194  	}
  7195  	// match: (Not (Leq32U x y))
  7196  	// cond:
  7197  	// result: (Greater32U x y)
  7198  	for {
  7199  		v_0 := v.Args[0]
  7200  		if v_0.Op != OpLeq32U {
  7201  			break
  7202  		}
  7203  		x := v_0.Args[0]
  7204  		y := v_0.Args[1]
  7205  		v.reset(OpGreater32U)
  7206  		v.AddArg(x)
  7207  		v.AddArg(y)
  7208  		return true
  7209  	}
  7210  	// match: (Not (Leq16U x y))
  7211  	// cond:
  7212  	// result: (Greater16U x y)
  7213  	for {
  7214  		v_0 := v.Args[0]
  7215  		if v_0.Op != OpLeq16U {
  7216  			break
  7217  		}
  7218  		x := v_0.Args[0]
  7219  		y := v_0.Args[1]
  7220  		v.reset(OpGreater16U)
  7221  		v.AddArg(x)
  7222  		v.AddArg(y)
  7223  		return true
  7224  	}
  7225  	// match: (Not (Leq8U  x y))
  7226  	// cond:
  7227  	// result: (Greater8U  x y)
  7228  	for {
  7229  		v_0 := v.Args[0]
  7230  		if v_0.Op != OpLeq8U {
  7231  			break
  7232  		}
  7233  		x := v_0.Args[0]
  7234  		y := v_0.Args[1]
  7235  		v.reset(OpGreater8U)
  7236  		v.AddArg(x)
  7237  		v.AddArg(y)
  7238  		return true
  7239  	}
  7240  	return false
  7241  }
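// Editorial note on rewriteValuegeneric_OpNot above (not generator output):
// rather than materializing a boolean negation, Not of any comparison is
// rewritten to the complementary comparison on the same operands: Eq and Neq
// swap, Greater becomes Leq, Geq becomes Less, Less becomes Geq, and Leq becomes
// Greater, for every width and for both signed and unsigned forms. For example,
// (Not (Less64 x y)) becomes (Geq64 x y).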
  7242  func rewriteValuegeneric_OpOffPtr(v *Value, config *Config) bool {
  7243  	b := v.Block
  7244  	_ = b
  7245  	// match: (OffPtr (OffPtr p [b]) [a])
  7246  	// cond:
  7247  	// result: (OffPtr p [a+b])
  7248  	for {
  7249  		a := v.AuxInt
  7250  		v_0 := v.Args[0]
  7251  		if v_0.Op != OpOffPtr {
  7252  			break
  7253  		}
  7254  		b := v_0.AuxInt
  7255  		p := v_0.Args[0]
  7256  		v.reset(OpOffPtr)
  7257  		v.AuxInt = a + b
  7258  		v.AddArg(p)
  7259  		return true
  7260  	}
  7261  	// match: (OffPtr p [0])
  7262  	// cond: v.Type.Compare(p.Type) == CMPeq
  7263  	// result: p
  7264  	for {
  7265  		if v.AuxInt != 0 {
  7266  			break
  7267  		}
  7268  		p := v.Args[0]
  7269  		if !(v.Type.Compare(p.Type) == CMPeq) {
  7270  			break
  7271  		}
  7272  		v.reset(OpCopy)
  7273  		v.Type = p.Type
  7274  		v.AddArg(p)
  7275  		return true
  7276  	}
  7277  	return false
  7278  }
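// Editorial note on rewriteValuegeneric_OpOffPtr above (not generator output):
// nested offsets are combined by adding their AuxInts, so an (OffPtr [8]) of an
// (OffPtr [16] p) becomes (OffPtr [24] p), and a zero offset whose type compares
// equal (CMPeq) to the operand's type is removed entirely by copying the
// underlying pointer.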
  7279  func rewriteValuegeneric_OpOr16(v *Value, config *Config) bool {
  7280  	b := v.Block
  7281  	_ = b
  7282  	// match: (Or16 x (Const16 <t> [c]))
  7283  	// cond: x.Op != OpConst16
  7284  	// result: (Or16 (Const16 <t> [c]) x)
  7285  	for {
  7286  		x := v.Args[0]
  7287  		v_1 := v.Args[1]
  7288  		if v_1.Op != OpConst16 {
  7289  			break
  7290  		}
  7291  		t := v_1.Type
  7292  		c := v_1.AuxInt
  7293  		if !(x.Op != OpConst16) {
  7294  			break
  7295  		}
  7296  		v.reset(OpOr16)
  7297  		v0 := b.NewValue0(v.Pos, OpConst16, t)
  7298  		v0.AuxInt = c
  7299  		v.AddArg(v0)
  7300  		v.AddArg(x)
  7301  		return true
  7302  	}
  7303  	// match: (Or16 x x)
  7304  	// cond:
  7305  	// result: x
  7306  	for {
  7307  		x := v.Args[0]
  7308  		if x != v.Args[1] {
  7309  			break
  7310  		}
  7311  		v.reset(OpCopy)
  7312  		v.Type = x.Type
  7313  		v.AddArg(x)
  7314  		return true
  7315  	}
  7316  	// match: (Or16 (Const16 [0]) x)
  7317  	// cond:
  7318  	// result: x
  7319  	for {
  7320  		v_0 := v.Args[0]
  7321  		if v_0.Op != OpConst16 {
  7322  			break
  7323  		}
  7324  		if v_0.AuxInt != 0 {
  7325  			break
  7326  		}
  7327  		x := v.Args[1]
  7328  		v.reset(OpCopy)
  7329  		v.Type = x.Type
  7330  		v.AddArg(x)
  7331  		return true
  7332  	}
  7333  	// match: (Or16 (Const16 [-1]) _)
  7334  	// cond:
  7335  	// result: (Const16 [-1])
  7336  	for {
  7337  		v_0 := v.Args[0]
  7338  		if v_0.Op != OpConst16 {
  7339  			break
  7340  		}
  7341  		if v_0.AuxInt != -1 {
  7342  			break
  7343  		}
  7344  		v.reset(OpConst16)
  7345  		v.AuxInt = -1
  7346  		return true
  7347  	}
  7348  	// match: (Or16 x (Or16 x y))
  7349  	// cond:
  7350  	// result: (Or16 x y)
  7351  	for {
  7352  		x := v.Args[0]
  7353  		v_1 := v.Args[1]
  7354  		if v_1.Op != OpOr16 {
  7355  			break
  7356  		}
  7357  		if x != v_1.Args[0] {
  7358  			break
  7359  		}
  7360  		y := v_1.Args[1]
  7361  		v.reset(OpOr16)
  7362  		v.AddArg(x)
  7363  		v.AddArg(y)
  7364  		return true
  7365  	}
  7366  	// match: (Or16 x (Or16 y x))
  7367  	// cond:
  7368  	// result: (Or16 x y)
  7369  	for {
  7370  		x := v.Args[0]
  7371  		v_1 := v.Args[1]
  7372  		if v_1.Op != OpOr16 {
  7373  			break
  7374  		}
  7375  		y := v_1.Args[0]
  7376  		if x != v_1.Args[1] {
  7377  			break
  7378  		}
  7379  		v.reset(OpOr16)
  7380  		v.AddArg(x)
  7381  		v.AddArg(y)
  7382  		return true
  7383  	}
  7384  	// match: (Or16 (Or16 x y) x)
  7385  	// cond:
  7386  	// result: (Or16 x y)
  7387  	for {
  7388  		v_0 := v.Args[0]
  7389  		if v_0.Op != OpOr16 {
  7390  			break
  7391  		}
  7392  		x := v_0.Args[0]
  7393  		y := v_0.Args[1]
  7394  		if x != v.Args[1] {
  7395  			break
  7396  		}
  7397  		v.reset(OpOr16)
  7398  		v.AddArg(x)
  7399  		v.AddArg(y)
  7400  		return true
  7401  	}
  7402  	// match: (Or16 (Or16 x y) y)
  7403  	// cond:
  7404  	// result: (Or16 x y)
  7405  	for {
  7406  		v_0 := v.Args[0]
  7407  		if v_0.Op != OpOr16 {
  7408  			break
  7409  		}
  7410  		x := v_0.Args[0]
  7411  		y := v_0.Args[1]
  7412  		if y != v.Args[1] {
  7413  			break
  7414  		}
  7415  		v.reset(OpOr16)
  7416  		v.AddArg(x)
  7417  		v.AddArg(y)
  7418  		return true
  7419  	}
  7420  	return false
  7421  }
  7422  func rewriteValuegeneric_OpOr32(v *Value, config *Config) bool {
  7423  	b := v.Block
  7424  	_ = b
  7425  	// match: (Or32 x (Const32 <t> [c]))
  7426  	// cond: x.Op != OpConst32
  7427  	// result: (Or32 (Const32 <t> [c]) x)
  7428  	for {
  7429  		x := v.Args[0]
  7430  		v_1 := v.Args[1]
  7431  		if v_1.Op != OpConst32 {
  7432  			break
  7433  		}
  7434  		t := v_1.Type
  7435  		c := v_1.AuxInt
  7436  		if !(x.Op != OpConst32) {
  7437  			break
  7438  		}
  7439  		v.reset(OpOr32)
  7440  		v0 := b.NewValue0(v.Pos, OpConst32, t)
  7441  		v0.AuxInt = c
  7442  		v.AddArg(v0)
  7443  		v.AddArg(x)
  7444  		return true
  7445  	}
  7446  	// match: (Or32 x x)
  7447  	// cond:
  7448  	// result: x
  7449  	for {
  7450  		x := v.Args[0]
  7451  		if x != v.Args[1] {
  7452  			break
  7453  		}
  7454  		v.reset(OpCopy)
  7455  		v.Type = x.Type
  7456  		v.AddArg(x)
  7457  		return true
  7458  	}
  7459  	// match: (Or32 (Const32 [0]) x)
  7460  	// cond:
  7461  	// result: x
  7462  	for {
  7463  		v_0 := v.Args[0]
  7464  		if v_0.Op != OpConst32 {
  7465  			break
  7466  		}
  7467  		if v_0.AuxInt != 0 {
  7468  			break
  7469  		}
  7470  		x := v.Args[1]
  7471  		v.reset(OpCopy)
  7472  		v.Type = x.Type
  7473  		v.AddArg(x)
  7474  		return true
  7475  	}
  7476  	// match: (Or32 (Const32 [-1]) _)
  7477  	// cond:
  7478  	// result: (Const32 [-1])
  7479  	for {
  7480  		v_0 := v.Args[0]
  7481  		if v_0.Op != OpConst32 {
  7482  			break
  7483  		}
  7484  		if v_0.AuxInt != -1 {
  7485  			break
  7486  		}
  7487  		v.reset(OpConst32)
  7488  		v.AuxInt = -1
  7489  		return true
  7490  	}
  7491  	// match: (Or32 x (Or32 x y))
  7492  	// cond:
  7493  	// result: (Or32 x y)
  7494  	for {
  7495  		x := v.Args[0]
  7496  		v_1 := v.Args[1]
  7497  		if v_1.Op != OpOr32 {
  7498  			break
  7499  		}
  7500  		if x != v_1.Args[0] {
  7501  			break
  7502  		}
  7503  		y := v_1.Args[1]
  7504  		v.reset(OpOr32)
  7505  		v.AddArg(x)
  7506  		v.AddArg(y)
  7507  		return true
  7508  	}
  7509  	// match: (Or32 x (Or32 y x))
  7510  	// cond:
  7511  	// result: (Or32 x y)
  7512  	for {
  7513  		x := v.Args[0]
  7514  		v_1 := v.Args[1]
  7515  		if v_1.Op != OpOr32 {
  7516  			break
  7517  		}
  7518  		y := v_1.Args[0]
  7519  		if x != v_1.Args[1] {
  7520  			break
  7521  		}
  7522  		v.reset(OpOr32)
  7523  		v.AddArg(x)
  7524  		v.AddArg(y)
  7525  		return true
  7526  	}
  7527  	// match: (Or32 (Or32 x y) x)
  7528  	// cond:
  7529  	// result: (Or32 x y)
  7530  	for {
  7531  		v_0 := v.Args[0]
  7532  		if v_0.Op != OpOr32 {
  7533  			break
  7534  		}
  7535  		x := v_0.Args[0]
  7536  		y := v_0.Args[1]
  7537  		if x != v.Args[1] {
  7538  			break
  7539  		}
  7540  		v.reset(OpOr32)
  7541  		v.AddArg(x)
  7542  		v.AddArg(y)
  7543  		return true
  7544  	}
  7545  	// match: (Or32 (Or32 x y) y)
  7546  	// cond:
  7547  	// result: (Or32 x y)
  7548  	for {
  7549  		v_0 := v.Args[0]
  7550  		if v_0.Op != OpOr32 {
  7551  			break
  7552  		}
  7553  		x := v_0.Args[0]
  7554  		y := v_0.Args[1]
  7555  		if y != v.Args[1] {
  7556  			break
  7557  		}
  7558  		v.reset(OpOr32)
  7559  		v.AddArg(x)
  7560  		v.AddArg(y)
  7561  		return true
  7562  	}
  7563  	return false
  7564  }
  7565  func rewriteValuegeneric_OpOr64(v *Value, config *Config) bool {
  7566  	b := v.Block
  7567  	_ = b
  7568  	// match: (Or64 x (Const64 <t> [c]))
  7569  	// cond: x.Op != OpConst64
  7570  	// result: (Or64 (Const64 <t> [c]) x)
  7571  	for {
  7572  		x := v.Args[0]
  7573  		v_1 := v.Args[1]
  7574  		if v_1.Op != OpConst64 {
  7575  			break
  7576  		}
  7577  		t := v_1.Type
  7578  		c := v_1.AuxInt
  7579  		if !(x.Op != OpConst64) {
  7580  			break
  7581  		}
  7582  		v.reset(OpOr64)
  7583  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  7584  		v0.AuxInt = c
  7585  		v.AddArg(v0)
  7586  		v.AddArg(x)
  7587  		return true
  7588  	}
  7589  	// match: (Or64 x x)
  7590  	// cond:
  7591  	// result: x
  7592  	for {
  7593  		x := v.Args[0]
  7594  		if x != v.Args[1] {
  7595  			break
  7596  		}
  7597  		v.reset(OpCopy)
  7598  		v.Type = x.Type
  7599  		v.AddArg(x)
  7600  		return true
  7601  	}
  7602  	// match: (Or64 (Const64 [0]) x)
  7603  	// cond:
  7604  	// result: x
  7605  	for {
  7606  		v_0 := v.Args[0]
  7607  		if v_0.Op != OpConst64 {
  7608  			break
  7609  		}
  7610  		if v_0.AuxInt != 0 {
  7611  			break
  7612  		}
  7613  		x := v.Args[1]
  7614  		v.reset(OpCopy)
  7615  		v.Type = x.Type
  7616  		v.AddArg(x)
  7617  		return true
  7618  	}
  7619  	// match: (Or64 (Const64 [-1]) _)
  7620  	// cond:
  7621  	// result: (Const64 [-1])
  7622  	for {
  7623  		v_0 := v.Args[0]
  7624  		if v_0.Op != OpConst64 {
  7625  			break
  7626  		}
  7627  		if v_0.AuxInt != -1 {
  7628  			break
  7629  		}
  7630  		v.reset(OpConst64)
  7631  		v.AuxInt = -1
  7632  		return true
  7633  	}
  7634  	// match: (Or64 x (Or64 x y))
  7635  	// cond:
  7636  	// result: (Or64 x y)
  7637  	for {
  7638  		x := v.Args[0]
  7639  		v_1 := v.Args[1]
  7640  		if v_1.Op != OpOr64 {
  7641  			break
  7642  		}
  7643  		if x != v_1.Args[0] {
  7644  			break
  7645  		}
  7646  		y := v_1.Args[1]
  7647  		v.reset(OpOr64)
  7648  		v.AddArg(x)
  7649  		v.AddArg(y)
  7650  		return true
  7651  	}
  7652  	// match: (Or64 x (Or64 y x))
  7653  	// cond:
  7654  	// result: (Or64 x y)
  7655  	for {
  7656  		x := v.Args[0]
  7657  		v_1 := v.Args[1]
  7658  		if v_1.Op != OpOr64 {
  7659  			break
  7660  		}
  7661  		y := v_1.Args[0]
  7662  		if x != v_1.Args[1] {
  7663  			break
  7664  		}
  7665  		v.reset(OpOr64)
  7666  		v.AddArg(x)
  7667  		v.AddArg(y)
  7668  		return true
  7669  	}
  7670  	// match: (Or64 (Or64 x y) x)
  7671  	// cond:
  7672  	// result: (Or64 x y)
  7673  	for {
  7674  		v_0 := v.Args[0]
  7675  		if v_0.Op != OpOr64 {
  7676  			break
  7677  		}
  7678  		x := v_0.Args[0]
  7679  		y := v_0.Args[1]
  7680  		if x != v.Args[1] {
  7681  			break
  7682  		}
  7683  		v.reset(OpOr64)
  7684  		v.AddArg(x)
  7685  		v.AddArg(y)
  7686  		return true
  7687  	}
  7688  	// match: (Or64 (Or64 x y) y)
  7689  	// cond:
  7690  	// result: (Or64 x y)
  7691  	for {
  7692  		v_0 := v.Args[0]
  7693  		if v_0.Op != OpOr64 {
  7694  			break
  7695  		}
  7696  		x := v_0.Args[0]
  7697  		y := v_0.Args[1]
  7698  		if y != v.Args[1] {
  7699  			break
  7700  		}
  7701  		v.reset(OpOr64)
  7702  		v.AddArg(x)
  7703  		v.AddArg(y)
  7704  		return true
  7705  	}
  7706  	return false
  7707  }
  7708  func rewriteValuegeneric_OpOr8(v *Value, config *Config) bool {
  7709  	b := v.Block
  7710  	_ = b
  7711  	// match: (Or8  x (Const8  <t> [c]))
  7712  	// cond: x.Op != OpConst8
  7713  	// result: (Or8  (Const8  <t> [c]) x)
  7714  	for {
  7715  		x := v.Args[0]
  7716  		v_1 := v.Args[1]
  7717  		if v_1.Op != OpConst8 {
  7718  			break
  7719  		}
  7720  		t := v_1.Type
  7721  		c := v_1.AuxInt
  7722  		if !(x.Op != OpConst8) {
  7723  			break
  7724  		}
  7725  		v.reset(OpOr8)
  7726  		v0 := b.NewValue0(v.Pos, OpConst8, t)
  7727  		v0.AuxInt = c
  7728  		v.AddArg(v0)
  7729  		v.AddArg(x)
  7730  		return true
  7731  	}
  7732  	// match: (Or8  x x)
  7733  	// cond:
  7734  	// result: x
  7735  	for {
  7736  		x := v.Args[0]
  7737  		if x != v.Args[1] {
  7738  			break
  7739  		}
  7740  		v.reset(OpCopy)
  7741  		v.Type = x.Type
  7742  		v.AddArg(x)
  7743  		return true
  7744  	}
  7745  	// match: (Or8  (Const8  [0]) x)
  7746  	// cond:
  7747  	// result: x
  7748  	for {
  7749  		v_0 := v.Args[0]
  7750  		if v_0.Op != OpConst8 {
  7751  			break
  7752  		}
  7753  		if v_0.AuxInt != 0 {
  7754  			break
  7755  		}
  7756  		x := v.Args[1]
  7757  		v.reset(OpCopy)
  7758  		v.Type = x.Type
  7759  		v.AddArg(x)
  7760  		return true
  7761  	}
  7762  	// match: (Or8  (Const8  [-1]) _)
  7763  	// cond:
  7764  	// result: (Const8  [-1])
  7765  	for {
  7766  		v_0 := v.Args[0]
  7767  		if v_0.Op != OpConst8 {
  7768  			break
  7769  		}
  7770  		if v_0.AuxInt != -1 {
  7771  			break
  7772  		}
  7773  		v.reset(OpConst8)
  7774  		v.AuxInt = -1
  7775  		return true
  7776  	}
  7777  	// match: (Or8  x (Or8  x y))
  7778  	// cond:
  7779  	// result: (Or8  x y)
  7780  	for {
  7781  		x := v.Args[0]
  7782  		v_1 := v.Args[1]
  7783  		if v_1.Op != OpOr8 {
  7784  			break
  7785  		}
  7786  		if x != v_1.Args[0] {
  7787  			break
  7788  		}
  7789  		y := v_1.Args[1]
  7790  		v.reset(OpOr8)
  7791  		v.AddArg(x)
  7792  		v.AddArg(y)
  7793  		return true
  7794  	}
  7795  	// match: (Or8  x (Or8  y x))
  7796  	// cond:
  7797  	// result: (Or8  x y)
  7798  	for {
  7799  		x := v.Args[0]
  7800  		v_1 := v.Args[1]
  7801  		if v_1.Op != OpOr8 {
  7802  			break
  7803  		}
  7804  		y := v_1.Args[0]
  7805  		if x != v_1.Args[1] {
  7806  			break
  7807  		}
  7808  		v.reset(OpOr8)
  7809  		v.AddArg(x)
  7810  		v.AddArg(y)
  7811  		return true
  7812  	}
  7813  	// match: (Or8  (Or8  x y) x)
  7814  	// cond:
  7815  	// result: (Or8  x y)
  7816  	for {
  7817  		v_0 := v.Args[0]
  7818  		if v_0.Op != OpOr8 {
  7819  			break
  7820  		}
  7821  		x := v_0.Args[0]
  7822  		y := v_0.Args[1]
  7823  		if x != v.Args[1] {
  7824  			break
  7825  		}
  7826  		v.reset(OpOr8)
  7827  		v.AddArg(x)
  7828  		v.AddArg(y)
  7829  		return true
  7830  	}
  7831  	// match: (Or8  (Or8  x y) y)
  7832  	// cond:
  7833  	// result: (Or8  x y)
  7834  	for {
  7835  		v_0 := v.Args[0]
  7836  		if v_0.Op != OpOr8 {
  7837  			break
  7838  		}
  7839  		x := v_0.Args[0]
  7840  		y := v_0.Args[1]
  7841  		if y != v.Args[1] {
  7842  			break
  7843  		}
  7844  		v.reset(OpOr8)
  7845  		v.AddArg(x)
  7846  		v.AddArg(y)
  7847  		return true
  7848  	}
  7849  	return false
  7850  }
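// Editorial note on the Or16/Or32/Or64/Or8 rules above (not generator output):
// each width gets the same set of simplifications: a lone constant operand is
// canonicalized into the first argument, x|x collapses to x, or-ing with 0 is
// the identity, or-ing with -1 (all bits set) absorbs the other operand, and a
// nested Or sharing an operand with the outer one, in any of the four possible
// shapes, is deduplicated, e.g. (Or64 x (Or64 x y)) becomes (Or64 x y).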
  7851  func rewriteValuegeneric_OpPhi(v *Value, config *Config) bool {
  7852  	b := v.Block
  7853  	_ = b
  7854  	// match: (Phi (Const8  [c]) (Const8  [c]))
  7855  	// cond:
  7856  	// result: (Const8  [c])
  7857  	for {
  7858  		v_0 := v.Args[0]
  7859  		if v_0.Op != OpConst8 {
  7860  			break
  7861  		}
  7862  		c := v_0.AuxInt
  7863  		v_1 := v.Args[1]
  7864  		if v_1.Op != OpConst8 {
  7865  			break
  7866  		}
  7867  		if v_1.AuxInt != c {
  7868  			break
  7869  		}
  7870  		if len(v.Args) != 2 {
  7871  			break
  7872  		}
  7873  		v.reset(OpConst8)
  7874  		v.AuxInt = c
  7875  		return true
  7876  	}
  7877  	// match: (Phi (Const16 [c]) (Const16 [c]))
  7878  	// cond:
  7879  	// result: (Const16 [c])
  7880  	for {
  7881  		v_0 := v.Args[0]
  7882  		if v_0.Op != OpConst16 {
  7883  			break
  7884  		}
  7885  		c := v_0.AuxInt
  7886  		v_1 := v.Args[1]
  7887  		if v_1.Op != OpConst16 {
  7888  			break
  7889  		}
  7890  		if v_1.AuxInt != c {
  7891  			break
  7892  		}
  7893  		if len(v.Args) != 2 {
  7894  			break
  7895  		}
  7896  		v.reset(OpConst16)
  7897  		v.AuxInt = c
  7898  		return true
  7899  	}
  7900  	// match: (Phi (Const32 [c]) (Const32 [c]))
  7901  	// cond:
  7902  	// result: (Const32 [c])
  7903  	for {
  7904  		v_0 := v.Args[0]
  7905  		if v_0.Op != OpConst32 {
  7906  			break
  7907  		}
  7908  		c := v_0.AuxInt
  7909  		v_1 := v.Args[1]
  7910  		if v_1.Op != OpConst32 {
  7911  			break
  7912  		}
  7913  		if v_1.AuxInt != c {
  7914  			break
  7915  		}
  7916  		if len(v.Args) != 2 {
  7917  			break
  7918  		}
  7919  		v.reset(OpConst32)
  7920  		v.AuxInt = c
  7921  		return true
  7922  	}
  7923  	// match: (Phi (Const64 [c]) (Const64 [c]))
  7924  	// cond:
  7925  	// result: (Const64 [c])
  7926  	for {
  7927  		v_0 := v.Args[0]
  7928  		if v_0.Op != OpConst64 {
  7929  			break
  7930  		}
  7931  		c := v_0.AuxInt
  7932  		v_1 := v.Args[1]
  7933  		if v_1.Op != OpConst64 {
  7934  			break
  7935  		}
  7936  		if v_1.AuxInt != c {
  7937  			break
  7938  		}
  7939  		if len(v.Args) != 2 {
  7940  			break
  7941  		}
  7942  		v.reset(OpConst64)
  7943  		v.AuxInt = c
  7944  		return true
  7945  	}
  7946  	return false
  7947  }
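// Editorial note on rewriteValuegeneric_OpPhi above (not generator output): a
// Phi is folded to a constant only when it has exactly two arguments and both
// are the same constant of the same width; the explicit len(v.Args) != 2 check
// keeps the rule from firing on Phis with more predecessors, where the
// remaining, unexamined arguments might differ.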
  7948  func rewriteValuegeneric_OpPtrIndex(v *Value, config *Config) bool {
  7949  	b := v.Block
  7950  	_ = b
  7951  	// match: (PtrIndex <t> ptr idx)
  7952  	// cond: config.PtrSize == 4
  7953  	// result: (AddPtr ptr (Mul32 <config.fe.TypeInt()> idx (Const32 <config.fe.TypeInt()> [t.ElemType().Size()])))
  7954  	for {
  7955  		t := v.Type
  7956  		ptr := v.Args[0]
  7957  		idx := v.Args[1]
  7958  		if !(config.PtrSize == 4) {
  7959  			break
  7960  		}
  7961  		v.reset(OpAddPtr)
  7962  		v.AddArg(ptr)
  7963  		v0 := b.NewValue0(v.Pos, OpMul32, config.fe.TypeInt())
  7964  		v0.AddArg(idx)
  7965  		v1 := b.NewValue0(v.Pos, OpConst32, config.fe.TypeInt())
  7966  		v1.AuxInt = t.ElemType().Size()
  7967  		v0.AddArg(v1)
  7968  		v.AddArg(v0)
  7969  		return true
  7970  	}
  7971  	// match: (PtrIndex <t> ptr idx)
  7972  	// cond: config.PtrSize == 8
  7973  	// result: (AddPtr ptr (Mul64 <config.fe.TypeInt()> idx (Const64 <config.fe.TypeInt()> [t.ElemType().Size()])))
  7974  	for {
  7975  		t := v.Type
  7976  		ptr := v.Args[0]
  7977  		idx := v.Args[1]
  7978  		if !(config.PtrSize == 8) {
  7979  			break
  7980  		}
  7981  		v.reset(OpAddPtr)
  7982  		v.AddArg(ptr)
  7983  		v0 := b.NewValue0(v.Pos, OpMul64, config.fe.TypeInt())
  7984  		v0.AddArg(idx)
  7985  		v1 := b.NewValue0(v.Pos, OpConst64, config.fe.TypeInt())
  7986  		v1.AuxInt = t.ElemType().Size()
  7987  		v0.AddArg(v1)
  7988  		v.AddArg(v0)
  7989  		return true
  7990  	}
  7991  	return false
  7992  }
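// Editorial note on rewriteValuegeneric_OpPtrIndex above (not generator output):
// pointer indexing is lowered to ptr + idx*elemSize, where the element size
// comes from t.ElemType().Size(); the multiply and its constant are built as
// 32-bit values when config.PtrSize is 4 and as 64-bit values when it is 8, so
// the address arithmetic matches the target's pointer width.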
  7993  func rewriteValuegeneric_OpRsh16Ux16(v *Value, config *Config) bool {
  7994  	b := v.Block
  7995  	_ = b
  7996  	// match: (Rsh16Ux16 <t> x (Const16 [c]))
  7997  	// cond:
  7998  	// result: (Rsh16Ux64 x (Const64 <t> [int64(uint16(c))]))
  7999  	for {
  8000  		t := v.Type
  8001  		x := v.Args[0]
  8002  		v_1 := v.Args[1]
  8003  		if v_1.Op != OpConst16 {
  8004  			break
  8005  		}
  8006  		c := v_1.AuxInt
  8007  		v.reset(OpRsh16Ux64)
  8008  		v.AddArg(x)
  8009  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  8010  		v0.AuxInt = int64(uint16(c))
  8011  		v.AddArg(v0)
  8012  		return true
  8013  	}
  8014  	// match: (Rsh16Ux16 (Const16 [0]) _)
  8015  	// cond:
  8016  	// result: (Const16 [0])
  8017  	for {
  8018  		v_0 := v.Args[0]
  8019  		if v_0.Op != OpConst16 {
  8020  			break
  8021  		}
  8022  		if v_0.AuxInt != 0 {
  8023  			break
  8024  		}
  8025  		v.reset(OpConst16)
  8026  		v.AuxInt = 0
  8027  		return true
  8028  	}
  8029  	return false
  8030  }
  8031  func rewriteValuegeneric_OpRsh16Ux32(v *Value, config *Config) bool {
  8032  	b := v.Block
  8033  	_ = b
  8034  	// match: (Rsh16Ux32 <t> x (Const32 [c]))
  8035  	// cond:
  8036  	// result: (Rsh16Ux64 x (Const64 <t> [int64(uint32(c))]))
  8037  	for {
  8038  		t := v.Type
  8039  		x := v.Args[0]
  8040  		v_1 := v.Args[1]
  8041  		if v_1.Op != OpConst32 {
  8042  			break
  8043  		}
  8044  		c := v_1.AuxInt
  8045  		v.reset(OpRsh16Ux64)
  8046  		v.AddArg(x)
  8047  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  8048  		v0.AuxInt = int64(uint32(c))
  8049  		v.AddArg(v0)
  8050  		return true
  8051  	}
  8052  	// match: (Rsh16Ux32 (Const16 [0]) _)
  8053  	// cond:
  8054  	// result: (Const16 [0])
  8055  	for {
  8056  		v_0 := v.Args[0]
  8057  		if v_0.Op != OpConst16 {
  8058  			break
  8059  		}
  8060  		if v_0.AuxInt != 0 {
  8061  			break
  8062  		}
  8063  		v.reset(OpConst16)
  8064  		v.AuxInt = 0
  8065  		return true
  8066  	}
  8067  	return false
  8068  }
  8069  func rewriteValuegeneric_OpRsh16Ux64(v *Value, config *Config) bool {
  8070  	b := v.Block
  8071  	_ = b
  8072  	// match: (Rsh16Ux64 (Const16 [c]) (Const64 [d]))
  8073  	// cond:
  8074  	// result: (Const16 [int64(int16(uint16(c) >> uint64(d)))])
  8075  	for {
  8076  		v_0 := v.Args[0]
  8077  		if v_0.Op != OpConst16 {
  8078  			break
  8079  		}
  8080  		c := v_0.AuxInt
  8081  		v_1 := v.Args[1]
  8082  		if v_1.Op != OpConst64 {
  8083  			break
  8084  		}
  8085  		d := v_1.AuxInt
  8086  		v.reset(OpConst16)
  8087  		v.AuxInt = int64(int16(uint16(c) >> uint64(d)))
  8088  		return true
  8089  	}
  8090  	// match: (Rsh16Ux64 x (Const64 [0]))
  8091  	// cond:
  8092  	// result: x
  8093  	for {
  8094  		x := v.Args[0]
  8095  		v_1 := v.Args[1]
  8096  		if v_1.Op != OpConst64 {
  8097  			break
  8098  		}
  8099  		if v_1.AuxInt != 0 {
  8100  			break
  8101  		}
  8102  		v.reset(OpCopy)
  8103  		v.Type = x.Type
  8104  		v.AddArg(x)
  8105  		return true
  8106  	}
  8107  	// match: (Rsh16Ux64 (Const16 [0]) _)
  8108  	// cond:
  8109  	// result: (Const16 [0])
  8110  	for {
  8111  		v_0 := v.Args[0]
  8112  		if v_0.Op != OpConst16 {
  8113  			break
  8114  		}
  8115  		if v_0.AuxInt != 0 {
  8116  			break
  8117  		}
  8118  		v.reset(OpConst16)
  8119  		v.AuxInt = 0
  8120  		return true
  8121  	}
  8122  	// match: (Rsh16Ux64 _ (Const64 [c]))
  8123  	// cond: uint64(c) >= 16
  8124  	// result: (Const16 [0])
  8125  	for {
  8126  		v_1 := v.Args[1]
  8127  		if v_1.Op != OpConst64 {
  8128  			break
  8129  		}
  8130  		c := v_1.AuxInt
  8131  		if !(uint64(c) >= 16) {
  8132  			break
  8133  		}
  8134  		v.reset(OpConst16)
  8135  		v.AuxInt = 0
  8136  		return true
  8137  	}
  8138  	// match: (Rsh16Ux64 <t> (Rsh16Ux64 x (Const64 [c])) (Const64 [d]))
  8139  	// cond: !uaddOvf(c,d)
  8140  	// result: (Rsh16Ux64 x (Const64 <t> [c+d]))
  8141  	for {
  8142  		t := v.Type
  8143  		v_0 := v.Args[0]
  8144  		if v_0.Op != OpRsh16Ux64 {
  8145  			break
  8146  		}
  8147  		x := v_0.Args[0]
  8148  		v_0_1 := v_0.Args[1]
  8149  		if v_0_1.Op != OpConst64 {
  8150  			break
  8151  		}
  8152  		c := v_0_1.AuxInt
  8153  		v_1 := v.Args[1]
  8154  		if v_1.Op != OpConst64 {
  8155  			break
  8156  		}
  8157  		d := v_1.AuxInt
  8158  		if !(!uaddOvf(c, d)) {
  8159  			break
  8160  		}
  8161  		v.reset(OpRsh16Ux64)
  8162  		v.AddArg(x)
  8163  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  8164  		v0.AuxInt = c + d
  8165  		v.AddArg(v0)
  8166  		return true
  8167  	}
  8168  	// match: (Rsh16Ux64 (Lsh16x64 (Rsh16Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
  8169  	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
  8170  	// result: (Rsh16Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
  8171  	for {
  8172  		v_0 := v.Args[0]
  8173  		if v_0.Op != OpLsh16x64 {
  8174  			break
  8175  		}
  8176  		v_0_0 := v_0.Args[0]
  8177  		if v_0_0.Op != OpRsh16Ux64 {
  8178  			break
  8179  		}
  8180  		x := v_0_0.Args[0]
  8181  		v_0_0_1 := v_0_0.Args[1]
  8182  		if v_0_0_1.Op != OpConst64 {
  8183  			break
  8184  		}
  8185  		c1 := v_0_0_1.AuxInt
  8186  		v_0_1 := v_0.Args[1]
  8187  		if v_0_1.Op != OpConst64 {
  8188  			break
  8189  		}
  8190  		c2 := v_0_1.AuxInt
  8191  		v_1 := v.Args[1]
  8192  		if v_1.Op != OpConst64 {
  8193  			break
  8194  		}
  8195  		c3 := v_1.AuxInt
  8196  		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
  8197  			break
  8198  		}
  8199  		v.reset(OpRsh16Ux64)
  8200  		v.AddArg(x)
  8201  		v0 := b.NewValue0(v.Pos, OpConst64, config.fe.TypeUInt64())
  8202  		v0.AuxInt = c1 - c2 + c3
  8203  		v.AddArg(v0)
  8204  		return true
  8205  	}
  8206  	return false
  8207  }
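// Editorial note on rewriteValuegeneric_OpRsh16Ux64 above (not generator
// output): the Rsh16Ux16/Rsh16Ux32 rules above (and Rsh16Ux8 below) only
// normalize the shift count to a 64-bit constant; the real simplifications live
// here. Two constants fold through uint16, so (Rsh16Ux64 (Const16 [-1])
// (Const64 [12])) becomes (Const16 [15]) since 0xFFFF>>12 is 0xF; a shift by 0
// is the identity; shifting the constant 0, or shifting by 16 or more, yields 0;
// two stacked unsigned shifts merge into one when their counts add without
// unsigned overflow (uaddOvf); and a shift-left sandwiched between two
// shifts-right with compatible constant counts folds into a single shift by
// c1-c2+c3.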
  8208  func rewriteValuegeneric_OpRsh16Ux8(v *Value, config *Config) bool {
  8209  	b := v.Block
  8210  	_ = b
  8211  	// match: (Rsh16Ux8  <t> x (Const8  [c]))
  8212  	// cond:
  8213  	// result: (Rsh16Ux64 x (Const64 <t> [int64(uint8(c))]))
  8214  	for {
  8215  		t := v.Type
  8216  		x := v.Args[0]
  8217  		v_1 := v.Args[1]
  8218  		if v_1.Op != OpConst8 {
  8219  			break
  8220  		}
  8221  		c := v_1.AuxInt
  8222  		v.reset(OpRsh16Ux64)
  8223  		v.AddArg(x)
  8224  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  8225  		v0.AuxInt = int64(uint8(c))
  8226  		v.AddArg(v0)
  8227  		return true
  8228  	}
  8229  	// match: (Rsh16Ux8 (Const16 [0]) _)
  8230  	// cond:
  8231  	// result: (Const16 [0])
  8232  	for {
  8233  		v_0 := v.Args[0]
  8234  		if v_0.Op != OpConst16 {
  8235  			break
  8236  		}
  8237  		if v_0.AuxInt != 0 {
  8238  			break
  8239  		}
  8240  		v.reset(OpConst16)
  8241  		v.AuxInt = 0
  8242  		return true
  8243  	}
  8244  	return false
  8245  }
  8246  func rewriteValuegeneric_OpRsh16x16(v *Value, config *Config) bool {
  8247  	b := v.Block
  8248  	_ = b
  8249  	// match: (Rsh16x16  <t> x (Const16 [c]))
  8250  	// cond:
  8251  	// result: (Rsh16x64  x (Const64 <t> [int64(uint16(c))]))
  8252  	for {
  8253  		t := v.Type
  8254  		x := v.Args[0]
  8255  		v_1 := v.Args[1]
  8256  		if v_1.Op != OpConst16 {
  8257  			break
  8258  		}
  8259  		c := v_1.AuxInt
  8260  		v.reset(OpRsh16x64)
  8261  		v.AddArg(x)
  8262  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  8263  		v0.AuxInt = int64(uint16(c))
  8264  		v.AddArg(v0)
  8265  		return true
  8266  	}
  8267  	// match: (Rsh16x16  (Const16 [0]) _)
  8268  	// cond:
  8269  	// result: (Const16 [0])
  8270  	for {
  8271  		v_0 := v.Args[0]
  8272  		if v_0.Op != OpConst16 {
  8273  			break
  8274  		}
  8275  		if v_0.AuxInt != 0 {
  8276  			break
  8277  		}
  8278  		v.reset(OpConst16)
  8279  		v.AuxInt = 0
  8280  		return true
  8281  	}
  8282  	return false
  8283  }
  8284  func rewriteValuegeneric_OpRsh16x32(v *Value, config *Config) bool {
  8285  	b := v.Block
  8286  	_ = b
  8287  	// match: (Rsh16x32  <t> x (Const32 [c]))
  8288  	// cond:
  8289  	// result: (Rsh16x64  x (Const64 <t> [int64(uint32(c))]))
  8290  	for {
  8291  		t := v.Type
  8292  		x := v.Args[0]
  8293  		v_1 := v.Args[1]
  8294  		if v_1.Op != OpConst32 {
  8295  			break
  8296  		}
  8297  		c := v_1.AuxInt
  8298  		v.reset(OpRsh16x64)
  8299  		v.AddArg(x)
  8300  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  8301  		v0.AuxInt = int64(uint32(c))
  8302  		v.AddArg(v0)
  8303  		return true
  8304  	}
  8305  	// match: (Rsh16x32  (Const16 [0]) _)
  8306  	// cond:
  8307  	// result: (Const16 [0])
  8308  	for {
  8309  		v_0 := v.Args[0]
  8310  		if v_0.Op != OpConst16 {
  8311  			break
  8312  		}
  8313  		if v_0.AuxInt != 0 {
  8314  			break
  8315  		}
  8316  		v.reset(OpConst16)
  8317  		v.AuxInt = 0
  8318  		return true
  8319  	}
  8320  	return false
  8321  }
  8322  func rewriteValuegeneric_OpRsh16x64(v *Value, config *Config) bool {
  8323  	b := v.Block
  8324  	_ = b
  8325  	// match: (Rsh16x64  (Const16 [c]) (Const64 [d]))
  8326  	// cond:
  8327  	// result: (Const16 [int64(int16(c) >> uint64(d))])
  8328  	for {
  8329  		v_0 := v.Args[0]
  8330  		if v_0.Op != OpConst16 {
  8331  			break
  8332  		}
  8333  		c := v_0.AuxInt
  8334  		v_1 := v.Args[1]
  8335  		if v_1.Op != OpConst64 {
  8336  			break
  8337  		}
  8338  		d := v_1.AuxInt
  8339  		v.reset(OpConst16)
  8340  		v.AuxInt = int64(int16(c) >> uint64(d))
  8341  		return true
  8342  	}
  8343  	// match: (Rsh16x64  x (Const64 [0]))
  8344  	// cond:
  8345  	// result: x
  8346  	for {
  8347  		x := v.Args[0]
  8348  		v_1 := v.Args[1]
  8349  		if v_1.Op != OpConst64 {
  8350  			break
  8351  		}
  8352  		if v_1.AuxInt != 0 {
  8353  			break
  8354  		}
  8355  		v.reset(OpCopy)
  8356  		v.Type = x.Type
  8357  		v.AddArg(x)
  8358  		return true
  8359  	}
  8360  	// match: (Rsh16x64  (Const16 [0]) _)
  8361  	// cond:
  8362  	// result: (Const16 [0])
  8363  	for {
  8364  		v_0 := v.Args[0]
  8365  		if v_0.Op != OpConst16 {
  8366  			break
  8367  		}
  8368  		if v_0.AuxInt != 0 {
  8369  			break
  8370  		}
  8371  		v.reset(OpConst16)
  8372  		v.AuxInt = 0
  8373  		return true
  8374  	}
  8375  	// match: (Rsh16x64 <t> (Rsh16x64 x (Const64 [c])) (Const64 [d]))
  8376  	// cond: !uaddOvf(c,d)
  8377  	// result: (Rsh16x64 x (Const64 <t> [c+d]))
  8378  	for {
  8379  		t := v.Type
  8380  		v_0 := v.Args[0]
  8381  		if v_0.Op != OpRsh16x64 {
  8382  			break
  8383  		}
  8384  		x := v_0.Args[0]
  8385  		v_0_1 := v_0.Args[1]
  8386  		if v_0_1.Op != OpConst64 {
  8387  			break
  8388  		}
  8389  		c := v_0_1.AuxInt
  8390  		v_1 := v.Args[1]
  8391  		if v_1.Op != OpConst64 {
  8392  			break
  8393  		}
  8394  		d := v_1.AuxInt
  8395  		if !(!uaddOvf(c, d)) {
  8396  			break
  8397  		}
  8398  		v.reset(OpRsh16x64)
  8399  		v.AddArg(x)
  8400  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  8401  		v0.AuxInt = c + d
  8402  		v.AddArg(v0)
  8403  		return true
  8404  	}
  8405  	return false
  8406  }
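// Illustrative sketch (not part of the generated rules; the helper name is
// hypothetical). The nested-shift rule above uses (x >> c) >> d == x >> (c+d)
// for arithmetic shifts. The identity holds even when c+d reaches or exceeds
// the width, because an arithmetic shift by at least the width saturates at
// the sign bit; the !uaddOvf(c, d) guard only rules out wrap-around of the
// 64-bit count itself.
func exampleRsh16xCombine(x int16, c, d uint64) (nested, combined int16) {
	nested = (x >> c) >> d  // shape the rule matches
	combined = x >> (c + d) // shape the rule produces
	return
}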
  8407  func rewriteValuegeneric_OpRsh16x8(v *Value, config *Config) bool {
  8408  	b := v.Block
  8409  	_ = b
  8410  	// match: (Rsh16x8   <t> x (Const8  [c]))
  8411  	// cond:
  8412  	// result: (Rsh16x64  x (Const64 <t> [int64(uint8(c))]))
  8413  	for {
  8414  		t := v.Type
  8415  		x := v.Args[0]
  8416  		v_1 := v.Args[1]
  8417  		if v_1.Op != OpConst8 {
  8418  			break
  8419  		}
  8420  		c := v_1.AuxInt
  8421  		v.reset(OpRsh16x64)
  8422  		v.AddArg(x)
  8423  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  8424  		v0.AuxInt = int64(uint8(c))
  8425  		v.AddArg(v0)
  8426  		return true
  8427  	}
  8428  	// match: (Rsh16x8  (Const16 [0]) _)
  8429  	// cond:
  8430  	// result: (Const16 [0])
  8431  	for {
  8432  		v_0 := v.Args[0]
  8433  		if v_0.Op != OpConst16 {
  8434  			break
  8435  		}
  8436  		if v_0.AuxInt != 0 {
  8437  			break
  8438  		}
  8439  		v.reset(OpConst16)
  8440  		v.AuxInt = 0
  8441  		return true
  8442  	}
  8443  	return false
  8444  }
  8445  func rewriteValuegeneric_OpRsh32Ux16(v *Value, config *Config) bool {
  8446  	b := v.Block
  8447  	_ = b
  8448  	// match: (Rsh32Ux16 <t> x (Const16 [c]))
  8449  	// cond:
  8450  	// result: (Rsh32Ux64 x (Const64 <t> [int64(uint16(c))]))
  8451  	for {
  8452  		t := v.Type
  8453  		x := v.Args[0]
  8454  		v_1 := v.Args[1]
  8455  		if v_1.Op != OpConst16 {
  8456  			break
  8457  		}
  8458  		c := v_1.AuxInt
  8459  		v.reset(OpRsh32Ux64)
  8460  		v.AddArg(x)
  8461  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  8462  		v0.AuxInt = int64(uint16(c))
  8463  		v.AddArg(v0)
  8464  		return true
  8465  	}
  8466  	// match: (Rsh32Ux16 (Const32 [0]) _)
  8467  	// cond:
  8468  	// result: (Const32 [0])
  8469  	for {
  8470  		v_0 := v.Args[0]
  8471  		if v_0.Op != OpConst32 {
  8472  			break
  8473  		}
  8474  		if v_0.AuxInt != 0 {
  8475  			break
  8476  		}
  8477  		v.reset(OpConst32)
  8478  		v.AuxInt = 0
  8479  		return true
  8480  	}
  8481  	return false
  8482  }
  8483  func rewriteValuegeneric_OpRsh32Ux32(v *Value, config *Config) bool {
  8484  	b := v.Block
  8485  	_ = b
  8486  	// match: (Rsh32Ux32 <t> x (Const32 [c]))
  8487  	// cond:
  8488  	// result: (Rsh32Ux64 x (Const64 <t> [int64(uint32(c))]))
  8489  	for {
  8490  		t := v.Type
  8491  		x := v.Args[0]
  8492  		v_1 := v.Args[1]
  8493  		if v_1.Op != OpConst32 {
  8494  			break
  8495  		}
  8496  		c := v_1.AuxInt
  8497  		v.reset(OpRsh32Ux64)
  8498  		v.AddArg(x)
  8499  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  8500  		v0.AuxInt = int64(uint32(c))
  8501  		v.AddArg(v0)
  8502  		return true
  8503  	}
  8504  	// match: (Rsh32Ux32 (Const32 [0]) _)
  8505  	// cond:
  8506  	// result: (Const32 [0])
  8507  	for {
  8508  		v_0 := v.Args[0]
  8509  		if v_0.Op != OpConst32 {
  8510  			break
  8511  		}
  8512  		if v_0.AuxInt != 0 {
  8513  			break
  8514  		}
  8515  		v.reset(OpConst32)
  8516  		v.AuxInt = 0
  8517  		return true
  8518  	}
  8519  	return false
  8520  }
  8521  func rewriteValuegeneric_OpRsh32Ux64(v *Value, config *Config) bool {
  8522  	b := v.Block
  8523  	_ = b
  8524  	// match: (Rsh32Ux64 (Const32 [c]) (Const64 [d]))
  8525  	// cond:
  8526  	// result: (Const32 [int64(int32(uint32(c) >> uint64(d)))])
  8527  	for {
  8528  		v_0 := v.Args[0]
  8529  		if v_0.Op != OpConst32 {
  8530  			break
  8531  		}
  8532  		c := v_0.AuxInt
  8533  		v_1 := v.Args[1]
  8534  		if v_1.Op != OpConst64 {
  8535  			break
  8536  		}
  8537  		d := v_1.AuxInt
  8538  		v.reset(OpConst32)
  8539  		v.AuxInt = int64(int32(uint32(c) >> uint64(d)))
  8540  		return true
  8541  	}
  8542  	// match: (Rsh32Ux64 x (Const64 [0]))
  8543  	// cond:
  8544  	// result: x
  8545  	for {
  8546  		x := v.Args[0]
  8547  		v_1 := v.Args[1]
  8548  		if v_1.Op != OpConst64 {
  8549  			break
  8550  		}
  8551  		if v_1.AuxInt != 0 {
  8552  			break
  8553  		}
  8554  		v.reset(OpCopy)
  8555  		v.Type = x.Type
  8556  		v.AddArg(x)
  8557  		return true
  8558  	}
  8559  	// match: (Rsh32Ux64 (Const32 [0]) _)
  8560  	// cond:
  8561  	// result: (Const32 [0])
  8562  	for {
  8563  		v_0 := v.Args[0]
  8564  		if v_0.Op != OpConst32 {
  8565  			break
  8566  		}
  8567  		if v_0.AuxInt != 0 {
  8568  			break
  8569  		}
  8570  		v.reset(OpConst32)
  8571  		v.AuxInt = 0
  8572  		return true
  8573  	}
  8574  	// match: (Rsh32Ux64 _ (Const64 [c]))
  8575  	// cond: uint64(c) >= 32
  8576  	// result: (Const32 [0])
  8577  	for {
  8578  		v_1 := v.Args[1]
  8579  		if v_1.Op != OpConst64 {
  8580  			break
  8581  		}
  8582  		c := v_1.AuxInt
  8583  		if !(uint64(c) >= 32) {
  8584  			break
  8585  		}
  8586  		v.reset(OpConst32)
  8587  		v.AuxInt = 0
  8588  		return true
  8589  	}
  8590  	// match: (Rsh32Ux64 <t> (Rsh32Ux64 x (Const64 [c])) (Const64 [d]))
  8591  	// cond: !uaddOvf(c,d)
  8592  	// result: (Rsh32Ux64 x (Const64 <t> [c+d]))
  8593  	for {
  8594  		t := v.Type
  8595  		v_0 := v.Args[0]
  8596  		if v_0.Op != OpRsh32Ux64 {
  8597  			break
  8598  		}
  8599  		x := v_0.Args[0]
  8600  		v_0_1 := v_0.Args[1]
  8601  		if v_0_1.Op != OpConst64 {
  8602  			break
  8603  		}
  8604  		c := v_0_1.AuxInt
  8605  		v_1 := v.Args[1]
  8606  		if v_1.Op != OpConst64 {
  8607  			break
  8608  		}
  8609  		d := v_1.AuxInt
  8610  		if !(!uaddOvf(c, d)) {
  8611  			break
  8612  		}
  8613  		v.reset(OpRsh32Ux64)
  8614  		v.AddArg(x)
  8615  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  8616  		v0.AuxInt = c + d
  8617  		v.AddArg(v0)
  8618  		return true
  8619  	}
  8620  	// match: (Rsh32Ux64 (Lsh32x64 (Rsh32Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
  8621  	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
  8622  	// result: (Rsh32Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
  8623  	for {
  8624  		v_0 := v.Args[0]
  8625  		if v_0.Op != OpLsh32x64 {
  8626  			break
  8627  		}
  8628  		v_0_0 := v_0.Args[0]
  8629  		if v_0_0.Op != OpRsh32Ux64 {
  8630  			break
  8631  		}
  8632  		x := v_0_0.Args[0]
  8633  		v_0_0_1 := v_0_0.Args[1]
  8634  		if v_0_0_1.Op != OpConst64 {
  8635  			break
  8636  		}
  8637  		c1 := v_0_0_1.AuxInt
  8638  		v_0_1 := v_0.Args[1]
  8639  		if v_0_1.Op != OpConst64 {
  8640  			break
  8641  		}
  8642  		c2 := v_0_1.AuxInt
  8643  		v_1 := v.Args[1]
  8644  		if v_1.Op != OpConst64 {
  8645  			break
  8646  		}
  8647  		c3 := v_1.AuxInt
  8648  		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
  8649  			break
  8650  		}
  8651  		v.reset(OpRsh32Ux64)
  8652  		v.AddArg(x)
  8653  		v0 := b.NewValue0(v.Pos, OpConst64, config.fe.TypeUInt64())
  8654  		v0.AuxInt = c1 - c2 + c3
  8655  		v.AddArg(v0)
  8656  		return true
  8657  	}
  8658  	return false
  8659  }
  8660  func rewriteValuegeneric_OpRsh32Ux8(v *Value, config *Config) bool {
  8661  	b := v.Block
  8662  	_ = b
  8663  	// match: (Rsh32Ux8  <t> x (Const8  [c]))
  8664  	// cond:
  8665  	// result: (Rsh32Ux64 x (Const64 <t> [int64(uint8(c))]))
  8666  	for {
  8667  		t := v.Type
  8668  		x := v.Args[0]
  8669  		v_1 := v.Args[1]
  8670  		if v_1.Op != OpConst8 {
  8671  			break
  8672  		}
  8673  		c := v_1.AuxInt
  8674  		v.reset(OpRsh32Ux64)
  8675  		v.AddArg(x)
  8676  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  8677  		v0.AuxInt = int64(uint8(c))
  8678  		v.AddArg(v0)
  8679  		return true
  8680  	}
  8681  	// match: (Rsh32Ux8 (Const32 [0]) _)
  8682  	// cond:
  8683  	// result: (Const32 [0])
  8684  	for {
  8685  		v_0 := v.Args[0]
  8686  		if v_0.Op != OpConst32 {
  8687  			break
  8688  		}
  8689  		if v_0.AuxInt != 0 {
  8690  			break
  8691  		}
  8692  		v.reset(OpConst32)
  8693  		v.AuxInt = 0
  8694  		return true
  8695  	}
  8696  	return false
  8697  }
  8698  func rewriteValuegeneric_OpRsh32x16(v *Value, config *Config) bool {
  8699  	b := v.Block
  8700  	_ = b
  8701  	// match: (Rsh32x16  <t> x (Const16 [c]))
  8702  	// cond:
  8703  	// result: (Rsh32x64  x (Const64 <t> [int64(uint16(c))]))
  8704  	for {
  8705  		t := v.Type
  8706  		x := v.Args[0]
  8707  		v_1 := v.Args[1]
  8708  		if v_1.Op != OpConst16 {
  8709  			break
  8710  		}
  8711  		c := v_1.AuxInt
  8712  		v.reset(OpRsh32x64)
  8713  		v.AddArg(x)
  8714  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  8715  		v0.AuxInt = int64(uint16(c))
  8716  		v.AddArg(v0)
  8717  		return true
  8718  	}
  8719  	// match: (Rsh32x16  (Const32 [0]) _)
  8720  	// cond:
  8721  	// result: (Const32 [0])
  8722  	for {
  8723  		v_0 := v.Args[0]
  8724  		if v_0.Op != OpConst32 {
  8725  			break
  8726  		}
  8727  		if v_0.AuxInt != 0 {
  8728  			break
  8729  		}
  8730  		v.reset(OpConst32)
  8731  		v.AuxInt = 0
  8732  		return true
  8733  	}
  8734  	return false
  8735  }
  8736  func rewriteValuegeneric_OpRsh32x32(v *Value, config *Config) bool {
  8737  	b := v.Block
  8738  	_ = b
  8739  	// match: (Rsh32x32  <t> x (Const32 [c]))
  8740  	// cond:
  8741  	// result: (Rsh32x64  x (Const64 <t> [int64(uint32(c))]))
  8742  	for {
  8743  		t := v.Type
  8744  		x := v.Args[0]
  8745  		v_1 := v.Args[1]
  8746  		if v_1.Op != OpConst32 {
  8747  			break
  8748  		}
  8749  		c := v_1.AuxInt
  8750  		v.reset(OpRsh32x64)
  8751  		v.AddArg(x)
  8752  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  8753  		v0.AuxInt = int64(uint32(c))
  8754  		v.AddArg(v0)
  8755  		return true
  8756  	}
  8757  	// match: (Rsh32x32  (Const32 [0]) _)
  8758  	// cond:
  8759  	// result: (Const32 [0])
  8760  	for {
  8761  		v_0 := v.Args[0]
  8762  		if v_0.Op != OpConst32 {
  8763  			break
  8764  		}
  8765  		if v_0.AuxInt != 0 {
  8766  			break
  8767  		}
  8768  		v.reset(OpConst32)
  8769  		v.AuxInt = 0
  8770  		return true
  8771  	}
  8772  	return false
  8773  }
  8774  func rewriteValuegeneric_OpRsh32x64(v *Value, config *Config) bool {
  8775  	b := v.Block
  8776  	_ = b
  8777  	// match: (Rsh32x64  (Const32 [c]) (Const64 [d]))
  8778  	// cond:
  8779  	// result: (Const32 [int64(int32(c) >> uint64(d))])
  8780  	for {
  8781  		v_0 := v.Args[0]
  8782  		if v_0.Op != OpConst32 {
  8783  			break
  8784  		}
  8785  		c := v_0.AuxInt
  8786  		v_1 := v.Args[1]
  8787  		if v_1.Op != OpConst64 {
  8788  			break
  8789  		}
  8790  		d := v_1.AuxInt
  8791  		v.reset(OpConst32)
  8792  		v.AuxInt = int64(int32(c) >> uint64(d))
  8793  		return true
  8794  	}
  8795  	// match: (Rsh32x64  x (Const64 [0]))
  8796  	// cond:
  8797  	// result: x
  8798  	for {
  8799  		x := v.Args[0]
  8800  		v_1 := v.Args[1]
  8801  		if v_1.Op != OpConst64 {
  8802  			break
  8803  		}
  8804  		if v_1.AuxInt != 0 {
  8805  			break
  8806  		}
  8807  		v.reset(OpCopy)
  8808  		v.Type = x.Type
  8809  		v.AddArg(x)
  8810  		return true
  8811  	}
  8812  	// match: (Rsh32x64  (Const32 [0]) _)
  8813  	// cond:
  8814  	// result: (Const32 [0])
  8815  	for {
  8816  		v_0 := v.Args[0]
  8817  		if v_0.Op != OpConst32 {
  8818  			break
  8819  		}
  8820  		if v_0.AuxInt != 0 {
  8821  			break
  8822  		}
  8823  		v.reset(OpConst32)
  8824  		v.AuxInt = 0
  8825  		return true
  8826  	}
  8827  	// match: (Rsh32x64 <t> (Rsh32x64 x (Const64 [c])) (Const64 [d]))
  8828  	// cond: !uaddOvf(c,d)
  8829  	// result: (Rsh32x64 x (Const64 <t> [c+d]))
  8830  	for {
  8831  		t := v.Type
  8832  		v_0 := v.Args[0]
  8833  		if v_0.Op != OpRsh32x64 {
  8834  			break
  8835  		}
  8836  		x := v_0.Args[0]
  8837  		v_0_1 := v_0.Args[1]
  8838  		if v_0_1.Op != OpConst64 {
  8839  			break
  8840  		}
  8841  		c := v_0_1.AuxInt
  8842  		v_1 := v.Args[1]
  8843  		if v_1.Op != OpConst64 {
  8844  			break
  8845  		}
  8846  		d := v_1.AuxInt
  8847  		if !(!uaddOvf(c, d)) {
  8848  			break
  8849  		}
  8850  		v.reset(OpRsh32x64)
  8851  		v.AddArg(x)
  8852  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  8853  		v0.AuxInt = c + d
  8854  		v.AddArg(v0)
  8855  		return true
  8856  	}
  8857  	return false
  8858  }
  8859  func rewriteValuegeneric_OpRsh32x8(v *Value, config *Config) bool {
  8860  	b := v.Block
  8861  	_ = b
  8862  	// match: (Rsh32x8   <t> x (Const8  [c]))
  8863  	// cond:
  8864  	// result: (Rsh32x64  x (Const64 <t> [int64(uint8(c))]))
  8865  	for {
  8866  		t := v.Type
  8867  		x := v.Args[0]
  8868  		v_1 := v.Args[1]
  8869  		if v_1.Op != OpConst8 {
  8870  			break
  8871  		}
  8872  		c := v_1.AuxInt
  8873  		v.reset(OpRsh32x64)
  8874  		v.AddArg(x)
  8875  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  8876  		v0.AuxInt = int64(uint8(c))
  8877  		v.AddArg(v0)
  8878  		return true
  8879  	}
  8880  	// match: (Rsh32x8  (Const32 [0]) _)
  8881  	// cond:
  8882  	// result: (Const32 [0])
  8883  	for {
  8884  		v_0 := v.Args[0]
  8885  		if v_0.Op != OpConst32 {
  8886  			break
  8887  		}
  8888  		if v_0.AuxInt != 0 {
  8889  			break
  8890  		}
  8891  		v.reset(OpConst32)
  8892  		v.AuxInt = 0
  8893  		return true
  8894  	}
  8895  	return false
  8896  }
  8897  func rewriteValuegeneric_OpRsh64Ux16(v *Value, config *Config) bool {
  8898  	b := v.Block
  8899  	_ = b
  8900  	// match: (Rsh64Ux16 <t> x (Const16 [c]))
  8901  	// cond:
  8902  	// result: (Rsh64Ux64 x (Const64 <t> [int64(uint16(c))]))
  8903  	for {
  8904  		t := v.Type
  8905  		x := v.Args[0]
  8906  		v_1 := v.Args[1]
  8907  		if v_1.Op != OpConst16 {
  8908  			break
  8909  		}
  8910  		c := v_1.AuxInt
  8911  		v.reset(OpRsh64Ux64)
  8912  		v.AddArg(x)
  8913  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  8914  		v0.AuxInt = int64(uint16(c))
  8915  		v.AddArg(v0)
  8916  		return true
  8917  	}
  8918  	// match: (Rsh64Ux16 (Const64 [0]) _)
  8919  	// cond:
  8920  	// result: (Const64 [0])
  8921  	for {
  8922  		v_0 := v.Args[0]
  8923  		if v_0.Op != OpConst64 {
  8924  			break
  8925  		}
  8926  		if v_0.AuxInt != 0 {
  8927  			break
  8928  		}
  8929  		v.reset(OpConst64)
  8930  		v.AuxInt = 0
  8931  		return true
  8932  	}
  8933  	return false
  8934  }
  8935  func rewriteValuegeneric_OpRsh64Ux32(v *Value, config *Config) bool {
  8936  	b := v.Block
  8937  	_ = b
  8938  	// match: (Rsh64Ux32 <t> x (Const32 [c]))
  8939  	// cond:
  8940  	// result: (Rsh64Ux64 x (Const64 <t> [int64(uint32(c))]))
  8941  	for {
  8942  		t := v.Type
  8943  		x := v.Args[0]
  8944  		v_1 := v.Args[1]
  8945  		if v_1.Op != OpConst32 {
  8946  			break
  8947  		}
  8948  		c := v_1.AuxInt
  8949  		v.reset(OpRsh64Ux64)
  8950  		v.AddArg(x)
  8951  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  8952  		v0.AuxInt = int64(uint32(c))
  8953  		v.AddArg(v0)
  8954  		return true
  8955  	}
  8956  	// match: (Rsh64Ux32 (Const64 [0]) _)
  8957  	// cond:
  8958  	// result: (Const64 [0])
  8959  	for {
  8960  		v_0 := v.Args[0]
  8961  		if v_0.Op != OpConst64 {
  8962  			break
  8963  		}
  8964  		if v_0.AuxInt != 0 {
  8965  			break
  8966  		}
  8967  		v.reset(OpConst64)
  8968  		v.AuxInt = 0
  8969  		return true
  8970  	}
  8971  	return false
  8972  }
  8973  func rewriteValuegeneric_OpRsh64Ux64(v *Value, config *Config) bool {
  8974  	b := v.Block
  8975  	_ = b
  8976  	// match: (Rsh64Ux64 (Const64 [c]) (Const64 [d]))
  8977  	// cond:
  8978  	// result: (Const64 [int64(uint64(c) >> uint64(d))])
  8979  	for {
  8980  		v_0 := v.Args[0]
  8981  		if v_0.Op != OpConst64 {
  8982  			break
  8983  		}
  8984  		c := v_0.AuxInt
  8985  		v_1 := v.Args[1]
  8986  		if v_1.Op != OpConst64 {
  8987  			break
  8988  		}
  8989  		d := v_1.AuxInt
  8990  		v.reset(OpConst64)
  8991  		v.AuxInt = int64(uint64(c) >> uint64(d))
  8992  		return true
  8993  	}
  8994  	// match: (Rsh64Ux64 x (Const64 [0]))
  8995  	// cond:
  8996  	// result: x
  8997  	for {
  8998  		x := v.Args[0]
  8999  		v_1 := v.Args[1]
  9000  		if v_1.Op != OpConst64 {
  9001  			break
  9002  		}
  9003  		if v_1.AuxInt != 0 {
  9004  			break
  9005  		}
  9006  		v.reset(OpCopy)
  9007  		v.Type = x.Type
  9008  		v.AddArg(x)
  9009  		return true
  9010  	}
  9011  	// match: (Rsh64Ux64 (Const64 [0]) _)
  9012  	// cond:
  9013  	// result: (Const64 [0])
  9014  	for {
  9015  		v_0 := v.Args[0]
  9016  		if v_0.Op != OpConst64 {
  9017  			break
  9018  		}
  9019  		if v_0.AuxInt != 0 {
  9020  			break
  9021  		}
  9022  		v.reset(OpConst64)
  9023  		v.AuxInt = 0
  9024  		return true
  9025  	}
  9026  	// match: (Rsh64Ux64 _ (Const64 [c]))
  9027  	// cond: uint64(c) >= 64
  9028  	// result: (Const64 [0])
  9029  	for {
  9030  		v_1 := v.Args[1]
  9031  		if v_1.Op != OpConst64 {
  9032  			break
  9033  		}
  9034  		c := v_1.AuxInt
  9035  		if !(uint64(c) >= 64) {
  9036  			break
  9037  		}
  9038  		v.reset(OpConst64)
  9039  		v.AuxInt = 0
  9040  		return true
  9041  	}
  9042  	// match: (Rsh64Ux64 <t> (Rsh64Ux64 x (Const64 [c])) (Const64 [d]))
  9043  	// cond: !uaddOvf(c,d)
  9044  	// result: (Rsh64Ux64 x (Const64 <t> [c+d]))
  9045  	for {
  9046  		t := v.Type
  9047  		v_0 := v.Args[0]
  9048  		if v_0.Op != OpRsh64Ux64 {
  9049  			break
  9050  		}
  9051  		x := v_0.Args[0]
  9052  		v_0_1 := v_0.Args[1]
  9053  		if v_0_1.Op != OpConst64 {
  9054  			break
  9055  		}
  9056  		c := v_0_1.AuxInt
  9057  		v_1 := v.Args[1]
  9058  		if v_1.Op != OpConst64 {
  9059  			break
  9060  		}
  9061  		d := v_1.AuxInt
  9062  		if !(!uaddOvf(c, d)) {
  9063  			break
  9064  		}
  9065  		v.reset(OpRsh64Ux64)
  9066  		v.AddArg(x)
  9067  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  9068  		v0.AuxInt = c + d
  9069  		v.AddArg(v0)
  9070  		return true
  9071  	}
  9072  	// match: (Rsh64Ux64 (Lsh64x64 (Rsh64Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
  9073  	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
  9074  	// result: (Rsh64Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
  9075  	for {
  9076  		v_0 := v.Args[0]
  9077  		if v_0.Op != OpLsh64x64 {
  9078  			break
  9079  		}
  9080  		v_0_0 := v_0.Args[0]
  9081  		if v_0_0.Op != OpRsh64Ux64 {
  9082  			break
  9083  		}
  9084  		x := v_0_0.Args[0]
  9085  		v_0_0_1 := v_0_0.Args[1]
  9086  		if v_0_0_1.Op != OpConst64 {
  9087  			break
  9088  		}
  9089  		c1 := v_0_0_1.AuxInt
  9090  		v_0_1 := v_0.Args[1]
  9091  		if v_0_1.Op != OpConst64 {
  9092  			break
  9093  		}
  9094  		c2 := v_0_1.AuxInt
  9095  		v_1 := v.Args[1]
  9096  		if v_1.Op != OpConst64 {
  9097  			break
  9098  		}
  9099  		c3 := v_1.AuxInt
  9100  		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
  9101  			break
  9102  		}
  9103  		v.reset(OpRsh64Ux64)
  9104  		v.AddArg(x)
  9105  		v0 := b.NewValue0(v.Pos, OpConst64, config.fe.TypeUInt64())
  9106  		v0.AuxInt = c1 - c2 + c3
  9107  		v.AddArg(v0)
  9108  		return true
  9109  	}
  9110  	return false
  9111  }
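// Illustrative sketch (not part of the generated rules; the helper name is
// hypothetical). The Rsh64Ux64 constant rules above mirror Go's own shift
// semantics: two constant operands fold to a single Const64, and an unsigned
// shift by 64 or more bits is defined to produce zero, which is what makes
// the "uint64(c) >= 64 => (Const64 [0])" rule sound.
func exampleRsh64UxFold(c, d uint64) uint64 {
	// For d >= 64 this is already 0 in Go, matching the width rule;
	// otherwise it is the value the constant-folding rule records in AuxInt.
	return c >> d
}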
  9112  func rewriteValuegeneric_OpRsh64Ux8(v *Value, config *Config) bool {
  9113  	b := v.Block
  9114  	_ = b
  9115  	// match: (Rsh64Ux8  <t> x (Const8  [c]))
  9116  	// cond:
  9117  	// result: (Rsh64Ux64 x (Const64 <t> [int64(uint8(c))]))
  9118  	for {
  9119  		t := v.Type
  9120  		x := v.Args[0]
  9121  		v_1 := v.Args[1]
  9122  		if v_1.Op != OpConst8 {
  9123  			break
  9124  		}
  9125  		c := v_1.AuxInt
  9126  		v.reset(OpRsh64Ux64)
  9127  		v.AddArg(x)
  9128  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  9129  		v0.AuxInt = int64(uint8(c))
  9130  		v.AddArg(v0)
  9131  		return true
  9132  	}
  9133  	// match: (Rsh64Ux8 (Const64 [0]) _)
  9134  	// cond:
  9135  	// result: (Const64 [0])
  9136  	for {
  9137  		v_0 := v.Args[0]
  9138  		if v_0.Op != OpConst64 {
  9139  			break
  9140  		}
  9141  		if v_0.AuxInt != 0 {
  9142  			break
  9143  		}
  9144  		v.reset(OpConst64)
  9145  		v.AuxInt = 0
  9146  		return true
  9147  	}
  9148  	return false
  9149  }
  9150  func rewriteValuegeneric_OpRsh64x16(v *Value, config *Config) bool {
  9151  	b := v.Block
  9152  	_ = b
  9153  	// match: (Rsh64x16  <t> x (Const16 [c]))
  9154  	// cond:
  9155  	// result: (Rsh64x64  x (Const64 <t> [int64(uint16(c))]))
  9156  	for {
  9157  		t := v.Type
  9158  		x := v.Args[0]
  9159  		v_1 := v.Args[1]
  9160  		if v_1.Op != OpConst16 {
  9161  			break
  9162  		}
  9163  		c := v_1.AuxInt
  9164  		v.reset(OpRsh64x64)
  9165  		v.AddArg(x)
  9166  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  9167  		v0.AuxInt = int64(uint16(c))
  9168  		v.AddArg(v0)
  9169  		return true
  9170  	}
  9171  	// match: (Rsh64x16  (Const64 [0]) _)
  9172  	// cond:
  9173  	// result: (Const64 [0])
  9174  	for {
  9175  		v_0 := v.Args[0]
  9176  		if v_0.Op != OpConst64 {
  9177  			break
  9178  		}
  9179  		if v_0.AuxInt != 0 {
  9180  			break
  9181  		}
  9182  		v.reset(OpConst64)
  9183  		v.AuxInt = 0
  9184  		return true
  9185  	}
  9186  	return false
  9187  }
  9188  func rewriteValuegeneric_OpRsh64x32(v *Value, config *Config) bool {
  9189  	b := v.Block
  9190  	_ = b
  9191  	// match: (Rsh64x32  <t> x (Const32 [c]))
  9192  	// cond:
  9193  	// result: (Rsh64x64  x (Const64 <t> [int64(uint32(c))]))
  9194  	for {
  9195  		t := v.Type
  9196  		x := v.Args[0]
  9197  		v_1 := v.Args[1]
  9198  		if v_1.Op != OpConst32 {
  9199  			break
  9200  		}
  9201  		c := v_1.AuxInt
  9202  		v.reset(OpRsh64x64)
  9203  		v.AddArg(x)
  9204  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  9205  		v0.AuxInt = int64(uint32(c))
  9206  		v.AddArg(v0)
  9207  		return true
  9208  	}
  9209  	// match: (Rsh64x32  (Const64 [0]) _)
  9210  	// cond:
  9211  	// result: (Const64 [0])
  9212  	for {
  9213  		v_0 := v.Args[0]
  9214  		if v_0.Op != OpConst64 {
  9215  			break
  9216  		}
  9217  		if v_0.AuxInt != 0 {
  9218  			break
  9219  		}
  9220  		v.reset(OpConst64)
  9221  		v.AuxInt = 0
  9222  		return true
  9223  	}
  9224  	return false
  9225  }
  9226  func rewriteValuegeneric_OpRsh64x64(v *Value, config *Config) bool {
  9227  	b := v.Block
  9228  	_ = b
  9229  	// match: (Rsh64x64  (Const64 [c]) (Const64 [d]))
  9230  	// cond:
  9231  	// result: (Const64 [c >> uint64(d)])
  9232  	for {
  9233  		v_0 := v.Args[0]
  9234  		if v_0.Op != OpConst64 {
  9235  			break
  9236  		}
  9237  		c := v_0.AuxInt
  9238  		v_1 := v.Args[1]
  9239  		if v_1.Op != OpConst64 {
  9240  			break
  9241  		}
  9242  		d := v_1.AuxInt
  9243  		v.reset(OpConst64)
  9244  		v.AuxInt = c >> uint64(d)
  9245  		return true
  9246  	}
  9247  	// match: (Rsh64x64  x (Const64 [0]))
  9248  	// cond:
  9249  	// result: x
  9250  	for {
  9251  		x := v.Args[0]
  9252  		v_1 := v.Args[1]
  9253  		if v_1.Op != OpConst64 {
  9254  			break
  9255  		}
  9256  		if v_1.AuxInt != 0 {
  9257  			break
  9258  		}
  9259  		v.reset(OpCopy)
  9260  		v.Type = x.Type
  9261  		v.AddArg(x)
  9262  		return true
  9263  	}
  9264  	// match: (Rsh64x64  (Const64 [0]) _)
  9265  	// cond:
  9266  	// result: (Const64 [0])
  9267  	for {
  9268  		v_0 := v.Args[0]
  9269  		if v_0.Op != OpConst64 {
  9270  			break
  9271  		}
  9272  		if v_0.AuxInt != 0 {
  9273  			break
  9274  		}
  9275  		v.reset(OpConst64)
  9276  		v.AuxInt = 0
  9277  		return true
  9278  	}
  9279  	// match: (Rsh64x64 <t> (Rsh64x64 x (Const64 [c])) (Const64 [d]))
  9280  	// cond: !uaddOvf(c,d)
  9281  	// result: (Rsh64x64 x (Const64 <t> [c+d]))
  9282  	for {
  9283  		t := v.Type
  9284  		v_0 := v.Args[0]
  9285  		if v_0.Op != OpRsh64x64 {
  9286  			break
  9287  		}
  9288  		x := v_0.Args[0]
  9289  		v_0_1 := v_0.Args[1]
  9290  		if v_0_1.Op != OpConst64 {
  9291  			break
  9292  		}
  9293  		c := v_0_1.AuxInt
  9294  		v_1 := v.Args[1]
  9295  		if v_1.Op != OpConst64 {
  9296  			break
  9297  		}
  9298  		d := v_1.AuxInt
  9299  		if !(!uaddOvf(c, d)) {
  9300  			break
  9301  		}
  9302  		v.reset(OpRsh64x64)
  9303  		v.AddArg(x)
  9304  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  9305  		v0.AuxInt = c + d
  9306  		v.AddArg(v0)
  9307  		return true
  9308  	}
  9309  	return false
  9310  }
  9311  func rewriteValuegeneric_OpRsh64x8(v *Value, config *Config) bool {
  9312  	b := v.Block
  9313  	_ = b
  9314  	// match: (Rsh64x8   <t> x (Const8  [c]))
  9315  	// cond:
  9316  	// result: (Rsh64x64  x (Const64 <t> [int64(uint8(c))]))
  9317  	for {
  9318  		t := v.Type
  9319  		x := v.Args[0]
  9320  		v_1 := v.Args[1]
  9321  		if v_1.Op != OpConst8 {
  9322  			break
  9323  		}
  9324  		c := v_1.AuxInt
  9325  		v.reset(OpRsh64x64)
  9326  		v.AddArg(x)
  9327  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  9328  		v0.AuxInt = int64(uint8(c))
  9329  		v.AddArg(v0)
  9330  		return true
  9331  	}
  9332  	// match: (Rsh64x8  (Const64 [0]) _)
  9333  	// cond:
  9334  	// result: (Const64 [0])
  9335  	for {
  9336  		v_0 := v.Args[0]
  9337  		if v_0.Op != OpConst64 {
  9338  			break
  9339  		}
  9340  		if v_0.AuxInt != 0 {
  9341  			break
  9342  		}
  9343  		v.reset(OpConst64)
  9344  		v.AuxInt = 0
  9345  		return true
  9346  	}
  9347  	return false
  9348  }
  9349  func rewriteValuegeneric_OpRsh8Ux16(v *Value, config *Config) bool {
  9350  	b := v.Block
  9351  	_ = b
  9352  	// match: (Rsh8Ux16 <t> x (Const16 [c]))
  9353  	// cond:
  9354  	// result: (Rsh8Ux64 x (Const64 <t> [int64(uint16(c))]))
  9355  	for {
  9356  		t := v.Type
  9357  		x := v.Args[0]
  9358  		v_1 := v.Args[1]
  9359  		if v_1.Op != OpConst16 {
  9360  			break
  9361  		}
  9362  		c := v_1.AuxInt
  9363  		v.reset(OpRsh8Ux64)
  9364  		v.AddArg(x)
  9365  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  9366  		v0.AuxInt = int64(uint16(c))
  9367  		v.AddArg(v0)
  9368  		return true
  9369  	}
  9370  	// match: (Rsh8Ux16  (Const8 [0]) _)
  9371  	// cond:
  9372  	// result: (Const8  [0])
  9373  	for {
  9374  		v_0 := v.Args[0]
  9375  		if v_0.Op != OpConst8 {
  9376  			break
  9377  		}
  9378  		if v_0.AuxInt != 0 {
  9379  			break
  9380  		}
  9381  		v.reset(OpConst8)
  9382  		v.AuxInt = 0
  9383  		return true
  9384  	}
  9385  	return false
  9386  }
  9387  func rewriteValuegeneric_OpRsh8Ux32(v *Value, config *Config) bool {
  9388  	b := v.Block
  9389  	_ = b
  9390  	// match: (Rsh8Ux32 <t> x (Const32 [c]))
  9391  	// cond:
  9392  	// result: (Rsh8Ux64 x (Const64 <t> [int64(uint32(c))]))
  9393  	for {
  9394  		t := v.Type
  9395  		x := v.Args[0]
  9396  		v_1 := v.Args[1]
  9397  		if v_1.Op != OpConst32 {
  9398  			break
  9399  		}
  9400  		c := v_1.AuxInt
  9401  		v.reset(OpRsh8Ux64)
  9402  		v.AddArg(x)
  9403  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  9404  		v0.AuxInt = int64(uint32(c))
  9405  		v.AddArg(v0)
  9406  		return true
  9407  	}
  9408  	// match: (Rsh8Ux32  (Const8 [0]) _)
  9409  	// cond:
  9410  	// result: (Const8  [0])
  9411  	for {
  9412  		v_0 := v.Args[0]
  9413  		if v_0.Op != OpConst8 {
  9414  			break
  9415  		}
  9416  		if v_0.AuxInt != 0 {
  9417  			break
  9418  		}
  9419  		v.reset(OpConst8)
  9420  		v.AuxInt = 0
  9421  		return true
  9422  	}
  9423  	return false
  9424  }
  9425  func rewriteValuegeneric_OpRsh8Ux64(v *Value, config *Config) bool {
  9426  	b := v.Block
  9427  	_ = b
  9428  	// match: (Rsh8Ux64  (Const8  [c]) (Const64 [d]))
  9429  	// cond:
  9430  	// result: (Const8  [int64(int8(uint8(c) >> uint64(d)))])
  9431  	for {
  9432  		v_0 := v.Args[0]
  9433  		if v_0.Op != OpConst8 {
  9434  			break
  9435  		}
  9436  		c := v_0.AuxInt
  9437  		v_1 := v.Args[1]
  9438  		if v_1.Op != OpConst64 {
  9439  			break
  9440  		}
  9441  		d := v_1.AuxInt
  9442  		v.reset(OpConst8)
  9443  		v.AuxInt = int64(int8(uint8(c) >> uint64(d)))
  9444  		return true
  9445  	}
  9446  	// match: (Rsh8Ux64  x (Const64 [0]))
  9447  	// cond:
  9448  	// result: x
  9449  	for {
  9450  		x := v.Args[0]
  9451  		v_1 := v.Args[1]
  9452  		if v_1.Op != OpConst64 {
  9453  			break
  9454  		}
  9455  		if v_1.AuxInt != 0 {
  9456  			break
  9457  		}
  9458  		v.reset(OpCopy)
  9459  		v.Type = x.Type
  9460  		v.AddArg(x)
  9461  		return true
  9462  	}
  9463  	// match: (Rsh8Ux64  (Const8 [0]) _)
  9464  	// cond:
  9465  	// result: (Const8  [0])
  9466  	for {
  9467  		v_0 := v.Args[0]
  9468  		if v_0.Op != OpConst8 {
  9469  			break
  9470  		}
  9471  		if v_0.AuxInt != 0 {
  9472  			break
  9473  		}
  9474  		v.reset(OpConst8)
  9475  		v.AuxInt = 0
  9476  		return true
  9477  	}
  9478  	// match: (Rsh8Ux64  _ (Const64 [c]))
  9479  	// cond: uint64(c) >= 8
  9480  	// result: (Const8  [0])
  9481  	for {
  9482  		v_1 := v.Args[1]
  9483  		if v_1.Op != OpConst64 {
  9484  			break
  9485  		}
  9486  		c := v_1.AuxInt
  9487  		if !(uint64(c) >= 8) {
  9488  			break
  9489  		}
  9490  		v.reset(OpConst8)
  9491  		v.AuxInt = 0
  9492  		return true
  9493  	}
  9494  	// match: (Rsh8Ux64  <t> (Rsh8Ux64  x (Const64 [c])) (Const64 [d]))
  9495  	// cond: !uaddOvf(c,d)
  9496  	// result: (Rsh8Ux64  x (Const64 <t> [c+d]))
  9497  	for {
  9498  		t := v.Type
  9499  		v_0 := v.Args[0]
  9500  		if v_0.Op != OpRsh8Ux64 {
  9501  			break
  9502  		}
  9503  		x := v_0.Args[0]
  9504  		v_0_1 := v_0.Args[1]
  9505  		if v_0_1.Op != OpConst64 {
  9506  			break
  9507  		}
  9508  		c := v_0_1.AuxInt
  9509  		v_1 := v.Args[1]
  9510  		if v_1.Op != OpConst64 {
  9511  			break
  9512  		}
  9513  		d := v_1.AuxInt
  9514  		if !(!uaddOvf(c, d)) {
  9515  			break
  9516  		}
  9517  		v.reset(OpRsh8Ux64)
  9518  		v.AddArg(x)
  9519  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  9520  		v0.AuxInt = c + d
  9521  		v.AddArg(v0)
  9522  		return true
  9523  	}
  9524  	// match: (Rsh8Ux64 (Lsh8x64 (Rsh8Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
  9525  	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
  9526  	// result: (Rsh8Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
  9527  	for {
  9528  		v_0 := v.Args[0]
  9529  		if v_0.Op != OpLsh8x64 {
  9530  			break
  9531  		}
  9532  		v_0_0 := v_0.Args[0]
  9533  		if v_0_0.Op != OpRsh8Ux64 {
  9534  			break
  9535  		}
  9536  		x := v_0_0.Args[0]
  9537  		v_0_0_1 := v_0_0.Args[1]
  9538  		if v_0_0_1.Op != OpConst64 {
  9539  			break
  9540  		}
  9541  		c1 := v_0_0_1.AuxInt
  9542  		v_0_1 := v_0.Args[1]
  9543  		if v_0_1.Op != OpConst64 {
  9544  			break
  9545  		}
  9546  		c2 := v_0_1.AuxInt
  9547  		v_1 := v.Args[1]
  9548  		if v_1.Op != OpConst64 {
  9549  			break
  9550  		}
  9551  		c3 := v_1.AuxInt
  9552  		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
  9553  			break
  9554  		}
  9555  		v.reset(OpRsh8Ux64)
  9556  		v.AddArg(x)
  9557  		v0 := b.NewValue0(v.Pos, OpConst64, config.fe.TypeUInt64())
  9558  		v0.AuxInt = c1 - c2 + c3
  9559  		v.AddArg(v0)
  9560  		return true
  9561  	}
  9562  	return false
  9563  }
  9564  func rewriteValuegeneric_OpRsh8Ux8(v *Value, config *Config) bool {
  9565  	b := v.Block
  9566  	_ = b
  9567  	// match: (Rsh8Ux8  <t> x (Const8  [c]))
  9568  	// cond:
  9569  	// result: (Rsh8Ux64 x (Const64 <t> [int64(uint8(c))]))
  9570  	for {
  9571  		t := v.Type
  9572  		x := v.Args[0]
  9573  		v_1 := v.Args[1]
  9574  		if v_1.Op != OpConst8 {
  9575  			break
  9576  		}
  9577  		c := v_1.AuxInt
  9578  		v.reset(OpRsh8Ux64)
  9579  		v.AddArg(x)
  9580  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  9581  		v0.AuxInt = int64(uint8(c))
  9582  		v.AddArg(v0)
  9583  		return true
  9584  	}
  9585  	// match: (Rsh8Ux8  (Const8 [0]) _)
  9586  	// cond:
  9587  	// result: (Const8  [0])
  9588  	for {
  9589  		v_0 := v.Args[0]
  9590  		if v_0.Op != OpConst8 {
  9591  			break
  9592  		}
  9593  		if v_0.AuxInt != 0 {
  9594  			break
  9595  		}
  9596  		v.reset(OpConst8)
  9597  		v.AuxInt = 0
  9598  		return true
  9599  	}
  9600  	return false
  9601  }
  9602  func rewriteValuegeneric_OpRsh8x16(v *Value, config *Config) bool {
  9603  	b := v.Block
  9604  	_ = b
  9605  	// match: (Rsh8x16  <t> x (Const16 [c]))
  9606  	// cond:
  9607  	// result: (Rsh8x64  x (Const64 <t> [int64(uint16(c))]))
  9608  	for {
  9609  		t := v.Type
  9610  		x := v.Args[0]
  9611  		v_1 := v.Args[1]
  9612  		if v_1.Op != OpConst16 {
  9613  			break
  9614  		}
  9615  		c := v_1.AuxInt
  9616  		v.reset(OpRsh8x64)
  9617  		v.AddArg(x)
  9618  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  9619  		v0.AuxInt = int64(uint16(c))
  9620  		v.AddArg(v0)
  9621  		return true
  9622  	}
  9623  	// match: (Rsh8x16   (Const8 [0]) _)
  9624  	// cond:
  9625  	// result: (Const8  [0])
  9626  	for {
  9627  		v_0 := v.Args[0]
  9628  		if v_0.Op != OpConst8 {
  9629  			break
  9630  		}
  9631  		if v_0.AuxInt != 0 {
  9632  			break
  9633  		}
  9634  		v.reset(OpConst8)
  9635  		v.AuxInt = 0
  9636  		return true
  9637  	}
  9638  	return false
  9639  }
  9640  func rewriteValuegeneric_OpRsh8x32(v *Value, config *Config) bool {
  9641  	b := v.Block
  9642  	_ = b
  9643  	// match: (Rsh8x32  <t> x (Const32 [c]))
  9644  	// cond:
  9645  	// result: (Rsh8x64  x (Const64 <t> [int64(uint32(c))]))
  9646  	for {
  9647  		t := v.Type
  9648  		x := v.Args[0]
  9649  		v_1 := v.Args[1]
  9650  		if v_1.Op != OpConst32 {
  9651  			break
  9652  		}
  9653  		c := v_1.AuxInt
  9654  		v.reset(OpRsh8x64)
  9655  		v.AddArg(x)
  9656  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  9657  		v0.AuxInt = int64(uint32(c))
  9658  		v.AddArg(v0)
  9659  		return true
  9660  	}
  9661  	// match: (Rsh8x32   (Const8 [0]) _)
  9662  	// cond:
  9663  	// result: (Const8  [0])
  9664  	for {
  9665  		v_0 := v.Args[0]
  9666  		if v_0.Op != OpConst8 {
  9667  			break
  9668  		}
  9669  		if v_0.AuxInt != 0 {
  9670  			break
  9671  		}
  9672  		v.reset(OpConst8)
  9673  		v.AuxInt = 0
  9674  		return true
  9675  	}
  9676  	return false
  9677  }
  9678  func rewriteValuegeneric_OpRsh8x64(v *Value, config *Config) bool {
  9679  	b := v.Block
  9680  	_ = b
  9681  	// match: (Rsh8x64   (Const8  [c]) (Const64 [d]))
  9682  	// cond:
  9683  	// result: (Const8  [int64(int8(c) >> uint64(d))])
  9684  	for {
  9685  		v_0 := v.Args[0]
  9686  		if v_0.Op != OpConst8 {
  9687  			break
  9688  		}
  9689  		c := v_0.AuxInt
  9690  		v_1 := v.Args[1]
  9691  		if v_1.Op != OpConst64 {
  9692  			break
  9693  		}
  9694  		d := v_1.AuxInt
  9695  		v.reset(OpConst8)
  9696  		v.AuxInt = int64(int8(c) >> uint64(d))
  9697  		return true
  9698  	}
  9699  	// match: (Rsh8x64   x (Const64 [0]))
  9700  	// cond:
  9701  	// result: x
  9702  	for {
  9703  		x := v.Args[0]
  9704  		v_1 := v.Args[1]
  9705  		if v_1.Op != OpConst64 {
  9706  			break
  9707  		}
  9708  		if v_1.AuxInt != 0 {
  9709  			break
  9710  		}
  9711  		v.reset(OpCopy)
  9712  		v.Type = x.Type
  9713  		v.AddArg(x)
  9714  		return true
  9715  	}
  9716  	// match: (Rsh8x64   (Const8 [0]) _)
  9717  	// cond:
  9718  	// result: (Const8  [0])
  9719  	for {
  9720  		v_0 := v.Args[0]
  9721  		if v_0.Op != OpConst8 {
  9722  			break
  9723  		}
  9724  		if v_0.AuxInt != 0 {
  9725  			break
  9726  		}
  9727  		v.reset(OpConst8)
  9728  		v.AuxInt = 0
  9729  		return true
  9730  	}
  9731  	// match: (Rsh8x64  <t> (Rsh8x64  x (Const64 [c])) (Const64 [d]))
  9732  	// cond: !uaddOvf(c,d)
  9733  	// result: (Rsh8x64  x (Const64 <t> [c+d]))
  9734  	for {
  9735  		t := v.Type
  9736  		v_0 := v.Args[0]
  9737  		if v_0.Op != OpRsh8x64 {
  9738  			break
  9739  		}
  9740  		x := v_0.Args[0]
  9741  		v_0_1 := v_0.Args[1]
  9742  		if v_0_1.Op != OpConst64 {
  9743  			break
  9744  		}
  9745  		c := v_0_1.AuxInt
  9746  		v_1 := v.Args[1]
  9747  		if v_1.Op != OpConst64 {
  9748  			break
  9749  		}
  9750  		d := v_1.AuxInt
  9751  		if !(!uaddOvf(c, d)) {
  9752  			break
  9753  		}
  9754  		v.reset(OpRsh8x64)
  9755  		v.AddArg(x)
  9756  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  9757  		v0.AuxInt = c + d
  9758  		v.AddArg(v0)
  9759  		return true
  9760  	}
  9761  	return false
  9762  }
  9763  func rewriteValuegeneric_OpRsh8x8(v *Value, config *Config) bool {
  9764  	b := v.Block
  9765  	_ = b
  9766  	// match: (Rsh8x8   <t> x (Const8  [c]))
  9767  	// cond:
  9768  	// result: (Rsh8x64  x (Const64 <t> [int64(uint8(c))]))
  9769  	for {
  9770  		t := v.Type
  9771  		x := v.Args[0]
  9772  		v_1 := v.Args[1]
  9773  		if v_1.Op != OpConst8 {
  9774  			break
  9775  		}
  9776  		c := v_1.AuxInt
  9777  		v.reset(OpRsh8x64)
  9778  		v.AddArg(x)
  9779  		v0 := b.NewValue0(v.Pos, OpConst64, t)
  9780  		v0.AuxInt = int64(uint8(c))
  9781  		v.AddArg(v0)
  9782  		return true
  9783  	}
  9784  	// match: (Rsh8x8   (Const8 [0]) _)
  9785  	// cond:
  9786  	// result: (Const8  [0])
  9787  	for {
  9788  		v_0 := v.Args[0]
  9789  		if v_0.Op != OpConst8 {
  9790  			break
  9791  		}
  9792  		if v_0.AuxInt != 0 {
  9793  			break
  9794  		}
  9795  		v.reset(OpConst8)
  9796  		v.AuxInt = 0
  9797  		return true
  9798  	}
  9799  	return false
  9800  }
  9801  func rewriteValuegeneric_OpSignExt16to32(v *Value, config *Config) bool {
  9802  	b := v.Block
  9803  	_ = b
  9804  	// match: (SignExt16to32 (Const16 [c]))
  9805  	// cond:
  9806  	// result: (Const32 [int64( int16(c))])
  9807  	for {
  9808  		v_0 := v.Args[0]
  9809  		if v_0.Op != OpConst16 {
  9810  			break
  9811  		}
  9812  		c := v_0.AuxInt
  9813  		v.reset(OpConst32)
  9814  		v.AuxInt = int64(int16(c))
  9815  		return true
  9816  	}
  9817  	// match: (SignExt16to32 (Trunc32to16 x:(Rsh32x64 _ (Const64 [s]))))
  9818  	// cond: s >= 16
  9819  	// result: x
  9820  	for {
  9821  		v_0 := v.Args[0]
  9822  		if v_0.Op != OpTrunc32to16 {
  9823  			break
  9824  		}
  9825  		x := v_0.Args[0]
  9826  		if x.Op != OpRsh32x64 {
  9827  			break
  9828  		}
  9829  		x_1 := x.Args[1]
  9830  		if x_1.Op != OpConst64 {
  9831  			break
  9832  		}
  9833  		s := x_1.AuxInt
  9834  		if !(s >= 16) {
  9835  			break
  9836  		}
  9837  		v.reset(OpCopy)
  9838  		v.Type = x.Type
  9839  		v.AddArg(x)
  9840  		return true
  9841  	}
  9842  	return false
  9843  }
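// Illustrative sketch (not part of the generated rules; the helper name is
// hypothetical). The second SignExt16to32 rule above drops a redundant
// truncate/extend pair: after an arithmetic right shift by at least 16,
// bits 16..31 of a 32-bit value are all copies of bit 15, so truncating to
// 16 bits and sign-extending back reproduces the same value.
func exampleSignExtOfShiftedValue(x int32, s uint64) (shifted, roundTrip int32) {
	if s < 16 {
		s = 16 // the rule only fires for shift counts of at least 16
	}
	shifted = x >> s
	roundTrip = int32(int16(shifted)) // Trunc32to16 followed by SignExt16to32
	return
}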
  9844  func rewriteValuegeneric_OpSignExt16to64(v *Value, config *Config) bool {
  9845  	b := v.Block
  9846  	_ = b
  9847  	// match: (SignExt16to64 (Const16 [c]))
  9848  	// cond:
  9849  	// result: (Const64 [int64( int16(c))])
  9850  	for {
  9851  		v_0 := v.Args[0]
  9852  		if v_0.Op != OpConst16 {
  9853  			break
  9854  		}
  9855  		c := v_0.AuxInt
  9856  		v.reset(OpConst64)
  9857  		v.AuxInt = int64(int16(c))
  9858  		return true
  9859  	}
  9860  	// match: (SignExt16to64 (Trunc64to16 x:(Rsh64x64 _ (Const64 [s]))))
  9861  	// cond: s >= 48
  9862  	// result: x
  9863  	for {
  9864  		v_0 := v.Args[0]
  9865  		if v_0.Op != OpTrunc64to16 {
  9866  			break
  9867  		}
  9868  		x := v_0.Args[0]
  9869  		if x.Op != OpRsh64x64 {
  9870  			break
  9871  		}
  9872  		x_1 := x.Args[1]
  9873  		if x_1.Op != OpConst64 {
  9874  			break
  9875  		}
  9876  		s := x_1.AuxInt
  9877  		if !(s >= 48) {
  9878  			break
  9879  		}
  9880  		v.reset(OpCopy)
  9881  		v.Type = x.Type
  9882  		v.AddArg(x)
  9883  		return true
  9884  	}
  9885  	return false
  9886  }
  9887  func rewriteValuegeneric_OpSignExt32to64(v *Value, config *Config) bool {
  9888  	b := v.Block
  9889  	_ = b
  9890  	// match: (SignExt32to64 (Const32 [c]))
  9891  	// cond:
  9892  	// result: (Const64 [int64( int32(c))])
  9893  	for {
  9894  		v_0 := v.Args[0]
  9895  		if v_0.Op != OpConst32 {
  9896  			break
  9897  		}
  9898  		c := v_0.AuxInt
  9899  		v.reset(OpConst64)
  9900  		v.AuxInt = int64(int32(c))
  9901  		return true
  9902  	}
  9903  	// match: (SignExt32to64 (Trunc64to32 x:(Rsh64x64 _ (Const64 [s]))))
  9904  	// cond: s >= 32
  9905  	// result: x
  9906  	for {
  9907  		v_0 := v.Args[0]
  9908  		if v_0.Op != OpTrunc64to32 {
  9909  			break
  9910  		}
  9911  		x := v_0.Args[0]
  9912  		if x.Op != OpRsh64x64 {
  9913  			break
  9914  		}
  9915  		x_1 := x.Args[1]
  9916  		if x_1.Op != OpConst64 {
  9917  			break
  9918  		}
  9919  		s := x_1.AuxInt
  9920  		if !(s >= 32) {
  9921  			break
  9922  		}
  9923  		v.reset(OpCopy)
  9924  		v.Type = x.Type
  9925  		v.AddArg(x)
  9926  		return true
  9927  	}
  9928  	return false
  9929  }
  9930  func rewriteValuegeneric_OpSignExt8to16(v *Value, config *Config) bool {
  9931  	b := v.Block
  9932  	_ = b
  9933  	// match: (SignExt8to16  (Const8  [c]))
  9934  	// cond:
  9935  	// result: (Const16 [int64(  int8(c))])
  9936  	for {
  9937  		v_0 := v.Args[0]
  9938  		if v_0.Op != OpConst8 {
  9939  			break
  9940  		}
  9941  		c := v_0.AuxInt
  9942  		v.reset(OpConst16)
  9943  		v.AuxInt = int64(int8(c))
  9944  		return true
  9945  	}
  9946  	// match: (SignExt8to16  (Trunc16to8  x:(Rsh16x64 _ (Const64 [s]))))
  9947  	// cond: s >= 8
  9948  	// result: x
  9949  	for {
  9950  		v_0 := v.Args[0]
  9951  		if v_0.Op != OpTrunc16to8 {
  9952  			break
  9953  		}
  9954  		x := v_0.Args[0]
  9955  		if x.Op != OpRsh16x64 {
  9956  			break
  9957  		}
  9958  		x_1 := x.Args[1]
  9959  		if x_1.Op != OpConst64 {
  9960  			break
  9961  		}
  9962  		s := x_1.AuxInt
  9963  		if !(s >= 8) {
  9964  			break
  9965  		}
  9966  		v.reset(OpCopy)
  9967  		v.Type = x.Type
  9968  		v.AddArg(x)
  9969  		return true
  9970  	}
  9971  	return false
  9972  }
  9973  func rewriteValuegeneric_OpSignExt8to32(v *Value, config *Config) bool {
  9974  	b := v.Block
  9975  	_ = b
  9976  	// match: (SignExt8to32  (Const8  [c]))
  9977  	// cond:
  9978  	// result: (Const32 [int64(  int8(c))])
  9979  	for {
  9980  		v_0 := v.Args[0]
  9981  		if v_0.Op != OpConst8 {
  9982  			break
  9983  		}
  9984  		c := v_0.AuxInt
  9985  		v.reset(OpConst32)
  9986  		v.AuxInt = int64(int8(c))
  9987  		return true
  9988  	}
  9989  	// match: (SignExt8to32  (Trunc32to8  x:(Rsh32x64 _ (Const64 [s]))))
  9990  	// cond: s >= 24
  9991  	// result: x
  9992  	for {
  9993  		v_0 := v.Args[0]
  9994  		if v_0.Op != OpTrunc32to8 {
  9995  			break
  9996  		}
  9997  		x := v_0.Args[0]
  9998  		if x.Op != OpRsh32x64 {
  9999  			break
 10000  		}
 10001  		x_1 := x.Args[1]
 10002  		if x_1.Op != OpConst64 {
 10003  			break
 10004  		}
 10005  		s := x_1.AuxInt
 10006  		if !(s >= 24) {
 10007  			break
 10008  		}
 10009  		v.reset(OpCopy)
 10010  		v.Type = x.Type
 10011  		v.AddArg(x)
 10012  		return true
 10013  	}
 10014  	return false
 10015  }
 10016  func rewriteValuegeneric_OpSignExt8to64(v *Value, config *Config) bool {
 10017  	b := v.Block
 10018  	_ = b
 10019  	// match: (SignExt8to64  (Const8  [c]))
 10020  	// cond:
 10021  	// result: (Const64 [int64(  int8(c))])
 10022  	for {
 10023  		v_0 := v.Args[0]
 10024  		if v_0.Op != OpConst8 {
 10025  			break
 10026  		}
 10027  		c := v_0.AuxInt
 10028  		v.reset(OpConst64)
 10029  		v.AuxInt = int64(int8(c))
 10030  		return true
 10031  	}
 10032  	// match: (SignExt8to64  (Trunc64to8  x:(Rsh64x64 _ (Const64 [s]))))
 10033  	// cond: s >= 56
 10034  	// result: x
 10035  	for {
 10036  		v_0 := v.Args[0]
 10037  		if v_0.Op != OpTrunc64to8 {
 10038  			break
 10039  		}
 10040  		x := v_0.Args[0]
 10041  		if x.Op != OpRsh64x64 {
 10042  			break
 10043  		}
 10044  		x_1 := x.Args[1]
 10045  		if x_1.Op != OpConst64 {
 10046  			break
 10047  		}
 10048  		s := x_1.AuxInt
 10049  		if !(s >= 56) {
 10050  			break
 10051  		}
 10052  		v.reset(OpCopy)
 10053  		v.Type = x.Type
 10054  		v.AddArg(x)
 10055  		return true
 10056  	}
 10057  	return false
 10058  }
 10059  func rewriteValuegeneric_OpSliceCap(v *Value, config *Config) bool {
 10060  	b := v.Block
 10061  	_ = b
 10062  	// match: (SliceCap (SliceMake _ _ (Const64 <t> [c])))
 10063  	// cond:
 10064  	// result: (Const64 <t> [c])
 10065  	for {
 10066  		v_0 := v.Args[0]
 10067  		if v_0.Op != OpSliceMake {
 10068  			break
 10069  		}
 10070  		v_0_2 := v_0.Args[2]
 10071  		if v_0_2.Op != OpConst64 {
 10072  			break
 10073  		}
 10074  		t := v_0_2.Type
 10075  		c := v_0_2.AuxInt
 10076  		v.reset(OpConst64)
 10077  		v.Type = t
 10078  		v.AuxInt = c
 10079  		return true
 10080  	}
 10081  	// match: (SliceCap (SliceMake _ _ (Const32 <t> [c])))
 10082  	// cond:
 10083  	// result: (Const32 <t> [c])
 10084  	for {
 10085  		v_0 := v.Args[0]
 10086  		if v_0.Op != OpSliceMake {
 10087  			break
 10088  		}
 10089  		v_0_2 := v_0.Args[2]
 10090  		if v_0_2.Op != OpConst32 {
 10091  			break
 10092  		}
 10093  		t := v_0_2.Type
 10094  		c := v_0_2.AuxInt
 10095  		v.reset(OpConst32)
 10096  		v.Type = t
 10097  		v.AuxInt = c
 10098  		return true
 10099  	}
 10100  	// match: (SliceCap (SliceMake _ _ (SliceCap x)))
 10101  	// cond:
 10102  	// result: (SliceCap x)
 10103  	for {
 10104  		v_0 := v.Args[0]
 10105  		if v_0.Op != OpSliceMake {
 10106  			break
 10107  		}
 10108  		v_0_2 := v_0.Args[2]
 10109  		if v_0_2.Op != OpSliceCap {
 10110  			break
 10111  		}
 10112  		x := v_0_2.Args[0]
 10113  		v.reset(OpSliceCap)
 10114  		v.AddArg(x)
 10115  		return true
 10116  	}
 10117  	// match: (SliceCap (SliceMake _ _ (SliceLen x)))
 10118  	// cond:
 10119  	// result: (SliceLen x)
 10120  	for {
 10121  		v_0 := v.Args[0]
 10122  		if v_0.Op != OpSliceMake {
 10123  			break
 10124  		}
 10125  		v_0_2 := v_0.Args[2]
 10126  		if v_0_2.Op != OpSliceLen {
 10127  			break
 10128  		}
 10129  		x := v_0_2.Args[0]
 10130  		v.reset(OpSliceLen)
 10131  		v.AddArg(x)
 10132  		return true
 10133  	}
 10134  	return false
 10135  }
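// Illustrative sketch (not part of the generated rules; the local type and
// helper are hypothetical stand-ins for the SSA slice triple). SliceMake
// builds a slice value from (ptr, len, cap) and SlicePtr/SliceLen/SliceCap
// are the matching projections, so a projection applied directly to a
// SliceMake simply selects — or, for a SliceCap/SliceLen component, forwards —
// that component.
func exampleSliceCapOfMake(p *byte, l, c int64) int64 {
	type sliceParts struct {
		ptr      *byte
		len, cap int64
	}
	s := sliceParts{ptr: p, len: l, cap: c} // SliceMake p l c
	return s.cap                            // SliceCap (SliceMake _ _ c) => c
}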
 10136  func rewriteValuegeneric_OpSliceLen(v *Value, config *Config) bool {
 10137  	b := v.Block
 10138  	_ = b
 10139  	// match: (SliceLen (SliceMake _ (Const64 <t> [c]) _))
 10140  	// cond:
 10141  	// result: (Const64 <t> [c])
 10142  	for {
 10143  		v_0 := v.Args[0]
 10144  		if v_0.Op != OpSliceMake {
 10145  			break
 10146  		}
 10147  		v_0_1 := v_0.Args[1]
 10148  		if v_0_1.Op != OpConst64 {
 10149  			break
 10150  		}
 10151  		t := v_0_1.Type
 10152  		c := v_0_1.AuxInt
 10153  		v.reset(OpConst64)
 10154  		v.Type = t
 10155  		v.AuxInt = c
 10156  		return true
 10157  	}
 10158  	// match: (SliceLen (SliceMake _ (Const32 <t> [c]) _))
 10159  	// cond:
 10160  	// result: (Const32 <t> [c])
 10161  	for {
 10162  		v_0 := v.Args[0]
 10163  		if v_0.Op != OpSliceMake {
 10164  			break
 10165  		}
 10166  		v_0_1 := v_0.Args[1]
 10167  		if v_0_1.Op != OpConst32 {
 10168  			break
 10169  		}
 10170  		t := v_0_1.Type
 10171  		c := v_0_1.AuxInt
 10172  		v.reset(OpConst32)
 10173  		v.Type = t
 10174  		v.AuxInt = c
 10175  		return true
 10176  	}
 10177  	// match: (SliceLen (SliceMake _ (SliceLen x) _))
 10178  	// cond:
 10179  	// result: (SliceLen x)
 10180  	for {
 10181  		v_0 := v.Args[0]
 10182  		if v_0.Op != OpSliceMake {
 10183  			break
 10184  		}
 10185  		v_0_1 := v_0.Args[1]
 10186  		if v_0_1.Op != OpSliceLen {
 10187  			break
 10188  		}
 10189  		x := v_0_1.Args[0]
 10190  		v.reset(OpSliceLen)
 10191  		v.AddArg(x)
 10192  		return true
 10193  	}
 10194  	return false
 10195  }
 10196  func rewriteValuegeneric_OpSlicePtr(v *Value, config *Config) bool {
 10197  	b := v.Block
 10198  	_ = b
 10199  	// match: (SlicePtr (SliceMake (SlicePtr x) _ _))
 10200  	// cond:
 10201  	// result: (SlicePtr x)
 10202  	for {
 10203  		v_0 := v.Args[0]
 10204  		if v_0.Op != OpSliceMake {
 10205  			break
 10206  		}
 10207  		v_0_0 := v_0.Args[0]
 10208  		if v_0_0.Op != OpSlicePtr {
 10209  			break
 10210  		}
 10211  		x := v_0_0.Args[0]
 10212  		v.reset(OpSlicePtr)
 10213  		v.AddArg(x)
 10214  		return true
 10215  	}
 10216  	return false
 10217  }
 10218  func rewriteValuegeneric_OpSlicemask(v *Value, config *Config) bool {
 10219  	b := v.Block
 10220  	_ = b
 10221  	// match: (Slicemask (Const32 [x]))
 10222  	// cond: x > 0
 10223  	// result: (Const32 [-1])
 10224  	for {
 10225  		v_0 := v.Args[0]
 10226  		if v_0.Op != OpConst32 {
 10227  			break
 10228  		}
 10229  		x := v_0.AuxInt
 10230  		if !(x > 0) {
 10231  			break
 10232  		}
 10233  		v.reset(OpConst32)
 10234  		v.AuxInt = -1
 10235  		return true
 10236  	}
 10237  	// match: (Slicemask (Const32 [0]))
 10238  	// cond:
 10239  	// result: (Const32 [0])
 10240  	for {
 10241  		v_0 := v.Args[0]
 10242  		if v_0.Op != OpConst32 {
 10243  			break
 10244  		}
 10245  		if v_0.AuxInt != 0 {
 10246  			break
 10247  		}
 10248  		v.reset(OpConst32)
 10249  		v.AuxInt = 0
 10250  		return true
 10251  	}
 10252  	// match: (Slicemask (Const64 [x]))
 10253  	// cond: x > 0
 10254  	// result: (Const64 [-1])
 10255  	for {
 10256  		v_0 := v.Args[0]
 10257  		if v_0.Op != OpConst64 {
 10258  			break
 10259  		}
 10260  		x := v_0.AuxInt
 10261  		if !(x > 0) {
 10262  			break
 10263  		}
 10264  		v.reset(OpConst64)
 10265  		v.AuxInt = -1
 10266  		return true
 10267  	}
 10268  	// match: (Slicemask (Const64 [0]))
 10269  	// cond:
 10270  	// result: (Const64 [0])
 10271  	for {
 10272  		v_0 := v.Args[0]
 10273  		if v_0.Op != OpConst64 {
 10274  			break
 10275  		}
 10276  		if v_0.AuxInt != 0 {
 10277  			break
 10278  		}
 10279  		v.reset(OpConst64)
 10280  		v.AuxInt = 0
 10281  		return true
 10282  	}
 10283  	return false
 10284  }
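// Illustrative sketch (not part of the generated rules; the helper name is
// hypothetical). Slicemask produces an all-ones mask for a positive length
// and zero for a zero length; the compiler uses the mask when slicing to
// avoid advancing the data pointer of an empty result. Only the x > 0 and
// x == 0 constant cases are rewritten above.
func exampleSlicemask(n int64) int64 {
	if n > 0 {
		return -1 // (Slicemask (Const64 [x])), x > 0 => (Const64 [-1])
	}
	return 0 // (Slicemask (Const64 [0])) => (Const64 [0])
}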
 10285  func rewriteValuegeneric_OpSqrt(v *Value, config *Config) bool {
 10286  	b := v.Block
 10287  	_ = b
 10288  	// match: (Sqrt (Const64F [c]))
 10289  	// cond:
 10290  	// result: (Const64F [f2i(math.Sqrt(i2f(c)))])
 10291  	for {
 10292  		v_0 := v.Args[0]
 10293  		if v_0.Op != OpConst64F {
 10294  			break
 10295  		}
 10296  		c := v_0.AuxInt
 10297  		v.reset(OpConst64F)
 10298  		v.AuxInt = f2i(math.Sqrt(i2f(c)))
 10299  		return true
 10300  	}
 10301  	return false
 10302  }
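// Illustrative sketch (not part of the generated rules; the helper name is
// hypothetical). Float constants keep their IEEE-754 bit pattern in AuxInt,
// so folding Sqrt means decoding the bits, applying math.Sqrt, and
// re-encoding; i2f and f2i above are assumed to be this package's bit-cast
// helpers, and the standalone equivalent is:
func exampleFoldSqrtAux(aux int64) int64 {
	f := math.Sqrt(math.Float64frombits(uint64(aux))) // i2f(c), then Sqrt
	return int64(math.Float64bits(f))                 // f2i(...)
}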
 10303  func rewriteValuegeneric_OpStore(v *Value, config *Config) bool {
 10304  	b := v.Block
 10305  	_ = b
 10306  	// match: (Store _ (StructMake0) mem)
 10307  	// cond:
 10308  	// result: mem
 10309  	for {
 10310  		v_1 := v.Args[1]
 10311  		if v_1.Op != OpStructMake0 {
 10312  			break
 10313  		}
 10314  		mem := v.Args[2]
 10315  		v.reset(OpCopy)
 10316  		v.Type = mem.Type
 10317  		v.AddArg(mem)
 10318  		return true
 10319  	}
 10320  	// match: (Store dst (StructMake1 <t> f0) mem)
 10321  	// cond:
 10322  	// result: (Store [t.FieldType(0).Size()] dst f0 mem)
 10323  	for {
 10324  		dst := v.Args[0]
 10325  		v_1 := v.Args[1]
 10326  		if v_1.Op != OpStructMake1 {
 10327  			break
 10328  		}
 10329  		t := v_1.Type
 10330  		f0 := v_1.Args[0]
 10331  		mem := v.Args[2]
 10332  		v.reset(OpStore)
 10333  		v.AuxInt = t.FieldType(0).Size()
 10334  		v.AddArg(dst)
 10335  		v.AddArg(f0)
 10336  		v.AddArg(mem)
 10337  		return true
 10338  	}
 10339  	// match: (Store dst (StructMake2 <t> f0 f1) mem)
 10340  	// cond:
 10341  	// result: (Store [t.FieldType(1).Size()]     (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst)     f1     (Store [t.FieldType(0).Size()] dst f0 mem))
 10342  	for {
 10343  		dst := v.Args[0]
 10344  		v_1 := v.Args[1]
 10345  		if v_1.Op != OpStructMake2 {
 10346  			break
 10347  		}
 10348  		t := v_1.Type
 10349  		f0 := v_1.Args[0]
 10350  		f1 := v_1.Args[1]
 10351  		mem := v.Args[2]
 10352  		v.reset(OpStore)
 10353  		v.AuxInt = t.FieldType(1).Size()
 10354  		v0 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo())
 10355  		v0.AuxInt = t.FieldOff(1)
 10356  		v0.AddArg(dst)
 10357  		v.AddArg(v0)
 10358  		v.AddArg(f1)
 10359  		v1 := b.NewValue0(v.Pos, OpStore, TypeMem)
 10360  		v1.AuxInt = t.FieldType(0).Size()
 10361  		v1.AddArg(dst)
 10362  		v1.AddArg(f0)
 10363  		v1.AddArg(mem)
 10364  		v.AddArg(v1)
 10365  		return true
 10366  	}
 10367  	// match: (Store dst (StructMake3 <t> f0 f1 f2) mem)
 10368  	// cond:
 10369  	// result: (Store [t.FieldType(2).Size()]     (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] dst)     f2     (Store [t.FieldType(1).Size()]       (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst)       f1       (Store [t.FieldType(0).Size()] dst f0 mem)))
 10370  	for {
 10371  		dst := v.Args[0]
 10372  		v_1 := v.Args[1]
 10373  		if v_1.Op != OpStructMake3 {
 10374  			break
 10375  		}
 10376  		t := v_1.Type
 10377  		f0 := v_1.Args[0]
 10378  		f1 := v_1.Args[1]
 10379  		f2 := v_1.Args[2]
 10380  		mem := v.Args[2]
 10381  		v.reset(OpStore)
 10382  		v.AuxInt = t.FieldType(2).Size()
 10383  		v0 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(2).PtrTo())
 10384  		v0.AuxInt = t.FieldOff(2)
 10385  		v0.AddArg(dst)
 10386  		v.AddArg(v0)
 10387  		v.AddArg(f2)
 10388  		v1 := b.NewValue0(v.Pos, OpStore, TypeMem)
 10389  		v1.AuxInt = t.FieldType(1).Size()
 10390  		v2 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo())
 10391  		v2.AuxInt = t.FieldOff(1)
 10392  		v2.AddArg(dst)
 10393  		v1.AddArg(v2)
 10394  		v1.AddArg(f1)
 10395  		v3 := b.NewValue0(v.Pos, OpStore, TypeMem)
 10396  		v3.AuxInt = t.FieldType(0).Size()
 10397  		v3.AddArg(dst)
 10398  		v3.AddArg(f0)
 10399  		v3.AddArg(mem)
 10400  		v1.AddArg(v3)
 10401  		v.AddArg(v1)
 10402  		return true
 10403  	}
 10404  	// match: (Store dst (StructMake4 <t> f0 f1 f2 f3) mem)
 10405  	// cond:
 10406  	// result: (Store [t.FieldType(3).Size()]     (OffPtr <t.FieldType(3).PtrTo()> [t.FieldOff(3)] dst)     f3     (Store [t.FieldType(2).Size()]       (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] dst)       f2       (Store [t.FieldType(1).Size()]         (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst)         f1         (Store [t.FieldType(0).Size()] dst f0 mem))))
 10407  	for {
 10408  		dst := v.Args[0]
 10409  		v_1 := v.Args[1]
 10410  		if v_1.Op != OpStructMake4 {
 10411  			break
 10412  		}
 10413  		t := v_1.Type
 10414  		f0 := v_1.Args[0]
 10415  		f1 := v_1.Args[1]
 10416  		f2 := v_1.Args[2]
 10417  		f3 := v_1.Args[3]
 10418  		mem := v.Args[2]
 10419  		v.reset(OpStore)
 10420  		v.AuxInt = t.FieldType(3).Size()
 10421  		v0 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(3).PtrTo())
 10422  		v0.AuxInt = t.FieldOff(3)
 10423  		v0.AddArg(dst)
 10424  		v.AddArg(v0)
 10425  		v.AddArg(f3)
 10426  		v1 := b.NewValue0(v.Pos, OpStore, TypeMem)
 10427  		v1.AuxInt = t.FieldType(2).Size()
 10428  		v2 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(2).PtrTo())
 10429  		v2.AuxInt = t.FieldOff(2)
 10430  		v2.AddArg(dst)
 10431  		v1.AddArg(v2)
 10432  		v1.AddArg(f2)
 10433  		v3 := b.NewValue0(v.Pos, OpStore, TypeMem)
 10434  		v3.AuxInt = t.FieldType(1).Size()
 10435  		v4 := b.NewValue0(v.Pos, OpOffPtr, t.FieldType(1).PtrTo())
 10436  		v4.AuxInt = t.FieldOff(1)
 10437  		v4.AddArg(dst)
 10438  		v3.AddArg(v4)
 10439  		v3.AddArg(f1)
 10440  		v5 := b.NewValue0(v.Pos, OpStore, TypeMem)
 10441  		v5.AuxInt = t.FieldType(0).Size()
 10442  		v5.AddArg(dst)
 10443  		v5.AddArg(f0)
 10444  		v5.AddArg(mem)
 10445  		v3.AddArg(v5)
 10446  		v1.AddArg(v3)
 10447  		v.AddArg(v1)
 10448  		return true
 10449  	}
 10450  	// match: (Store [size] dst (Load <t> src mem) mem)
 10451  	// cond: !config.fe.CanSSA(t)
 10452  	// result: (Move [MakeSizeAndAlign(size, t.Alignment()).Int64()] dst src mem)
 10453  	for {
 10454  		size := v.AuxInt
 10455  		dst := v.Args[0]
 10456  		v_1 := v.Args[1]
 10457  		if v_1.Op != OpLoad {
 10458  			break
 10459  		}
 10460  		t := v_1.Type
 10461  		src := v_1.Args[0]
 10462  		mem := v_1.Args[1]
 10463  		if mem != v.Args[2] {
 10464  			break
 10465  		}
 10466  		if !(!config.fe.CanSSA(t)) {
 10467  			break
 10468  		}
 10469  		v.reset(OpMove)
 10470  		v.AuxInt = MakeSizeAndAlign(size, t.Alignment()).Int64()
 10471  		v.AddArg(dst)
 10472  		v.AddArg(src)
 10473  		v.AddArg(mem)
 10474  		return true
 10475  	}
 10476  	// match: (Store [size] dst (Load <t> src mem) (VarDef {x} mem))
 10477  	// cond: !config.fe.CanSSA(t)
 10478  	// result: (Move [MakeSizeAndAlign(size, t.Alignment()).Int64()] dst src (VarDef {x} mem))
 10479  	for {
 10480  		size := v.AuxInt
 10481  		dst := v.Args[0]
 10482  		v_1 := v.Args[1]
 10483  		if v_1.Op != OpLoad {
 10484  			break
 10485  		}
 10486  		t := v_1.Type
 10487  		src := v_1.Args[0]
 10488  		mem := v_1.Args[1]
 10489  		v_2 := v.Args[2]
 10490  		if v_2.Op != OpVarDef {
 10491  			break
 10492  		}
 10493  		x := v_2.Aux
 10494  		if mem != v_2.Args[0] {
 10495  			break
 10496  		}
 10497  		if !(!config.fe.CanSSA(t)) {
 10498  			break
 10499  		}
 10500  		v.reset(OpMove)
 10501  		v.AuxInt = MakeSizeAndAlign(size, t.Alignment()).Int64()
 10502  		v.AddArg(dst)
 10503  		v.AddArg(src)
 10504  		v0 := b.NewValue0(v.Pos, OpVarDef, TypeMem)
 10505  		v0.Aux = x
 10506  		v0.AddArg(mem)
 10507  		v.AddArg(v0)
 10508  		return true
 10509  	}
 10510  	// match: (Store _ (ArrayMake0) mem)
 10511  	// cond:
 10512  	// result: mem
 10513  	for {
 10514  		v_1 := v.Args[1]
 10515  		if v_1.Op != OpArrayMake0 {
 10516  			break
 10517  		}
 10518  		mem := v.Args[2]
 10519  		v.reset(OpCopy)
 10520  		v.Type = mem.Type
 10521  		v.AddArg(mem)
 10522  		return true
 10523  	}
 10524  	// match: (Store [size] dst (ArrayMake1 e) mem)
 10525  	// cond:
 10526  	// result: (Store [size] dst e mem)
 10527  	for {
 10528  		size := v.AuxInt
 10529  		dst := v.Args[0]
 10530  		v_1 := v.Args[1]
 10531  		if v_1.Op != OpArrayMake1 {
 10532  			break
 10533  		}
 10534  		e := v_1.Args[0]
 10535  		mem := v.Args[2]
 10536  		v.reset(OpStore)
 10537  		v.AuxInt = size
 10538  		v.AddArg(dst)
 10539  		v.AddArg(e)
 10540  		v.AddArg(mem)
 10541  		return true
 10542  	}
 10543  	return false
 10544  }
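// The StructMakeN rules above split one struct store into a chain of per-field
// stores: field 0 is stored first, taking the incoming mem, and every later field
// is stored through an OffPtr at t.FieldOff(i), so the value produced is the store
// of the last field. A rough sketch for a hypothetical struct {a int32; b int64}
// (64-bit field offsets assumed):
//   (Store dst (StructMake2 <t> a b) mem)
//   => (Store [8] (OffPtr <*int64> [8] dst) b (Store [4] dst a mem))
// The two Load rules that follow turn a store of a freshly loaded, non-SSA-able
// value into a Move of MakeSizeAndAlign(size, t.Alignment()).Int64() bytes,
// preserving a VarDef wrapper on the memory argument when one is present.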
 10545  func rewriteValuegeneric_OpStringLen(v *Value, config *Config) bool {
 10546  	b := v.Block
 10547  	_ = b
 10548  	// match: (StringLen (StringMake _ (Const64 <t> [c])))
 10549  	// cond:
 10550  	// result: (Const64 <t> [c])
 10551  	for {
 10552  		v_0 := v.Args[0]
 10553  		if v_0.Op != OpStringMake {
 10554  			break
 10555  		}
 10556  		v_0_1 := v_0.Args[1]
 10557  		if v_0_1.Op != OpConst64 {
 10558  			break
 10559  		}
 10560  		t := v_0_1.Type
 10561  		c := v_0_1.AuxInt
 10562  		v.reset(OpConst64)
 10563  		v.Type = t
 10564  		v.AuxInt = c
 10565  		return true
 10566  	}
 10567  	return false
 10568  }
 10569  func rewriteValuegeneric_OpStringPtr(v *Value, config *Config) bool {
 10570  	b := v.Block
 10571  	_ = b
 10572  	// match: (StringPtr (StringMake (Const64 <t> [c]) _))
 10573  	// cond:
 10574  	// result: (Const64 <t> [c])
 10575  	for {
 10576  		v_0 := v.Args[0]
 10577  		if v_0.Op != OpStringMake {
 10578  			break
 10579  		}
 10580  		v_0_0 := v_0.Args[0]
 10581  		if v_0_0.Op != OpConst64 {
 10582  			break
 10583  		}
 10584  		t := v_0_0.Type
 10585  		c := v_0_0.AuxInt
 10586  		v.reset(OpConst64)
 10587  		v.Type = t
 10588  		v.AuxInt = c
 10589  		return true
 10590  	}
 10591  	return false
 10592  }
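// A string value is a StringMake of a pointer word and a length word, so StringLen
// and StringPtr of a StringMake whose corresponding component is a Const64 fold
// straight to that constant, e.g. StringLen (StringMake _ (Const64 <t> [5])) => Const64 <t> [5].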
 10593  func rewriteValuegeneric_OpStructSelect(v *Value, config *Config) bool {
 10594  	b := v.Block
 10595  	_ = b
 10596  	// match: (StructSelect (StructMake1 x))
 10597  	// cond:
 10598  	// result: x
 10599  	for {
 10600  		v_0 := v.Args[0]
 10601  		if v_0.Op != OpStructMake1 {
 10602  			break
 10603  		}
 10604  		x := v_0.Args[0]
 10605  		v.reset(OpCopy)
 10606  		v.Type = x.Type
 10607  		v.AddArg(x)
 10608  		return true
 10609  	}
 10610  	// match: (StructSelect [0] (StructMake2 x _))
 10611  	// cond:
 10612  	// result: x
 10613  	for {
 10614  		if v.AuxInt != 0 {
 10615  			break
 10616  		}
 10617  		v_0 := v.Args[0]
 10618  		if v_0.Op != OpStructMake2 {
 10619  			break
 10620  		}
 10621  		x := v_0.Args[0]
 10622  		v.reset(OpCopy)
 10623  		v.Type = x.Type
 10624  		v.AddArg(x)
 10625  		return true
 10626  	}
 10627  	// match: (StructSelect [1] (StructMake2 _ x))
 10628  	// cond:
 10629  	// result: x
 10630  	for {
 10631  		if v.AuxInt != 1 {
 10632  			break
 10633  		}
 10634  		v_0 := v.Args[0]
 10635  		if v_0.Op != OpStructMake2 {
 10636  			break
 10637  		}
 10638  		x := v_0.Args[1]
 10639  		v.reset(OpCopy)
 10640  		v.Type = x.Type
 10641  		v.AddArg(x)
 10642  		return true
 10643  	}
 10644  	// match: (StructSelect [0] (StructMake3 x _ _))
 10645  	// cond:
 10646  	// result: x
 10647  	for {
 10648  		if v.AuxInt != 0 {
 10649  			break
 10650  		}
 10651  		v_0 := v.Args[0]
 10652  		if v_0.Op != OpStructMake3 {
 10653  			break
 10654  		}
 10655  		x := v_0.Args[0]
 10656  		v.reset(OpCopy)
 10657  		v.Type = x.Type
 10658  		v.AddArg(x)
 10659  		return true
 10660  	}
 10661  	// match: (StructSelect [1] (StructMake3 _ x _))
 10662  	// cond:
 10663  	// result: x
 10664  	for {
 10665  		if v.AuxInt != 1 {
 10666  			break
 10667  		}
 10668  		v_0 := v.Args[0]
 10669  		if v_0.Op != OpStructMake3 {
 10670  			break
 10671  		}
 10672  		x := v_0.Args[1]
 10673  		v.reset(OpCopy)
 10674  		v.Type = x.Type
 10675  		v.AddArg(x)
 10676  		return true
 10677  	}
 10678  	// match: (StructSelect [2] (StructMake3 _ _ x))
 10679  	// cond:
 10680  	// result: x
 10681  	for {
 10682  		if v.AuxInt != 2 {
 10683  			break
 10684  		}
 10685  		v_0 := v.Args[0]
 10686  		if v_0.Op != OpStructMake3 {
 10687  			break
 10688  		}
 10689  		x := v_0.Args[2]
 10690  		v.reset(OpCopy)
 10691  		v.Type = x.Type
 10692  		v.AddArg(x)
 10693  		return true
 10694  	}
 10695  	// match: (StructSelect [0] (StructMake4 x _ _ _))
 10696  	// cond:
 10697  	// result: x
 10698  	for {
 10699  		if v.AuxInt != 0 {
 10700  			break
 10701  		}
 10702  		v_0 := v.Args[0]
 10703  		if v_0.Op != OpStructMake4 {
 10704  			break
 10705  		}
 10706  		x := v_0.Args[0]
 10707  		v.reset(OpCopy)
 10708  		v.Type = x.Type
 10709  		v.AddArg(x)
 10710  		return true
 10711  	}
 10712  	// match: (StructSelect [1] (StructMake4 _ x _ _))
 10713  	// cond:
 10714  	// result: x
 10715  	for {
 10716  		if v.AuxInt != 1 {
 10717  			break
 10718  		}
 10719  		v_0 := v.Args[0]
 10720  		if v_0.Op != OpStructMake4 {
 10721  			break
 10722  		}
 10723  		x := v_0.Args[1]
 10724  		v.reset(OpCopy)
 10725  		v.Type = x.Type
 10726  		v.AddArg(x)
 10727  		return true
 10728  	}
 10729  	// match: (StructSelect [2] (StructMake4 _ _ x _))
 10730  	// cond:
 10731  	// result: x
 10732  	for {
 10733  		if v.AuxInt != 2 {
 10734  			break
 10735  		}
 10736  		v_0 := v.Args[0]
 10737  		if v_0.Op != OpStructMake4 {
 10738  			break
 10739  		}
 10740  		x := v_0.Args[2]
 10741  		v.reset(OpCopy)
 10742  		v.Type = x.Type
 10743  		v.AddArg(x)
 10744  		return true
 10745  	}
 10746  	// match: (StructSelect [3] (StructMake4 _ _ _ x))
 10747  	// cond:
 10748  	// result: x
 10749  	for {
 10750  		if v.AuxInt != 3 {
 10751  			break
 10752  		}
 10753  		v_0 := v.Args[0]
 10754  		if v_0.Op != OpStructMake4 {
 10755  			break
 10756  		}
 10757  		x := v_0.Args[3]
 10758  		v.reset(OpCopy)
 10759  		v.Type = x.Type
 10760  		v.AddArg(x)
 10761  		return true
 10762  	}
 10763  	// match: (StructSelect [i] x:(Load <t> ptr mem))
 10764  	// cond: !config.fe.CanSSA(t)
 10765  	// result: @x.Block (Load <v.Type> (OffPtr <v.Type.PtrTo()> [t.FieldOff(int(i))] ptr) mem)
 10766  	for {
 10767  		i := v.AuxInt
 10768  		x := v.Args[0]
 10769  		if x.Op != OpLoad {
 10770  			break
 10771  		}
 10772  		t := x.Type
 10773  		ptr := x.Args[0]
 10774  		mem := x.Args[1]
 10775  		if !(!config.fe.CanSSA(t)) {
 10776  			break
 10777  		}
 10778  		b = x.Block
 10779  		v0 := b.NewValue0(v.Pos, OpLoad, v.Type)
 10780  		v.reset(OpCopy)
 10781  		v.AddArg(v0)
 10782  		v1 := b.NewValue0(v.Pos, OpOffPtr, v.Type.PtrTo())
 10783  		v1.AuxInt = t.FieldOff(int(i))
 10784  		v1.AddArg(ptr)
 10785  		v0.AddArg(v1)
 10786  		v0.AddArg(mem)
 10787  		return true
 10788  	}
 10789  	// match: (StructSelect [0] x:(IData _))
 10790  	// cond:
 10791  	// result: x
 10792  	for {
 10793  		if v.AuxInt != 0 {
 10794  			break
 10795  		}
 10796  		x := v.Args[0]
 10797  		if x.Op != OpIData {
 10798  			break
 10799  		}
 10800  		v.reset(OpCopy)
 10801  		v.Type = x.Type
 10802  		v.AddArg(x)
 10803  		return true
 10804  	}
 10805  	return false
 10806  }
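// StructSelect [i] of a StructMakeN forwards field i directly. For a struct that
// is not SSA-able, the Load rule instead re-materializes the selected field as a
// Load through an OffPtr at t.FieldOff(int(i)), emitted in the block of the
// original Load (@x.Block). A StructSelect [0] of an IData is replaced by the
// IData value itself.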
 10807  func rewriteValuegeneric_OpSub16(v *Value, config *Config) bool {
 10808  	b := v.Block
 10809  	_ = b
 10810  	// match: (Sub16  (Const16 [c]) (Const16 [d]))
 10811  	// cond:
 10812  	// result: (Const16 [int64(int16(c-d))])
 10813  	for {
 10814  		v_0 := v.Args[0]
 10815  		if v_0.Op != OpConst16 {
 10816  			break
 10817  		}
 10818  		c := v_0.AuxInt
 10819  		v_1 := v.Args[1]
 10820  		if v_1.Op != OpConst16 {
 10821  			break
 10822  		}
 10823  		d := v_1.AuxInt
 10824  		v.reset(OpConst16)
 10825  		v.AuxInt = int64(int16(c - d))
 10826  		return true
 10827  	}
 10828  	// match: (Sub16 x (Const16 <t> [c]))
 10829  	// cond: x.Op != OpConst16
 10830  	// result: (Add16 (Const16 <t> [int64(int16(-c))]) x)
 10831  	for {
 10832  		x := v.Args[0]
 10833  		v_1 := v.Args[1]
 10834  		if v_1.Op != OpConst16 {
 10835  			break
 10836  		}
 10837  		t := v_1.Type
 10838  		c := v_1.AuxInt
 10839  		if !(x.Op != OpConst16) {
 10840  			break
 10841  		}
 10842  		v.reset(OpAdd16)
 10843  		v0 := b.NewValue0(v.Pos, OpConst16, t)
 10844  		v0.AuxInt = int64(int16(-c))
 10845  		v.AddArg(v0)
 10846  		v.AddArg(x)
 10847  		return true
 10848  	}
 10849  	// match: (Sub16 x x)
 10850  	// cond:
 10851  	// result: (Const16 [0])
 10852  	for {
 10853  		x := v.Args[0]
 10854  		if x != v.Args[1] {
 10855  			break
 10856  		}
 10857  		v.reset(OpConst16)
 10858  		v.AuxInt = 0
 10859  		return true
 10860  	}
 10861  	// match: (Sub16 (Add16 x y) x)
 10862  	// cond:
 10863  	// result: y
 10864  	for {
 10865  		v_0 := v.Args[0]
 10866  		if v_0.Op != OpAdd16 {
 10867  			break
 10868  		}
 10869  		x := v_0.Args[0]
 10870  		y := v_0.Args[1]
 10871  		if x != v.Args[1] {
 10872  			break
 10873  		}
 10874  		v.reset(OpCopy)
 10875  		v.Type = y.Type
 10876  		v.AddArg(y)
 10877  		return true
 10878  	}
 10879  	// match: (Sub16 (Add16 x y) y)
 10880  	// cond:
 10881  	// result: x
 10882  	for {
 10883  		v_0 := v.Args[0]
 10884  		if v_0.Op != OpAdd16 {
 10885  			break
 10886  		}
 10887  		x := v_0.Args[0]
 10888  		y := v_0.Args[1]
 10889  		if y != v.Args[1] {
 10890  			break
 10891  		}
 10892  		v.reset(OpCopy)
 10893  		v.Type = x.Type
 10894  		v.AddArg(x)
 10895  		return true
 10896  	}
 10897  	return false
 10898  }
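// The Sub rules, repeated below for each integer width, follow one pattern:
// fold two constants with wraparound in the narrow type (e.g.
// Sub16 (Const16 [1]) (Const16 [2]) => Const16 [int64(int16(-1))]); rewrite
// x - const into (Add (Const [-c]) x) so the Add rules see the constant on the
// left; x - x => 0; and cancel an addition, (Add x y) - x => y and (Add x y) - y => x.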
 10899  func rewriteValuegeneric_OpSub32(v *Value, config *Config) bool {
 10900  	b := v.Block
 10901  	_ = b
 10902  	// match: (Sub32  (Const32 [c]) (Const32 [d]))
 10903  	// cond:
 10904  	// result: (Const32 [int64(int32(c-d))])
 10905  	for {
 10906  		v_0 := v.Args[0]
 10907  		if v_0.Op != OpConst32 {
 10908  			break
 10909  		}
 10910  		c := v_0.AuxInt
 10911  		v_1 := v.Args[1]
 10912  		if v_1.Op != OpConst32 {
 10913  			break
 10914  		}
 10915  		d := v_1.AuxInt
 10916  		v.reset(OpConst32)
 10917  		v.AuxInt = int64(int32(c - d))
 10918  		return true
 10919  	}
 10920  	// match: (Sub32 x (Const32 <t> [c]))
 10921  	// cond: x.Op != OpConst32
 10922  	// result: (Add32 (Const32 <t> [int64(int32(-c))]) x)
 10923  	for {
 10924  		x := v.Args[0]
 10925  		v_1 := v.Args[1]
 10926  		if v_1.Op != OpConst32 {
 10927  			break
 10928  		}
 10929  		t := v_1.Type
 10930  		c := v_1.AuxInt
 10931  		if !(x.Op != OpConst32) {
 10932  			break
 10933  		}
 10934  		v.reset(OpAdd32)
 10935  		v0 := b.NewValue0(v.Pos, OpConst32, t)
 10936  		v0.AuxInt = int64(int32(-c))
 10937  		v.AddArg(v0)
 10938  		v.AddArg(x)
 10939  		return true
 10940  	}
 10941  	// match: (Sub32 x x)
 10942  	// cond:
 10943  	// result: (Const32 [0])
 10944  	for {
 10945  		x := v.Args[0]
 10946  		if x != v.Args[1] {
 10947  			break
 10948  		}
 10949  		v.reset(OpConst32)
 10950  		v.AuxInt = 0
 10951  		return true
 10952  	}
 10953  	// match: (Sub32 (Add32 x y) x)
 10954  	// cond:
 10955  	// result: y
 10956  	for {
 10957  		v_0 := v.Args[0]
 10958  		if v_0.Op != OpAdd32 {
 10959  			break
 10960  		}
 10961  		x := v_0.Args[0]
 10962  		y := v_0.Args[1]
 10963  		if x != v.Args[1] {
 10964  			break
 10965  		}
 10966  		v.reset(OpCopy)
 10967  		v.Type = y.Type
 10968  		v.AddArg(y)
 10969  		return true
 10970  	}
 10971  	// match: (Sub32 (Add32 x y) y)
 10972  	// cond:
 10973  	// result: x
 10974  	for {
 10975  		v_0 := v.Args[0]
 10976  		if v_0.Op != OpAdd32 {
 10977  			break
 10978  		}
 10979  		x := v_0.Args[0]
 10980  		y := v_0.Args[1]
 10981  		if y != v.Args[1] {
 10982  			break
 10983  		}
 10984  		v.reset(OpCopy)
 10985  		v.Type = x.Type
 10986  		v.AddArg(x)
 10987  		return true
 10988  	}
 10989  	return false
 10990  }
 10991  func rewriteValuegeneric_OpSub32F(v *Value, config *Config) bool {
 10992  	b := v.Block
 10993  	_ = b
 10994  	// match: (Sub32F (Const32F [c]) (Const32F [d]))
 10995  	// cond:
 10996  	// result: (Const32F [f2i(float64(i2f32(c) - i2f32(d)))])
 10997  	for {
 10998  		v_0 := v.Args[0]
 10999  		if v_0.Op != OpConst32F {
 11000  			break
 11001  		}
 11002  		c := v_0.AuxInt
 11003  		v_1 := v.Args[1]
 11004  		if v_1.Op != OpConst32F {
 11005  			break
 11006  		}
 11007  		d := v_1.AuxInt
 11008  		v.reset(OpConst32F)
 11009  		v.AuxInt = f2i(float64(i2f32(c) - i2f32(d)))
 11010  		return true
 11011  	}
 11012  	// match: (Sub32F x (Const32F [0]))
 11013  	// cond:
 11014  	// result: x
 11015  	for {
 11016  		x := v.Args[0]
 11017  		v_1 := v.Args[1]
 11018  		if v_1.Op != OpConst32F {
 11019  			break
 11020  		}
 11021  		if v_1.AuxInt != 0 {
 11022  			break
 11023  		}
 11024  		v.reset(OpCopy)
 11025  		v.Type = x.Type
 11026  		v.AddArg(x)
 11027  		return true
 11028  	}
 11029  	return false
 11030  }
 11031  func rewriteValuegeneric_OpSub64(v *Value, config *Config) bool {
 11032  	b := v.Block
 11033  	_ = b
 11034  	// match: (Sub64  (Const64 [c]) (Const64 [d]))
 11035  	// cond:
 11036  	// result: (Const64 [c-d])
 11037  	for {
 11038  		v_0 := v.Args[0]
 11039  		if v_0.Op != OpConst64 {
 11040  			break
 11041  		}
 11042  		c := v_0.AuxInt
 11043  		v_1 := v.Args[1]
 11044  		if v_1.Op != OpConst64 {
 11045  			break
 11046  		}
 11047  		d := v_1.AuxInt
 11048  		v.reset(OpConst64)
 11049  		v.AuxInt = c - d
 11050  		return true
 11051  	}
 11052  	// match: (Sub64 x (Const64 <t> [c]))
 11053  	// cond: x.Op != OpConst64
 11054  	// result: (Add64 (Const64 <t> [-c]) x)
 11055  	for {
 11056  		x := v.Args[0]
 11057  		v_1 := v.Args[1]
 11058  		if v_1.Op != OpConst64 {
 11059  			break
 11060  		}
 11061  		t := v_1.Type
 11062  		c := v_1.AuxInt
 11063  		if !(x.Op != OpConst64) {
 11064  			break
 11065  		}
 11066  		v.reset(OpAdd64)
 11067  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 11068  		v0.AuxInt = -c
 11069  		v.AddArg(v0)
 11070  		v.AddArg(x)
 11071  		return true
 11072  	}
 11073  	// match: (Sub64 x x)
 11074  	// cond:
 11075  	// result: (Const64 [0])
 11076  	for {
 11077  		x := v.Args[0]
 11078  		if x != v.Args[1] {
 11079  			break
 11080  		}
 11081  		v.reset(OpConst64)
 11082  		v.AuxInt = 0
 11083  		return true
 11084  	}
 11085  	// match: (Sub64 (Add64 x y) x)
 11086  	// cond:
 11087  	// result: y
 11088  	for {
 11089  		v_0 := v.Args[0]
 11090  		if v_0.Op != OpAdd64 {
 11091  			break
 11092  		}
 11093  		x := v_0.Args[0]
 11094  		y := v_0.Args[1]
 11095  		if x != v.Args[1] {
 11096  			break
 11097  		}
 11098  		v.reset(OpCopy)
 11099  		v.Type = y.Type
 11100  		v.AddArg(y)
 11101  		return true
 11102  	}
 11103  	// match: (Sub64 (Add64 x y) y)
 11104  	// cond:
 11105  	// result: x
 11106  	for {
 11107  		v_0 := v.Args[0]
 11108  		if v_0.Op != OpAdd64 {
 11109  			break
 11110  		}
 11111  		x := v_0.Args[0]
 11112  		y := v_0.Args[1]
 11113  		if y != v.Args[1] {
 11114  			break
 11115  		}
 11116  		v.reset(OpCopy)
 11117  		v.Type = x.Type
 11118  		v.AddArg(x)
 11119  		return true
 11120  	}
 11121  	return false
 11122  }
 11123  func rewriteValuegeneric_OpSub64F(v *Value, config *Config) bool {
 11124  	b := v.Block
 11125  	_ = b
 11126  	// match: (Sub64F (Const64F [c]) (Const64F [d]))
 11127  	// cond:
 11128  	// result: (Const64F [f2i(i2f(c) - i2f(d))])
 11129  	for {
 11130  		v_0 := v.Args[0]
 11131  		if v_0.Op != OpConst64F {
 11132  			break
 11133  		}
 11134  		c := v_0.AuxInt
 11135  		v_1 := v.Args[1]
 11136  		if v_1.Op != OpConst64F {
 11137  			break
 11138  		}
 11139  		d := v_1.AuxInt
 11140  		v.reset(OpConst64F)
 11141  		v.AuxInt = f2i(i2f(c) - i2f(d))
 11142  		return true
 11143  	}
 11144  	// match: (Sub64F x (Const64F [0]))
 11145  	// cond:
 11146  	// result: x
 11147  	for {
 11148  		x := v.Args[0]
 11149  		v_1 := v.Args[1]
 11150  		if v_1.Op != OpConst64F {
 11151  			break
 11152  		}
 11153  		if v_1.AuxInt != 0 {
 11154  			break
 11155  		}
 11156  		v.reset(OpCopy)
 11157  		v.Type = x.Type
 11158  		v.AddArg(x)
 11159  		return true
 11160  	}
 11161  	return false
 11162  }
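// The float Sub rules fold constants through the bit-pattern helpers: i2f/i2f32
// decode the AuxInt into a float, the subtraction runs in float arithmetic, and
// f2i re-encodes the result. The x - (Const [0]) rules match only the +0 bit
// pattern (AuxInt == 0), and x - (+0) is x for every IEEE input, so the operand
// is simply copied through.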
 11163  func rewriteValuegeneric_OpSub8(v *Value, config *Config) bool {
 11164  	b := v.Block
 11165  	_ = b
 11166  	// match: (Sub8   (Const8 [c]) (Const8 [d]))
 11167  	// cond:
 11168  	// result: (Const8 [int64(int8(c-d))])
 11169  	for {
 11170  		v_0 := v.Args[0]
 11171  		if v_0.Op != OpConst8 {
 11172  			break
 11173  		}
 11174  		c := v_0.AuxInt
 11175  		v_1 := v.Args[1]
 11176  		if v_1.Op != OpConst8 {
 11177  			break
 11178  		}
 11179  		d := v_1.AuxInt
 11180  		v.reset(OpConst8)
 11181  		v.AuxInt = int64(int8(c - d))
 11182  		return true
 11183  	}
 11184  	// match: (Sub8  x (Const8  <t> [c]))
 11185  	// cond: x.Op != OpConst8
 11186  	// result: (Add8  (Const8  <t> [int64(int8(-c))]) x)
 11187  	for {
 11188  		x := v.Args[0]
 11189  		v_1 := v.Args[1]
 11190  		if v_1.Op != OpConst8 {
 11191  			break
 11192  		}
 11193  		t := v_1.Type
 11194  		c := v_1.AuxInt
 11195  		if !(x.Op != OpConst8) {
 11196  			break
 11197  		}
 11198  		v.reset(OpAdd8)
 11199  		v0 := b.NewValue0(v.Pos, OpConst8, t)
 11200  		v0.AuxInt = int64(int8(-c))
 11201  		v.AddArg(v0)
 11202  		v.AddArg(x)
 11203  		return true
 11204  	}
 11205  	// match: (Sub8  x x)
 11206  	// cond:
 11207  	// result: (Const8  [0])
 11208  	for {
 11209  		x := v.Args[0]
 11210  		if x != v.Args[1] {
 11211  			break
 11212  		}
 11213  		v.reset(OpConst8)
 11214  		v.AuxInt = 0
 11215  		return true
 11216  	}
 11217  	// match: (Sub8  (Add8  x y) x)
 11218  	// cond:
 11219  	// result: y
 11220  	for {
 11221  		v_0 := v.Args[0]
 11222  		if v_0.Op != OpAdd8 {
 11223  			break
 11224  		}
 11225  		x := v_0.Args[0]
 11226  		y := v_0.Args[1]
 11227  		if x != v.Args[1] {
 11228  			break
 11229  		}
 11230  		v.reset(OpCopy)
 11231  		v.Type = y.Type
 11232  		v.AddArg(y)
 11233  		return true
 11234  	}
 11235  	// match: (Sub8  (Add8  x y) y)
 11236  	// cond:
 11237  	// result: x
 11238  	for {
 11239  		v_0 := v.Args[0]
 11240  		if v_0.Op != OpAdd8 {
 11241  			break
 11242  		}
 11243  		x := v_0.Args[0]
 11244  		y := v_0.Args[1]
 11245  		if y != v.Args[1] {
 11246  			break
 11247  		}
 11248  		v.reset(OpCopy)
 11249  		v.Type = x.Type
 11250  		v.AddArg(x)
 11251  		return true
 11252  	}
 11253  	return false
 11254  }
 11255  func rewriteValuegeneric_OpTrunc16to8(v *Value, config *Config) bool {
 11256  	b := v.Block
 11257  	_ = b
 11258  	// match: (Trunc16to8  (Const16 [c]))
 11259  	// cond:
 11260  	// result: (Const8   [int64(int8(c))])
 11261  	for {
 11262  		v_0 := v.Args[0]
 11263  		if v_0.Op != OpConst16 {
 11264  			break
 11265  		}
 11266  		c := v_0.AuxInt
 11267  		v.reset(OpConst8)
 11268  		v.AuxInt = int64(int8(c))
 11269  		return true
 11270  	}
 11271  	// match: (Trunc16to8  (ZeroExt8to16  x))
 11272  	// cond:
 11273  	// result: x
 11274  	for {
 11275  		v_0 := v.Args[0]
 11276  		if v_0.Op != OpZeroExt8to16 {
 11277  			break
 11278  		}
 11279  		x := v_0.Args[0]
 11280  		v.reset(OpCopy)
 11281  		v.Type = x.Type
 11282  		v.AddArg(x)
 11283  		return true
 11284  	}
 11285  	// match: (Trunc16to8  (SignExt8to16  x))
 11286  	// cond:
 11287  	// result: x
 11288  	for {
 11289  		v_0 := v.Args[0]
 11290  		if v_0.Op != OpSignExt8to16 {
 11291  			break
 11292  		}
 11293  		x := v_0.Args[0]
 11294  		v.reset(OpCopy)
 11295  		v.Type = x.Type
 11296  		v.AddArg(x)
 11297  		return true
 11298  	}
 11299  	// match: (Trunc16to8  (And16 (Const16 [y]) x))
 11300  	// cond: y&0xFF == 0xFF
 11301  	// result: (Trunc16to8 x)
 11302  	for {
 11303  		v_0 := v.Args[0]
 11304  		if v_0.Op != OpAnd16 {
 11305  			break
 11306  		}
 11307  		v_0_0 := v_0.Args[0]
 11308  		if v_0_0.Op != OpConst16 {
 11309  			break
 11310  		}
 11311  		y := v_0_0.AuxInt
 11312  		x := v_0.Args[1]
 11313  		if !(y&0xFF == 0xFF) {
 11314  			break
 11315  		}
 11316  		v.reset(OpTrunc16to8)
 11317  		v.AddArg(x)
 11318  		return true
 11319  	}
 11320  	return false
 11321  }
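// The Trunc rules cancel a truncation against a matching extension, e.g.
// Trunc16to8 (ZeroExt8to16 x) => x, and drop an And whose constant mask keeps
// every bit that survives the truncation anyway: when y&0xFF == 0xFF,
// Trunc16to8 (And16 (Const16 [y]) x) => Trunc16to8 x. The 32- and 64-bit
// truncations below follow the same scheme, narrowing an extension when the
// widths do not cancel exactly (e.g. Trunc32to16 (ZeroExt8to32 x) => ZeroExt8to16 x).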
 11322  func rewriteValuegeneric_OpTrunc32to16(v *Value, config *Config) bool {
 11323  	b := v.Block
 11324  	_ = b
 11325  	// match: (Trunc32to16 (Const32 [c]))
 11326  	// cond:
 11327  	// result: (Const16  [int64(int16(c))])
 11328  	for {
 11329  		v_0 := v.Args[0]
 11330  		if v_0.Op != OpConst32 {
 11331  			break
 11332  		}
 11333  		c := v_0.AuxInt
 11334  		v.reset(OpConst16)
 11335  		v.AuxInt = int64(int16(c))
 11336  		return true
 11337  	}
 11338  	// match: (Trunc32to16 (ZeroExt8to32  x))
 11339  	// cond:
 11340  	// result: (ZeroExt8to16  x)
 11341  	for {
 11342  		v_0 := v.Args[0]
 11343  		if v_0.Op != OpZeroExt8to32 {
 11344  			break
 11345  		}
 11346  		x := v_0.Args[0]
 11347  		v.reset(OpZeroExt8to16)
 11348  		v.AddArg(x)
 11349  		return true
 11350  	}
 11351  	// match: (Trunc32to16 (ZeroExt16to32 x))
 11352  	// cond:
 11353  	// result: x
 11354  	for {
 11355  		v_0 := v.Args[0]
 11356  		if v_0.Op != OpZeroExt16to32 {
 11357  			break
 11358  		}
 11359  		x := v_0.Args[0]
 11360  		v.reset(OpCopy)
 11361  		v.Type = x.Type
 11362  		v.AddArg(x)
 11363  		return true
 11364  	}
 11365  	// match: (Trunc32to16 (SignExt8to32  x))
 11366  	// cond:
 11367  	// result: (SignExt8to16  x)
 11368  	for {
 11369  		v_0 := v.Args[0]
 11370  		if v_0.Op != OpSignExt8to32 {
 11371  			break
 11372  		}
 11373  		x := v_0.Args[0]
 11374  		v.reset(OpSignExt8to16)
 11375  		v.AddArg(x)
 11376  		return true
 11377  	}
 11378  	// match: (Trunc32to16 (SignExt16to32 x))
 11379  	// cond:
 11380  	// result: x
 11381  	for {
 11382  		v_0 := v.Args[0]
 11383  		if v_0.Op != OpSignExt16to32 {
 11384  			break
 11385  		}
 11386  		x := v_0.Args[0]
 11387  		v.reset(OpCopy)
 11388  		v.Type = x.Type
 11389  		v.AddArg(x)
 11390  		return true
 11391  	}
 11392  	// match: (Trunc32to16 (And32 (Const32 [y]) x))
 11393  	// cond: y&0xFFFF == 0xFFFF
 11394  	// result: (Trunc32to16 x)
 11395  	for {
 11396  		v_0 := v.Args[0]
 11397  		if v_0.Op != OpAnd32 {
 11398  			break
 11399  		}
 11400  		v_0_0 := v_0.Args[0]
 11401  		if v_0_0.Op != OpConst32 {
 11402  			break
 11403  		}
 11404  		y := v_0_0.AuxInt
 11405  		x := v_0.Args[1]
 11406  		if !(y&0xFFFF == 0xFFFF) {
 11407  			break
 11408  		}
 11409  		v.reset(OpTrunc32to16)
 11410  		v.AddArg(x)
 11411  		return true
 11412  	}
 11413  	return false
 11414  }
 11415  func rewriteValuegeneric_OpTrunc32to8(v *Value, config *Config) bool {
 11416  	b := v.Block
 11417  	_ = b
 11418  	// match: (Trunc32to8  (Const32 [c]))
 11419  	// cond:
 11420  	// result: (Const8   [int64(int8(c))])
 11421  	for {
 11422  		v_0 := v.Args[0]
 11423  		if v_0.Op != OpConst32 {
 11424  			break
 11425  		}
 11426  		c := v_0.AuxInt
 11427  		v.reset(OpConst8)
 11428  		v.AuxInt = int64(int8(c))
 11429  		return true
 11430  	}
 11431  	// match: (Trunc32to8  (ZeroExt8to32  x))
 11432  	// cond:
 11433  	// result: x
 11434  	for {
 11435  		v_0 := v.Args[0]
 11436  		if v_0.Op != OpZeroExt8to32 {
 11437  			break
 11438  		}
 11439  		x := v_0.Args[0]
 11440  		v.reset(OpCopy)
 11441  		v.Type = x.Type
 11442  		v.AddArg(x)
 11443  		return true
 11444  	}
 11445  	// match: (Trunc32to8  (SignExt8to32  x))
 11446  	// cond:
 11447  	// result: x
 11448  	for {
 11449  		v_0 := v.Args[0]
 11450  		if v_0.Op != OpSignExt8to32 {
 11451  			break
 11452  		}
 11453  		x := v_0.Args[0]
 11454  		v.reset(OpCopy)
 11455  		v.Type = x.Type
 11456  		v.AddArg(x)
 11457  		return true
 11458  	}
 11459  	// match: (Trunc32to8  (And32 (Const32 [y]) x))
 11460  	// cond: y&0xFF == 0xFF
 11461  	// result: (Trunc32to8 x)
 11462  	for {
 11463  		v_0 := v.Args[0]
 11464  		if v_0.Op != OpAnd32 {
 11465  			break
 11466  		}
 11467  		v_0_0 := v_0.Args[0]
 11468  		if v_0_0.Op != OpConst32 {
 11469  			break
 11470  		}
 11471  		y := v_0_0.AuxInt
 11472  		x := v_0.Args[1]
 11473  		if !(y&0xFF == 0xFF) {
 11474  			break
 11475  		}
 11476  		v.reset(OpTrunc32to8)
 11477  		v.AddArg(x)
 11478  		return true
 11479  	}
 11480  	return false
 11481  }
 11482  func rewriteValuegeneric_OpTrunc64to16(v *Value, config *Config) bool {
 11483  	b := v.Block
 11484  	_ = b
 11485  	// match: (Trunc64to16 (Const64 [c]))
 11486  	// cond:
 11487  	// result: (Const16  [int64(int16(c))])
 11488  	for {
 11489  		v_0 := v.Args[0]
 11490  		if v_0.Op != OpConst64 {
 11491  			break
 11492  		}
 11493  		c := v_0.AuxInt
 11494  		v.reset(OpConst16)
 11495  		v.AuxInt = int64(int16(c))
 11496  		return true
 11497  	}
 11498  	// match: (Trunc64to16 (ZeroExt8to64  x))
 11499  	// cond:
 11500  	// result: (ZeroExt8to16  x)
 11501  	for {
 11502  		v_0 := v.Args[0]
 11503  		if v_0.Op != OpZeroExt8to64 {
 11504  			break
 11505  		}
 11506  		x := v_0.Args[0]
 11507  		v.reset(OpZeroExt8to16)
 11508  		v.AddArg(x)
 11509  		return true
 11510  	}
 11511  	// match: (Trunc64to16 (ZeroExt16to64 x))
 11512  	// cond:
 11513  	// result: x
 11514  	for {
 11515  		v_0 := v.Args[0]
 11516  		if v_0.Op != OpZeroExt16to64 {
 11517  			break
 11518  		}
 11519  		x := v_0.Args[0]
 11520  		v.reset(OpCopy)
 11521  		v.Type = x.Type
 11522  		v.AddArg(x)
 11523  		return true
 11524  	}
 11525  	// match: (Trunc64to16 (SignExt8to64  x))
 11526  	// cond:
 11527  	// result: (SignExt8to16  x)
 11528  	for {
 11529  		v_0 := v.Args[0]
 11530  		if v_0.Op != OpSignExt8to64 {
 11531  			break
 11532  		}
 11533  		x := v_0.Args[0]
 11534  		v.reset(OpSignExt8to16)
 11535  		v.AddArg(x)
 11536  		return true
 11537  	}
 11538  	// match: (Trunc64to16 (SignExt16to64 x))
 11539  	// cond:
 11540  	// result: x
 11541  	for {
 11542  		v_0 := v.Args[0]
 11543  		if v_0.Op != OpSignExt16to64 {
 11544  			break
 11545  		}
 11546  		x := v_0.Args[0]
 11547  		v.reset(OpCopy)
 11548  		v.Type = x.Type
 11549  		v.AddArg(x)
 11550  		return true
 11551  	}
 11552  	// match: (Trunc64to16 (And64 (Const64 [y]) x))
 11553  	// cond: y&0xFFFF == 0xFFFF
 11554  	// result: (Trunc64to16 x)
 11555  	for {
 11556  		v_0 := v.Args[0]
 11557  		if v_0.Op != OpAnd64 {
 11558  			break
 11559  		}
 11560  		v_0_0 := v_0.Args[0]
 11561  		if v_0_0.Op != OpConst64 {
 11562  			break
 11563  		}
 11564  		y := v_0_0.AuxInt
 11565  		x := v_0.Args[1]
 11566  		if !(y&0xFFFF == 0xFFFF) {
 11567  			break
 11568  		}
 11569  		v.reset(OpTrunc64to16)
 11570  		v.AddArg(x)
 11571  		return true
 11572  	}
 11573  	return false
 11574  }
 11575  func rewriteValuegeneric_OpTrunc64to32(v *Value, config *Config) bool {
 11576  	b := v.Block
 11577  	_ = b
 11578  	// match: (Trunc64to32 (Const64 [c]))
 11579  	// cond:
 11580  	// result: (Const32  [int64(int32(c))])
 11581  	for {
 11582  		v_0 := v.Args[0]
 11583  		if v_0.Op != OpConst64 {
 11584  			break
 11585  		}
 11586  		c := v_0.AuxInt
 11587  		v.reset(OpConst32)
 11588  		v.AuxInt = int64(int32(c))
 11589  		return true
 11590  	}
 11591  	// match: (Trunc64to32 (ZeroExt8to64  x))
 11592  	// cond:
 11593  	// result: (ZeroExt8to32  x)
 11594  	for {
 11595  		v_0 := v.Args[0]
 11596  		if v_0.Op != OpZeroExt8to64 {
 11597  			break
 11598  		}
 11599  		x := v_0.Args[0]
 11600  		v.reset(OpZeroExt8to32)
 11601  		v.AddArg(x)
 11602  		return true
 11603  	}
 11604  	// match: (Trunc64to32 (ZeroExt16to64 x))
 11605  	// cond:
 11606  	// result: (ZeroExt16to32 x)
 11607  	for {
 11608  		v_0 := v.Args[0]
 11609  		if v_0.Op != OpZeroExt16to64 {
 11610  			break
 11611  		}
 11612  		x := v_0.Args[0]
 11613  		v.reset(OpZeroExt16to32)
 11614  		v.AddArg(x)
 11615  		return true
 11616  	}
 11617  	// match: (Trunc64to32 (ZeroExt32to64 x))
 11618  	// cond:
 11619  	// result: x
 11620  	for {
 11621  		v_0 := v.Args[0]
 11622  		if v_0.Op != OpZeroExt32to64 {
 11623  			break
 11624  		}
 11625  		x := v_0.Args[0]
 11626  		v.reset(OpCopy)
 11627  		v.Type = x.Type
 11628  		v.AddArg(x)
 11629  		return true
 11630  	}
 11631  	// match: (Trunc64to32 (SignExt8to64  x))
 11632  	// cond:
 11633  	// result: (SignExt8to32  x)
 11634  	for {
 11635  		v_0 := v.Args[0]
 11636  		if v_0.Op != OpSignExt8to64 {
 11637  			break
 11638  		}
 11639  		x := v_0.Args[0]
 11640  		v.reset(OpSignExt8to32)
 11641  		v.AddArg(x)
 11642  		return true
 11643  	}
 11644  	// match: (Trunc64to32 (SignExt16to64 x))
 11645  	// cond:
 11646  	// result: (SignExt16to32 x)
 11647  	for {
 11648  		v_0 := v.Args[0]
 11649  		if v_0.Op != OpSignExt16to64 {
 11650  			break
 11651  		}
 11652  		x := v_0.Args[0]
 11653  		v.reset(OpSignExt16to32)
 11654  		v.AddArg(x)
 11655  		return true
 11656  	}
 11657  	// match: (Trunc64to32 (SignExt32to64 x))
 11658  	// cond:
 11659  	// result: x
 11660  	for {
 11661  		v_0 := v.Args[0]
 11662  		if v_0.Op != OpSignExt32to64 {
 11663  			break
 11664  		}
 11665  		x := v_0.Args[0]
 11666  		v.reset(OpCopy)
 11667  		v.Type = x.Type
 11668  		v.AddArg(x)
 11669  		return true
 11670  	}
 11671  	// match: (Trunc64to32 (And64 (Const64 [y]) x))
 11672  	// cond: y&0xFFFFFFFF == 0xFFFFFFFF
 11673  	// result: (Trunc64to32 x)
 11674  	for {
 11675  		v_0 := v.Args[0]
 11676  		if v_0.Op != OpAnd64 {
 11677  			break
 11678  		}
 11679  		v_0_0 := v_0.Args[0]
 11680  		if v_0_0.Op != OpConst64 {
 11681  			break
 11682  		}
 11683  		y := v_0_0.AuxInt
 11684  		x := v_0.Args[1]
 11685  		if !(y&0xFFFFFFFF == 0xFFFFFFFF) {
 11686  			break
 11687  		}
 11688  		v.reset(OpTrunc64to32)
 11689  		v.AddArg(x)
 11690  		return true
 11691  	}
 11692  	return false
 11693  }
 11694  func rewriteValuegeneric_OpTrunc64to8(v *Value, config *Config) bool {
 11695  	b := v.Block
 11696  	_ = b
 11697  	// match: (Trunc64to8  (Const64 [c]))
 11698  	// cond:
 11699  	// result: (Const8   [int64(int8(c))])
 11700  	for {
 11701  		v_0 := v.Args[0]
 11702  		if v_0.Op != OpConst64 {
 11703  			break
 11704  		}
 11705  		c := v_0.AuxInt
 11706  		v.reset(OpConst8)
 11707  		v.AuxInt = int64(int8(c))
 11708  		return true
 11709  	}
 11710  	// match: (Trunc64to8  (ZeroExt8to64  x))
 11711  	// cond:
 11712  	// result: x
 11713  	for {
 11714  		v_0 := v.Args[0]
 11715  		if v_0.Op != OpZeroExt8to64 {
 11716  			break
 11717  		}
 11718  		x := v_0.Args[0]
 11719  		v.reset(OpCopy)
 11720  		v.Type = x.Type
 11721  		v.AddArg(x)
 11722  		return true
 11723  	}
 11724  	// match: (Trunc64to8  (SignExt8to64  x))
 11725  	// cond:
 11726  	// result: x
 11727  	for {
 11728  		v_0 := v.Args[0]
 11729  		if v_0.Op != OpSignExt8to64 {
 11730  			break
 11731  		}
 11732  		x := v_0.Args[0]
 11733  		v.reset(OpCopy)
 11734  		v.Type = x.Type
 11735  		v.AddArg(x)
 11736  		return true
 11737  	}
 11738  	// match: (Trunc64to8  (And64 (Const64 [y]) x))
 11739  	// cond: y&0xFF == 0xFF
 11740  	// result: (Trunc64to8 x)
 11741  	for {
 11742  		v_0 := v.Args[0]
 11743  		if v_0.Op != OpAnd64 {
 11744  			break
 11745  		}
 11746  		v_0_0 := v_0.Args[0]
 11747  		if v_0_0.Op != OpConst64 {
 11748  			break
 11749  		}
 11750  		y := v_0_0.AuxInt
 11751  		x := v_0.Args[1]
 11752  		if !(y&0xFF == 0xFF) {
 11753  			break
 11754  		}
 11755  		v.reset(OpTrunc64to8)
 11756  		v.AddArg(x)
 11757  		return true
 11758  	}
 11759  	return false
 11760  }
 11761  func rewriteValuegeneric_OpXor16(v *Value, config *Config) bool {
 11762  	b := v.Block
 11763  	_ = b
 11764  	// match: (Xor16 x (Const16 <t> [c]))
 11765  	// cond: x.Op != OpConst16
 11766  	// result: (Xor16 (Const16 <t> [c]) x)
 11767  	for {
 11768  		x := v.Args[0]
 11769  		v_1 := v.Args[1]
 11770  		if v_1.Op != OpConst16 {
 11771  			break
 11772  		}
 11773  		t := v_1.Type
 11774  		c := v_1.AuxInt
 11775  		if !(x.Op != OpConst16) {
 11776  			break
 11777  		}
 11778  		v.reset(OpXor16)
 11779  		v0 := b.NewValue0(v.Pos, OpConst16, t)
 11780  		v0.AuxInt = c
 11781  		v.AddArg(v0)
 11782  		v.AddArg(x)
 11783  		return true
 11784  	}
 11785  	// match: (Xor16 x x)
 11786  	// cond:
 11787  	// result: (Const16 [0])
 11788  	for {
 11789  		x := v.Args[0]
 11790  		if x != v.Args[1] {
 11791  			break
 11792  		}
 11793  		v.reset(OpConst16)
 11794  		v.AuxInt = 0
 11795  		return true
 11796  	}
 11797  	// match: (Xor16 (Const16 [0]) x)
 11798  	// cond:
 11799  	// result: x
 11800  	for {
 11801  		v_0 := v.Args[0]
 11802  		if v_0.Op != OpConst16 {
 11803  			break
 11804  		}
 11805  		if v_0.AuxInt != 0 {
 11806  			break
 11807  		}
 11808  		x := v.Args[1]
 11809  		v.reset(OpCopy)
 11810  		v.Type = x.Type
 11811  		v.AddArg(x)
 11812  		return true
 11813  	}
 11814  	// match: (Xor16 x (Xor16 x y))
 11815  	// cond:
 11816  	// result: y
 11817  	for {
 11818  		x := v.Args[0]
 11819  		v_1 := v.Args[1]
 11820  		if v_1.Op != OpXor16 {
 11821  			break
 11822  		}
 11823  		if x != v_1.Args[0] {
 11824  			break
 11825  		}
 11826  		y := v_1.Args[1]
 11827  		v.reset(OpCopy)
 11828  		v.Type = y.Type
 11829  		v.AddArg(y)
 11830  		return true
 11831  	}
 11832  	// match: (Xor16 x (Xor16 y x))
 11833  	// cond:
 11834  	// result: y
 11835  	for {
 11836  		x := v.Args[0]
 11837  		v_1 := v.Args[1]
 11838  		if v_1.Op != OpXor16 {
 11839  			break
 11840  		}
 11841  		y := v_1.Args[0]
 11842  		if x != v_1.Args[1] {
 11843  			break
 11844  		}
 11845  		v.reset(OpCopy)
 11846  		v.Type = y.Type
 11847  		v.AddArg(y)
 11848  		return true
 11849  	}
 11850  	// match: (Xor16 (Xor16 x y) x)
 11851  	// cond:
 11852  	// result: y
 11853  	for {
 11854  		v_0 := v.Args[0]
 11855  		if v_0.Op != OpXor16 {
 11856  			break
 11857  		}
 11858  		x := v_0.Args[0]
 11859  		y := v_0.Args[1]
 11860  		if x != v.Args[1] {
 11861  			break
 11862  		}
 11863  		v.reset(OpCopy)
 11864  		v.Type = y.Type
 11865  		v.AddArg(y)
 11866  		return true
 11867  	}
 11868  	// match: (Xor16 (Xor16 x y) y)
 11869  	// cond:
 11870  	// result: x
 11871  	for {
 11872  		v_0 := v.Args[0]
 11873  		if v_0.Op != OpXor16 {
 11874  			break
 11875  		}
 11876  		x := v_0.Args[0]
 11877  		y := v_0.Args[1]
 11878  		if y != v.Args[1] {
 11879  			break
 11880  		}
 11881  		v.reset(OpCopy)
 11882  		v.Type = x.Type
 11883  		v.AddArg(x)
 11884  		return true
 11885  	}
 11886  	return false
 11887  }
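// The Xor rules mirror the other commutative integer ops: move a constant operand
// to the left when the other operand is not a constant, x ^ x => 0, 0 ^ x => x,
// and cancel one operand of a nested xor, e.g. Xor16 x (Xor16 x y) => y. The 32-,
// 64- and 8-bit variants below differ only in width.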
 11888  func rewriteValuegeneric_OpXor32(v *Value, config *Config) bool {
 11889  	b := v.Block
 11890  	_ = b
 11891  	// match: (Xor32 x (Const32 <t> [c]))
 11892  	// cond: x.Op != OpConst32
 11893  	// result: (Xor32 (Const32 <t> [c]) x)
 11894  	for {
 11895  		x := v.Args[0]
 11896  		v_1 := v.Args[1]
 11897  		if v_1.Op != OpConst32 {
 11898  			break
 11899  		}
 11900  		t := v_1.Type
 11901  		c := v_1.AuxInt
 11902  		if !(x.Op != OpConst32) {
 11903  			break
 11904  		}
 11905  		v.reset(OpXor32)
 11906  		v0 := b.NewValue0(v.Pos, OpConst32, t)
 11907  		v0.AuxInt = c
 11908  		v.AddArg(v0)
 11909  		v.AddArg(x)
 11910  		return true
 11911  	}
 11912  	// match: (Xor32 x x)
 11913  	// cond:
 11914  	// result: (Const32 [0])
 11915  	for {
 11916  		x := v.Args[0]
 11917  		if x != v.Args[1] {
 11918  			break
 11919  		}
 11920  		v.reset(OpConst32)
 11921  		v.AuxInt = 0
 11922  		return true
 11923  	}
 11924  	// match: (Xor32 (Const32 [0]) x)
 11925  	// cond:
 11926  	// result: x
 11927  	for {
 11928  		v_0 := v.Args[0]
 11929  		if v_0.Op != OpConst32 {
 11930  			break
 11931  		}
 11932  		if v_0.AuxInt != 0 {
 11933  			break
 11934  		}
 11935  		x := v.Args[1]
 11936  		v.reset(OpCopy)
 11937  		v.Type = x.Type
 11938  		v.AddArg(x)
 11939  		return true
 11940  	}
 11941  	// match: (Xor32 x (Xor32 x y))
 11942  	// cond:
 11943  	// result: y
 11944  	for {
 11945  		x := v.Args[0]
 11946  		v_1 := v.Args[1]
 11947  		if v_1.Op != OpXor32 {
 11948  			break
 11949  		}
 11950  		if x != v_1.Args[0] {
 11951  			break
 11952  		}
 11953  		y := v_1.Args[1]
 11954  		v.reset(OpCopy)
 11955  		v.Type = y.Type
 11956  		v.AddArg(y)
 11957  		return true
 11958  	}
 11959  	// match: (Xor32 x (Xor32 y x))
 11960  	// cond:
 11961  	// result: y
 11962  	for {
 11963  		x := v.Args[0]
 11964  		v_1 := v.Args[1]
 11965  		if v_1.Op != OpXor32 {
 11966  			break
 11967  		}
 11968  		y := v_1.Args[0]
 11969  		if x != v_1.Args[1] {
 11970  			break
 11971  		}
 11972  		v.reset(OpCopy)
 11973  		v.Type = y.Type
 11974  		v.AddArg(y)
 11975  		return true
 11976  	}
 11977  	// match: (Xor32 (Xor32 x y) x)
 11978  	// cond:
 11979  	// result: y
 11980  	for {
 11981  		v_0 := v.Args[0]
 11982  		if v_0.Op != OpXor32 {
 11983  			break
 11984  		}
 11985  		x := v_0.Args[0]
 11986  		y := v_0.Args[1]
 11987  		if x != v.Args[1] {
 11988  			break
 11989  		}
 11990  		v.reset(OpCopy)
 11991  		v.Type = y.Type
 11992  		v.AddArg(y)
 11993  		return true
 11994  	}
 11995  	// match: (Xor32 (Xor32 x y) y)
 11996  	// cond:
 11997  	// result: x
 11998  	for {
 11999  		v_0 := v.Args[0]
 12000  		if v_0.Op != OpXor32 {
 12001  			break
 12002  		}
 12003  		x := v_0.Args[0]
 12004  		y := v_0.Args[1]
 12005  		if y != v.Args[1] {
 12006  			break
 12007  		}
 12008  		v.reset(OpCopy)
 12009  		v.Type = x.Type
 12010  		v.AddArg(x)
 12011  		return true
 12012  	}
 12013  	return false
 12014  }
 12015  func rewriteValuegeneric_OpXor64(v *Value, config *Config) bool {
 12016  	b := v.Block
 12017  	_ = b
 12018  	// match: (Xor64 x (Const64 <t> [c]))
 12019  	// cond: x.Op != OpConst64
 12020  	// result: (Xor64 (Const64 <t> [c]) x)
 12021  	for {
 12022  		x := v.Args[0]
 12023  		v_1 := v.Args[1]
 12024  		if v_1.Op != OpConst64 {
 12025  			break
 12026  		}
 12027  		t := v_1.Type
 12028  		c := v_1.AuxInt
 12029  		if !(x.Op != OpConst64) {
 12030  			break
 12031  		}
 12032  		v.reset(OpXor64)
 12033  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 12034  		v0.AuxInt = c
 12035  		v.AddArg(v0)
 12036  		v.AddArg(x)
 12037  		return true
 12038  	}
 12039  	// match: (Xor64 x x)
 12040  	// cond:
 12041  	// result: (Const64 [0])
 12042  	for {
 12043  		x := v.Args[0]
 12044  		if x != v.Args[1] {
 12045  			break
 12046  		}
 12047  		v.reset(OpConst64)
 12048  		v.AuxInt = 0
 12049  		return true
 12050  	}
 12051  	// match: (Xor64 (Const64 [0]) x)
 12052  	// cond:
 12053  	// result: x
 12054  	for {
 12055  		v_0 := v.Args[0]
 12056  		if v_0.Op != OpConst64 {
 12057  			break
 12058  		}
 12059  		if v_0.AuxInt != 0 {
 12060  			break
 12061  		}
 12062  		x := v.Args[1]
 12063  		v.reset(OpCopy)
 12064  		v.Type = x.Type
 12065  		v.AddArg(x)
 12066  		return true
 12067  	}
 12068  	// match: (Xor64 x (Xor64 x y))
 12069  	// cond:
 12070  	// result: y
 12071  	for {
 12072  		x := v.Args[0]
 12073  		v_1 := v.Args[1]
 12074  		if v_1.Op != OpXor64 {
 12075  			break
 12076  		}
 12077  		if x != v_1.Args[0] {
 12078  			break
 12079  		}
 12080  		y := v_1.Args[1]
 12081  		v.reset(OpCopy)
 12082  		v.Type = y.Type
 12083  		v.AddArg(y)
 12084  		return true
 12085  	}
 12086  	// match: (Xor64 x (Xor64 y x))
 12087  	// cond:
 12088  	// result: y
 12089  	for {
 12090  		x := v.Args[0]
 12091  		v_1 := v.Args[1]
 12092  		if v_1.Op != OpXor64 {
 12093  			break
 12094  		}
 12095  		y := v_1.Args[0]
 12096  		if x != v_1.Args[1] {
 12097  			break
 12098  		}
 12099  		v.reset(OpCopy)
 12100  		v.Type = y.Type
 12101  		v.AddArg(y)
 12102  		return true
 12103  	}
 12104  	// match: (Xor64 (Xor64 x y) x)
 12105  	// cond:
 12106  	// result: y
 12107  	for {
 12108  		v_0 := v.Args[0]
 12109  		if v_0.Op != OpXor64 {
 12110  			break
 12111  		}
 12112  		x := v_0.Args[0]
 12113  		y := v_0.Args[1]
 12114  		if x != v.Args[1] {
 12115  			break
 12116  		}
 12117  		v.reset(OpCopy)
 12118  		v.Type = y.Type
 12119  		v.AddArg(y)
 12120  		return true
 12121  	}
 12122  	// match: (Xor64 (Xor64 x y) y)
 12123  	// cond:
 12124  	// result: x
 12125  	for {
 12126  		v_0 := v.Args[0]
 12127  		if v_0.Op != OpXor64 {
 12128  			break
 12129  		}
 12130  		x := v_0.Args[0]
 12131  		y := v_0.Args[1]
 12132  		if y != v.Args[1] {
 12133  			break
 12134  		}
 12135  		v.reset(OpCopy)
 12136  		v.Type = x.Type
 12137  		v.AddArg(x)
 12138  		return true
 12139  	}
 12140  	return false
 12141  }
 12142  func rewriteValuegeneric_OpXor8(v *Value, config *Config) bool {
 12143  	b := v.Block
 12144  	_ = b
 12145  	// match: (Xor8  x (Const8  <t> [c]))
 12146  	// cond: x.Op != OpConst8
 12147  	// result: (Xor8  (Const8  <t> [c]) x)
 12148  	for {
 12149  		x := v.Args[0]
 12150  		v_1 := v.Args[1]
 12151  		if v_1.Op != OpConst8 {
 12152  			break
 12153  		}
 12154  		t := v_1.Type
 12155  		c := v_1.AuxInt
 12156  		if !(x.Op != OpConst8) {
 12157  			break
 12158  		}
 12159  		v.reset(OpXor8)
 12160  		v0 := b.NewValue0(v.Pos, OpConst8, t)
 12161  		v0.AuxInt = c
 12162  		v.AddArg(v0)
 12163  		v.AddArg(x)
 12164  		return true
 12165  	}
 12166  	// match: (Xor8  x x)
 12167  	// cond:
 12168  	// result: (Const8  [0])
 12169  	for {
 12170  		x := v.Args[0]
 12171  		if x != v.Args[1] {
 12172  			break
 12173  		}
 12174  		v.reset(OpConst8)
 12175  		v.AuxInt = 0
 12176  		return true
 12177  	}
 12178  	// match: (Xor8  (Const8  [0]) x)
 12179  	// cond:
 12180  	// result: x
 12181  	for {
 12182  		v_0 := v.Args[0]
 12183  		if v_0.Op != OpConst8 {
 12184  			break
 12185  		}
 12186  		if v_0.AuxInt != 0 {
 12187  			break
 12188  		}
 12189  		x := v.Args[1]
 12190  		v.reset(OpCopy)
 12191  		v.Type = x.Type
 12192  		v.AddArg(x)
 12193  		return true
 12194  	}
 12195  	// match: (Xor8  x (Xor8  x y))
 12196  	// cond:
 12197  	// result: y
 12198  	for {
 12199  		x := v.Args[0]
 12200  		v_1 := v.Args[1]
 12201  		if v_1.Op != OpXor8 {
 12202  			break
 12203  		}
 12204  		if x != v_1.Args[0] {
 12205  			break
 12206  		}
 12207  		y := v_1.Args[1]
 12208  		v.reset(OpCopy)
 12209  		v.Type = y.Type
 12210  		v.AddArg(y)
 12211  		return true
 12212  	}
 12213  	// match: (Xor8  x (Xor8  y x))
 12214  	// cond:
 12215  	// result: y
 12216  	for {
 12217  		x := v.Args[0]
 12218  		v_1 := v.Args[1]
 12219  		if v_1.Op != OpXor8 {
 12220  			break
 12221  		}
 12222  		y := v_1.Args[0]
 12223  		if x != v_1.Args[1] {
 12224  			break
 12225  		}
 12226  		v.reset(OpCopy)
 12227  		v.Type = y.Type
 12228  		v.AddArg(y)
 12229  		return true
 12230  	}
 12231  	// match: (Xor8  (Xor8  x y) x)
 12232  	// cond:
 12233  	// result: y
 12234  	for {
 12235  		v_0 := v.Args[0]
 12236  		if v_0.Op != OpXor8 {
 12237  			break
 12238  		}
 12239  		x := v_0.Args[0]
 12240  		y := v_0.Args[1]
 12241  		if x != v.Args[1] {
 12242  			break
 12243  		}
 12244  		v.reset(OpCopy)
 12245  		v.Type = y.Type
 12246  		v.AddArg(y)
 12247  		return true
 12248  	}
 12249  	// match: (Xor8  (Xor8  x y) y)
 12250  	// cond:
 12251  	// result: x
 12252  	for {
 12253  		v_0 := v.Args[0]
 12254  		if v_0.Op != OpXor8 {
 12255  			break
 12256  		}
 12257  		x := v_0.Args[0]
 12258  		y := v_0.Args[1]
 12259  		if y != v.Args[1] {
 12260  			break
 12261  		}
 12262  		v.reset(OpCopy)
 12263  		v.Type = x.Type
 12264  		v.AddArg(x)
 12265  		return true
 12266  	}
 12267  	return false
 12268  }
 12269  func rewriteValuegeneric_OpZero(v *Value, config *Config) bool {
 12270  	b := v.Block
 12271  	_ = b
 12272  	// match: (Zero (Load (OffPtr [c] (SP)) mem) mem)
 12273  	// cond: mem.Op == OpStaticCall 	&& isSameSym(mem.Aux, "runtime.newobject") 	&& c == config.ctxt.FixedFrameSize() + config.PtrSize
 12274  	// result: mem
 12275  	for {
 12276  		v_0 := v.Args[0]
 12277  		if v_0.Op != OpLoad {
 12278  			break
 12279  		}
 12280  		v_0_0 := v_0.Args[0]
 12281  		if v_0_0.Op != OpOffPtr {
 12282  			break
 12283  		}
 12284  		c := v_0_0.AuxInt
 12285  		v_0_0_0 := v_0_0.Args[0]
 12286  		if v_0_0_0.Op != OpSP {
 12287  			break
 12288  		}
 12289  		mem := v_0.Args[1]
 12290  		if mem != v.Args[1] {
 12291  			break
 12292  		}
 12293  		if !(mem.Op == OpStaticCall && isSameSym(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize()+config.PtrSize) {
 12294  			break
 12295  		}
 12296  		v.reset(OpCopy)
 12297  		v.Type = mem.Type
 12298  		v.AddArg(mem)
 12299  		return true
 12300  	}
 12301  	return false
 12302  }
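// A Zero applied to memory freshly returned by runtime.newobject is redundant,
// since newobject returns zeroed memory. The pattern recognizes the allocation by
// matching a Load of the stack slot at FixedFrameSize() + PtrSize above SP (the
// call's result slot) whose memory argument is the StaticCall itself, and then
// forwards that memory unchanged.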
 12303  func rewriteValuegeneric_OpZeroExt16to32(v *Value, config *Config) bool {
 12304  	b := v.Block
 12305  	_ = b
 12306  	// match: (ZeroExt16to32 (Const16 [c]))
 12307  	// cond:
 12308  	// result: (Const32 [int64(uint16(c))])
 12309  	for {
 12310  		v_0 := v.Args[0]
 12311  		if v_0.Op != OpConst16 {
 12312  			break
 12313  		}
 12314  		c := v_0.AuxInt
 12315  		v.reset(OpConst32)
 12316  		v.AuxInt = int64(uint16(c))
 12317  		return true
 12318  	}
 12319  	// match: (ZeroExt16to32 (Trunc32to16 x:(Rsh32Ux64 _ (Const64 [s]))))
 12320  	// cond: s >= 16
 12321  	// result: x
 12322  	for {
 12323  		v_0 := v.Args[0]
 12324  		if v_0.Op != OpTrunc32to16 {
 12325  			break
 12326  		}
 12327  		x := v_0.Args[0]
 12328  		if x.Op != OpRsh32Ux64 {
 12329  			break
 12330  		}
 12331  		x_1 := x.Args[1]
 12332  		if x_1.Op != OpConst64 {
 12333  			break
 12334  		}
 12335  		s := x_1.AuxInt
 12336  		if !(s >= 16) {
 12337  			break
 12338  		}
 12339  		v.reset(OpCopy)
 12340  		v.Type = x.Type
 12341  		v.AddArg(x)
 12342  		return true
 12343  	}
 12344  	return false
 12345  }
 12346  func rewriteValuegeneric_OpZeroExt16to64(v *Value, config *Config) bool {
 12347  	b := v.Block
 12348  	_ = b
 12349  	// match: (ZeroExt16to64 (Const16 [c]))
 12350  	// cond:
 12351  	// result: (Const64 [int64(uint16(c))])
 12352  	for {
 12353  		v_0 := v.Args[0]
 12354  		if v_0.Op != OpConst16 {
 12355  			break
 12356  		}
 12357  		c := v_0.AuxInt
 12358  		v.reset(OpConst64)
 12359  		v.AuxInt = int64(uint16(c))
 12360  		return true
 12361  	}
 12362  	// match: (ZeroExt16to64 (Trunc64to16 x:(Rsh64Ux64 _ (Const64 [s]))))
 12363  	// cond: s >= 48
 12364  	// result: x
 12365  	for {
 12366  		v_0 := v.Args[0]
 12367  		if v_0.Op != OpTrunc64to16 {
 12368  			break
 12369  		}
 12370  		x := v_0.Args[0]
 12371  		if x.Op != OpRsh64Ux64 {
 12372  			break
 12373  		}
 12374  		x_1 := x.Args[1]
 12375  		if x_1.Op != OpConst64 {
 12376  			break
 12377  		}
 12378  		s := x_1.AuxInt
 12379  		if !(s >= 48) {
 12380  			break
 12381  		}
 12382  		v.reset(OpCopy)
 12383  		v.Type = x.Type
 12384  		v.AddArg(x)
 12385  		return true
 12386  	}
 12387  	return false
 12388  }
 12389  func rewriteValuegeneric_OpZeroExt32to64(v *Value, config *Config) bool {
 12390  	b := v.Block
 12391  	_ = b
 12392  	// match: (ZeroExt32to64 (Const32 [c]))
 12393  	// cond:
 12394  	// result: (Const64 [int64(uint32(c))])
 12395  	for {
 12396  		v_0 := v.Args[0]
 12397  		if v_0.Op != OpConst32 {
 12398  			break
 12399  		}
 12400  		c := v_0.AuxInt
 12401  		v.reset(OpConst64)
 12402  		v.AuxInt = int64(uint32(c))
 12403  		return true
 12404  	}
 12405  	// match: (ZeroExt32to64 (Trunc64to32 x:(Rsh64Ux64 _ (Const64 [s]))))
 12406  	// cond: s >= 32
 12407  	// result: x
 12408  	for {
 12409  		v_0 := v.Args[0]
 12410  		if v_0.Op != OpTrunc64to32 {
 12411  			break
 12412  		}
 12413  		x := v_0.Args[0]
 12414  		if x.Op != OpRsh64Ux64 {
 12415  			break
 12416  		}
 12417  		x_1 := x.Args[1]
 12418  		if x_1.Op != OpConst64 {
 12419  			break
 12420  		}
 12421  		s := x_1.AuxInt
 12422  		if !(s >= 32) {
 12423  			break
 12424  		}
 12425  		v.reset(OpCopy)
 12426  		v.Type = x.Type
 12427  		v.AddArg(x)
 12428  		return true
 12429  	}
 12430  	return false
 12431  }
 12432  func rewriteValuegeneric_OpZeroExt8to16(v *Value, config *Config) bool {
 12433  	b := v.Block
 12434  	_ = b
 12435  	// match: (ZeroExt8to16  (Const8  [c]))
 12436  	// cond:
 12437  	// result: (Const16 [int64( uint8(c))])
 12438  	for {
 12439  		v_0 := v.Args[0]
 12440  		if v_0.Op != OpConst8 {
 12441  			break
 12442  		}
 12443  		c := v_0.AuxInt
 12444  		v.reset(OpConst16)
 12445  		v.AuxInt = int64(uint8(c))
 12446  		return true
 12447  	}
 12448  	// match: (ZeroExt8to16  (Trunc16to8  x:(Rsh16Ux64 _ (Const64 [s]))))
 12449  	// cond: s >= 8
 12450  	// result: x
 12451  	for {
 12452  		v_0 := v.Args[0]
 12453  		if v_0.Op != OpTrunc16to8 {
 12454  			break
 12455  		}
 12456  		x := v_0.Args[0]
 12457  		if x.Op != OpRsh16Ux64 {
 12458  			break
 12459  		}
 12460  		x_1 := x.Args[1]
 12461  		if x_1.Op != OpConst64 {
 12462  			break
 12463  		}
 12464  		s := x_1.AuxInt
 12465  		if !(s >= 8) {
 12466  			break
 12467  		}
 12468  		v.reset(OpCopy)
 12469  		v.Type = x.Type
 12470  		v.AddArg(x)
 12471  		return true
 12472  	}
 12473  	return false
 12474  }
 12475  func rewriteValuegeneric_OpZeroExt8to32(v *Value, config *Config) bool {
 12476  	b := v.Block
 12477  	_ = b
 12478  	// match: (ZeroExt8to32  (Const8  [c]))
 12479  	// cond:
 12480  	// result: (Const32 [int64( uint8(c))])
 12481  	for {
 12482  		v_0 := v.Args[0]
 12483  		if v_0.Op != OpConst8 {
 12484  			break
 12485  		}
 12486  		c := v_0.AuxInt
 12487  		v.reset(OpConst32)
 12488  		v.AuxInt = int64(uint8(c))
 12489  		return true
 12490  	}
 12491  	// match: (ZeroExt8to32  (Trunc32to8  x:(Rsh32Ux64 _ (Const64 [s]))))
 12492  	// cond: s >= 24
 12493  	// result: x
 12494  	for {
 12495  		v_0 := v.Args[0]
 12496  		if v_0.Op != OpTrunc32to8 {
 12497  			break
 12498  		}
 12499  		x := v_0.Args[0]
 12500  		if x.Op != OpRsh32Ux64 {
 12501  			break
 12502  		}
 12503  		x_1 := x.Args[1]
 12504  		if x_1.Op != OpConst64 {
 12505  			break
 12506  		}
 12507  		s := x_1.AuxInt
 12508  		if !(s >= 24) {
 12509  			break
 12510  		}
 12511  		v.reset(OpCopy)
 12512  		v.Type = x.Type
 12513  		v.AddArg(x)
 12514  		return true
 12515  	}
 12516  	return false
 12517  }
 12518  func rewriteValuegeneric_OpZeroExt8to64(v *Value, config *Config) bool {
 12519  	b := v.Block
 12520  	_ = b
 12521  	// match: (ZeroExt8to64  (Const8  [c]))
 12522  	// cond:
 12523  	// result: (Const64 [int64( uint8(c))])
 12524  	for {
 12525  		v_0 := v.Args[0]
 12526  		if v_0.Op != OpConst8 {
 12527  			break
 12528  		}
 12529  		c := v_0.AuxInt
 12530  		v.reset(OpConst64)
 12531  		v.AuxInt = int64(uint8(c))
 12532  		return true
 12533  	}
 12534  	// match: (ZeroExt8to64  (Trunc64to8  x:(Rsh64Ux64 _ (Const64 [s]))))
 12535  	// cond: s >= 56
 12536  	// result: x
 12537  	for {
 12538  		v_0 := v.Args[0]
 12539  		if v_0.Op != OpTrunc64to8 {
 12540  			break
 12541  		}
 12542  		x := v_0.Args[0]
 12543  		if x.Op != OpRsh64Ux64 {
 12544  			break
 12545  		}
 12546  		x_1 := x.Args[1]
 12547  		if x_1.Op != OpConst64 {
 12548  			break
 12549  		}
 12550  		s := x_1.AuxInt
 12551  		if !(s >= 56) {
 12552  			break
 12553  		}
 12554  		v.reset(OpCopy)
 12555  		v.Type = x.Type
 12556  		v.AddArg(x)
 12557  		return true
 12558  	}
 12559  	return false
 12560  }
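// Zero extension of a constant widens the unsigned value of the narrow type, e.g.
// ZeroExt8to16 (Const8 [-1]) => Const16 [0xFF]. The Trunc/Rsh rules above drop the
// whole round trip when the truncated value came from an unsigned right shift that
// already cleared the bits being re-zeroed: in ZeroExt32to64 (Trunc64to32 x), a
// shift count s >= 32 on the 64-bit x guarantees its upper 32 bits are zero, so x
// itself is the result.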
 12561  func rewriteBlockgeneric(b *Block, config *Config) bool {
 12562  	switch b.Kind {
 12563  	case BlockIf:
 12564  		// match: (If (Not cond) yes no)
 12565  		// cond:
 12566  		// result: (If cond no yes)
 12567  		for {
 12568  			v := b.Control
 12569  			if v.Op != OpNot {
 12570  				break
 12571  			}
 12572  			cond := v.Args[0]
 12573  			yes := b.Succs[0]
 12574  			no := b.Succs[1]
 12575  			b.Kind = BlockIf
 12576  			b.SetControl(cond)
 12577  			b.swapSuccessors()
 12578  			_ = no
 12579  			_ = yes
 12580  			return true
 12581  		}
 12582  		// match: (If (ConstBool [c]) yes no)
 12583  		// cond: c == 1
 12584  		// result: (First nil yes no)
 12585  		for {
 12586  			v := b.Control
 12587  			if v.Op != OpConstBool {
 12588  				break
 12589  			}
 12590  			c := v.AuxInt
 12591  			yes := b.Succs[0]
 12592  			no := b.Succs[1]
 12593  			if !(c == 1) {
 12594  				break
 12595  			}
 12596  			b.Kind = BlockFirst
 12597  			b.SetControl(nil)
 12598  			_ = yes
 12599  			_ = no
 12600  			return true
 12601  		}
 12602  		// match: (If (ConstBool [c]) yes no)
 12603  		// cond: c == 0
 12604  		// result: (First nil no yes)
 12605  		for {
 12606  			v := b.Control
 12607  			if v.Op != OpConstBool {
 12608  				break
 12609  			}
 12610  			c := v.AuxInt
 12611  			yes := b.Succs[0]
 12612  			no := b.Succs[1]
 12613  			if !(c == 0) {
 12614  				break
 12615  			}
 12616  			b.Kind = BlockFirst
 12617  			b.SetControl(nil)
 12618  			b.swapSuccessors()
 12619  			_ = no
 12620  			_ = yes
 12621  			return true
 12622  		}
 12623  	}
 12624  	return false
 12625  }
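// Block rewrites: an If whose control is (Not cond) keeps the If kind but swaps
// its successors, and an If on a ConstBool collapses to a First block with no
// control, keeping the successor order for c == 1 and swapping it for c == 0,
// which leaves the untaken edge to be removed by dead-code elimination.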