github.com/slayercat/go@v0.0.0-20170428012452-c51559813f61/src/cmd/compile/internal/ssa/rewriteMIPS.go

     1  // Code generated from gen/MIPS.rules; DO NOT EDIT.
     2  // generated with: cd gen; go run *.go
     3  
     4  package ssa
     5  
     6  import "math"
     7  import "cmd/internal/obj"
     8  import "cmd/internal/objabi"
     9  
    10  var _ = math.MinInt8  // in case not otherwise used
    11  var _ = obj.ANOP      // in case not otherwise used
    12  var _ = objabi.GOROOT // in case not otherwise used
    13  
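         // rewriteValueMIPS is the per-value entry point generated from gen/MIPS.rules:
         // it switches on v.Op and hands the value to one per-opcode helper, each of
         // which reports whether it rewrote v in place. The SSA rewrite pass is
         // understood to call it repeatedly until no rule fires for any value.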
    14  func rewriteValueMIPS(v *Value) bool {
    15  	switch v.Op {
    16  	case OpAdd16:
    17  		return rewriteValueMIPS_OpAdd16_0(v)
    18  	case OpAdd32:
    19  		return rewriteValueMIPS_OpAdd32_0(v)
    20  	case OpAdd32F:
    21  		return rewriteValueMIPS_OpAdd32F_0(v)
    22  	case OpAdd32withcarry:
    23  		return rewriteValueMIPS_OpAdd32withcarry_0(v)
    24  	case OpAdd64F:
    25  		return rewriteValueMIPS_OpAdd64F_0(v)
    26  	case OpAdd8:
    27  		return rewriteValueMIPS_OpAdd8_0(v)
    28  	case OpAddPtr:
    29  		return rewriteValueMIPS_OpAddPtr_0(v)
    30  	case OpAddr:
    31  		return rewriteValueMIPS_OpAddr_0(v)
    32  	case OpAnd16:
    33  		return rewriteValueMIPS_OpAnd16_0(v)
    34  	case OpAnd32:
    35  		return rewriteValueMIPS_OpAnd32_0(v)
    36  	case OpAnd8:
    37  		return rewriteValueMIPS_OpAnd8_0(v)
    38  	case OpAndB:
    39  		return rewriteValueMIPS_OpAndB_0(v)
    40  	case OpAtomicAdd32:
    41  		return rewriteValueMIPS_OpAtomicAdd32_0(v)
    42  	case OpAtomicAnd8:
    43  		return rewriteValueMIPS_OpAtomicAnd8_0(v)
    44  	case OpAtomicCompareAndSwap32:
    45  		return rewriteValueMIPS_OpAtomicCompareAndSwap32_0(v)
    46  	case OpAtomicExchange32:
    47  		return rewriteValueMIPS_OpAtomicExchange32_0(v)
    48  	case OpAtomicLoad32:
    49  		return rewriteValueMIPS_OpAtomicLoad32_0(v)
    50  	case OpAtomicLoadPtr:
    51  		return rewriteValueMIPS_OpAtomicLoadPtr_0(v)
    52  	case OpAtomicOr8:
    53  		return rewriteValueMIPS_OpAtomicOr8_0(v)
    54  	case OpAtomicStore32:
    55  		return rewriteValueMIPS_OpAtomicStore32_0(v)
    56  	case OpAtomicStorePtrNoWB:
    57  		return rewriteValueMIPS_OpAtomicStorePtrNoWB_0(v)
    58  	case OpAvg32u:
    59  		return rewriteValueMIPS_OpAvg32u_0(v)
    60  	case OpBitLen32:
    61  		return rewriteValueMIPS_OpBitLen32_0(v)
    62  	case OpClosureCall:
    63  		return rewriteValueMIPS_OpClosureCall_0(v)
    64  	case OpCom16:
    65  		return rewriteValueMIPS_OpCom16_0(v)
    66  	case OpCom32:
    67  		return rewriteValueMIPS_OpCom32_0(v)
    68  	case OpCom8:
    69  		return rewriteValueMIPS_OpCom8_0(v)
    70  	case OpConst16:
    71  		return rewriteValueMIPS_OpConst16_0(v)
    72  	case OpConst32:
    73  		return rewriteValueMIPS_OpConst32_0(v)
    74  	case OpConst32F:
    75  		return rewriteValueMIPS_OpConst32F_0(v)
    76  	case OpConst64F:
    77  		return rewriteValueMIPS_OpConst64F_0(v)
    78  	case OpConst8:
    79  		return rewriteValueMIPS_OpConst8_0(v)
    80  	case OpConstBool:
    81  		return rewriteValueMIPS_OpConstBool_0(v)
    82  	case OpConstNil:
    83  		return rewriteValueMIPS_OpConstNil_0(v)
    84  	case OpConvert:
    85  		return rewriteValueMIPS_OpConvert_0(v)
    86  	case OpCtz32:
    87  		return rewriteValueMIPS_OpCtz32_0(v)
    88  	case OpCvt32Fto32:
    89  		return rewriteValueMIPS_OpCvt32Fto32_0(v)
    90  	case OpCvt32Fto64F:
    91  		return rewriteValueMIPS_OpCvt32Fto64F_0(v)
    92  	case OpCvt32to32F:
    93  		return rewriteValueMIPS_OpCvt32to32F_0(v)
    94  	case OpCvt32to64F:
    95  		return rewriteValueMIPS_OpCvt32to64F_0(v)
    96  	case OpCvt64Fto32:
    97  		return rewriteValueMIPS_OpCvt64Fto32_0(v)
    98  	case OpCvt64Fto32F:
    99  		return rewriteValueMIPS_OpCvt64Fto32F_0(v)
   100  	case OpDiv16:
   101  		return rewriteValueMIPS_OpDiv16_0(v)
   102  	case OpDiv16u:
   103  		return rewriteValueMIPS_OpDiv16u_0(v)
   104  	case OpDiv32:
   105  		return rewriteValueMIPS_OpDiv32_0(v)
   106  	case OpDiv32F:
   107  		return rewriteValueMIPS_OpDiv32F_0(v)
   108  	case OpDiv32u:
   109  		return rewriteValueMIPS_OpDiv32u_0(v)
   110  	case OpDiv64F:
   111  		return rewriteValueMIPS_OpDiv64F_0(v)
   112  	case OpDiv8:
   113  		return rewriteValueMIPS_OpDiv8_0(v)
   114  	case OpDiv8u:
   115  		return rewriteValueMIPS_OpDiv8u_0(v)
   116  	case OpEq16:
   117  		return rewriteValueMIPS_OpEq16_0(v)
   118  	case OpEq32:
   119  		return rewriteValueMIPS_OpEq32_0(v)
   120  	case OpEq32F:
   121  		return rewriteValueMIPS_OpEq32F_0(v)
   122  	case OpEq64F:
   123  		return rewriteValueMIPS_OpEq64F_0(v)
   124  	case OpEq8:
   125  		return rewriteValueMIPS_OpEq8_0(v)
   126  	case OpEqB:
   127  		return rewriteValueMIPS_OpEqB_0(v)
   128  	case OpEqPtr:
   129  		return rewriteValueMIPS_OpEqPtr_0(v)
   130  	case OpGeq16:
   131  		return rewriteValueMIPS_OpGeq16_0(v)
   132  	case OpGeq16U:
   133  		return rewriteValueMIPS_OpGeq16U_0(v)
   134  	case OpGeq32:
   135  		return rewriteValueMIPS_OpGeq32_0(v)
   136  	case OpGeq32F:
   137  		return rewriteValueMIPS_OpGeq32F_0(v)
   138  	case OpGeq32U:
   139  		return rewriteValueMIPS_OpGeq32U_0(v)
   140  	case OpGeq64F:
   141  		return rewriteValueMIPS_OpGeq64F_0(v)
   142  	case OpGeq8:
   143  		return rewriteValueMIPS_OpGeq8_0(v)
   144  	case OpGeq8U:
   145  		return rewriteValueMIPS_OpGeq8U_0(v)
   146  	case OpGetClosurePtr:
   147  		return rewriteValueMIPS_OpGetClosurePtr_0(v)
   148  	case OpGreater16:
   149  		return rewriteValueMIPS_OpGreater16_0(v)
   150  	case OpGreater16U:
   151  		return rewriteValueMIPS_OpGreater16U_0(v)
   152  	case OpGreater32:
   153  		return rewriteValueMIPS_OpGreater32_0(v)
   154  	case OpGreater32F:
   155  		return rewriteValueMIPS_OpGreater32F_0(v)
   156  	case OpGreater32U:
   157  		return rewriteValueMIPS_OpGreater32U_0(v)
   158  	case OpGreater64F:
   159  		return rewriteValueMIPS_OpGreater64F_0(v)
   160  	case OpGreater8:
   161  		return rewriteValueMIPS_OpGreater8_0(v)
   162  	case OpGreater8U:
   163  		return rewriteValueMIPS_OpGreater8U_0(v)
   164  	case OpHmul32:
   165  		return rewriteValueMIPS_OpHmul32_0(v)
   166  	case OpHmul32u:
   167  		return rewriteValueMIPS_OpHmul32u_0(v)
   168  	case OpInterCall:
   169  		return rewriteValueMIPS_OpInterCall_0(v)
   170  	case OpIsInBounds:
   171  		return rewriteValueMIPS_OpIsInBounds_0(v)
   172  	case OpIsNonNil:
   173  		return rewriteValueMIPS_OpIsNonNil_0(v)
   174  	case OpIsSliceInBounds:
   175  		return rewriteValueMIPS_OpIsSliceInBounds_0(v)
   176  	case OpLeq16:
   177  		return rewriteValueMIPS_OpLeq16_0(v)
   178  	case OpLeq16U:
   179  		return rewriteValueMIPS_OpLeq16U_0(v)
   180  	case OpLeq32:
   181  		return rewriteValueMIPS_OpLeq32_0(v)
   182  	case OpLeq32F:
   183  		return rewriteValueMIPS_OpLeq32F_0(v)
   184  	case OpLeq32U:
   185  		return rewriteValueMIPS_OpLeq32U_0(v)
   186  	case OpLeq64F:
   187  		return rewriteValueMIPS_OpLeq64F_0(v)
   188  	case OpLeq8:
   189  		return rewriteValueMIPS_OpLeq8_0(v)
   190  	case OpLeq8U:
   191  		return rewriteValueMIPS_OpLeq8U_0(v)
   192  	case OpLess16:
   193  		return rewriteValueMIPS_OpLess16_0(v)
   194  	case OpLess16U:
   195  		return rewriteValueMIPS_OpLess16U_0(v)
   196  	case OpLess32:
   197  		return rewriteValueMIPS_OpLess32_0(v)
   198  	case OpLess32F:
   199  		return rewriteValueMIPS_OpLess32F_0(v)
   200  	case OpLess32U:
   201  		return rewriteValueMIPS_OpLess32U_0(v)
   202  	case OpLess64F:
   203  		return rewriteValueMIPS_OpLess64F_0(v)
   204  	case OpLess8:
   205  		return rewriteValueMIPS_OpLess8_0(v)
   206  	case OpLess8U:
   207  		return rewriteValueMIPS_OpLess8U_0(v)
   208  	case OpLoad:
   209  		return rewriteValueMIPS_OpLoad_0(v)
   210  	case OpLsh16x16:
   211  		return rewriteValueMIPS_OpLsh16x16_0(v)
   212  	case OpLsh16x32:
   213  		return rewriteValueMIPS_OpLsh16x32_0(v)
   214  	case OpLsh16x64:
   215  		return rewriteValueMIPS_OpLsh16x64_0(v)
   216  	case OpLsh16x8:
   217  		return rewriteValueMIPS_OpLsh16x8_0(v)
   218  	case OpLsh32x16:
   219  		return rewriteValueMIPS_OpLsh32x16_0(v)
   220  	case OpLsh32x32:
   221  		return rewriteValueMIPS_OpLsh32x32_0(v)
   222  	case OpLsh32x64:
   223  		return rewriteValueMIPS_OpLsh32x64_0(v)
   224  	case OpLsh32x8:
   225  		return rewriteValueMIPS_OpLsh32x8_0(v)
   226  	case OpLsh8x16:
   227  		return rewriteValueMIPS_OpLsh8x16_0(v)
   228  	case OpLsh8x32:
   229  		return rewriteValueMIPS_OpLsh8x32_0(v)
   230  	case OpLsh8x64:
   231  		return rewriteValueMIPS_OpLsh8x64_0(v)
   232  	case OpLsh8x8:
   233  		return rewriteValueMIPS_OpLsh8x8_0(v)
   234  	case OpMIPSADD:
   235  		return rewriteValueMIPS_OpMIPSADD_0(v)
   236  	case OpMIPSADDconst:
   237  		return rewriteValueMIPS_OpMIPSADDconst_0(v)
   238  	case OpMIPSAND:
   239  		return rewriteValueMIPS_OpMIPSAND_0(v)
   240  	case OpMIPSANDconst:
   241  		return rewriteValueMIPS_OpMIPSANDconst_0(v)
   242  	case OpMIPSCMOVZ:
   243  		return rewriteValueMIPS_OpMIPSCMOVZ_0(v)
   244  	case OpMIPSCMOVZzero:
   245  		return rewriteValueMIPS_OpMIPSCMOVZzero_0(v)
   246  	case OpMIPSLoweredAtomicAdd:
   247  		return rewriteValueMIPS_OpMIPSLoweredAtomicAdd_0(v)
   248  	case OpMIPSLoweredAtomicStore:
   249  		return rewriteValueMIPS_OpMIPSLoweredAtomicStore_0(v)
   250  	case OpMIPSMOVBUload:
   251  		return rewriteValueMIPS_OpMIPSMOVBUload_0(v)
   252  	case OpMIPSMOVBUreg:
   253  		return rewriteValueMIPS_OpMIPSMOVBUreg_0(v)
   254  	case OpMIPSMOVBload:
   255  		return rewriteValueMIPS_OpMIPSMOVBload_0(v)
   256  	case OpMIPSMOVBreg:
   257  		return rewriteValueMIPS_OpMIPSMOVBreg_0(v)
   258  	case OpMIPSMOVBstore:
   259  		return rewriteValueMIPS_OpMIPSMOVBstore_0(v)
   260  	case OpMIPSMOVBstorezero:
   261  		return rewriteValueMIPS_OpMIPSMOVBstorezero_0(v)
   262  	case OpMIPSMOVDload:
   263  		return rewriteValueMIPS_OpMIPSMOVDload_0(v)
   264  	case OpMIPSMOVDstore:
   265  		return rewriteValueMIPS_OpMIPSMOVDstore_0(v)
   266  	case OpMIPSMOVFload:
   267  		return rewriteValueMIPS_OpMIPSMOVFload_0(v)
   268  	case OpMIPSMOVFstore:
   269  		return rewriteValueMIPS_OpMIPSMOVFstore_0(v)
   270  	case OpMIPSMOVHUload:
   271  		return rewriteValueMIPS_OpMIPSMOVHUload_0(v)
   272  	case OpMIPSMOVHUreg:
   273  		return rewriteValueMIPS_OpMIPSMOVHUreg_0(v)
   274  	case OpMIPSMOVHload:
   275  		return rewriteValueMIPS_OpMIPSMOVHload_0(v)
   276  	case OpMIPSMOVHreg:
   277  		return rewriteValueMIPS_OpMIPSMOVHreg_0(v)
   278  	case OpMIPSMOVHstore:
   279  		return rewriteValueMIPS_OpMIPSMOVHstore_0(v)
   280  	case OpMIPSMOVHstorezero:
   281  		return rewriteValueMIPS_OpMIPSMOVHstorezero_0(v)
   282  	case OpMIPSMOVWload:
   283  		return rewriteValueMIPS_OpMIPSMOVWload_0(v)
   284  	case OpMIPSMOVWreg:
   285  		return rewriteValueMIPS_OpMIPSMOVWreg_0(v)
   286  	case OpMIPSMOVWstore:
   287  		return rewriteValueMIPS_OpMIPSMOVWstore_0(v)
   288  	case OpMIPSMOVWstorezero:
   289  		return rewriteValueMIPS_OpMIPSMOVWstorezero_0(v)
   290  	case OpMIPSMUL:
   291  		return rewriteValueMIPS_OpMIPSMUL_0(v)
   292  	case OpMIPSNEG:
   293  		return rewriteValueMIPS_OpMIPSNEG_0(v)
   294  	case OpMIPSNOR:
   295  		return rewriteValueMIPS_OpMIPSNOR_0(v)
   296  	case OpMIPSNORconst:
   297  		return rewriteValueMIPS_OpMIPSNORconst_0(v)
   298  	case OpMIPSOR:
   299  		return rewriteValueMIPS_OpMIPSOR_0(v)
   300  	case OpMIPSORconst:
   301  		return rewriteValueMIPS_OpMIPSORconst_0(v)
   302  	case OpMIPSSGT:
   303  		return rewriteValueMIPS_OpMIPSSGT_0(v)
   304  	case OpMIPSSGTU:
   305  		return rewriteValueMIPS_OpMIPSSGTU_0(v)
   306  	case OpMIPSSGTUconst:
   307  		return rewriteValueMIPS_OpMIPSSGTUconst_0(v)
   308  	case OpMIPSSGTUzero:
   309  		return rewriteValueMIPS_OpMIPSSGTUzero_0(v)
   310  	case OpMIPSSGTconst:
   311  		return rewriteValueMIPS_OpMIPSSGTconst_0(v) || rewriteValueMIPS_OpMIPSSGTconst_10(v)
   312  	case OpMIPSSGTzero:
   313  		return rewriteValueMIPS_OpMIPSSGTzero_0(v)
   314  	case OpMIPSSLL:
   315  		return rewriteValueMIPS_OpMIPSSLL_0(v)
   316  	case OpMIPSSLLconst:
   317  		return rewriteValueMIPS_OpMIPSSLLconst_0(v)
   318  	case OpMIPSSRA:
   319  		return rewriteValueMIPS_OpMIPSSRA_0(v)
   320  	case OpMIPSSRAconst:
   321  		return rewriteValueMIPS_OpMIPSSRAconst_0(v)
   322  	case OpMIPSSRL:
   323  		return rewriteValueMIPS_OpMIPSSRL_0(v)
   324  	case OpMIPSSRLconst:
   325  		return rewriteValueMIPS_OpMIPSSRLconst_0(v)
   326  	case OpMIPSSUB:
   327  		return rewriteValueMIPS_OpMIPSSUB_0(v)
   328  	case OpMIPSSUBconst:
   329  		return rewriteValueMIPS_OpMIPSSUBconst_0(v)
   330  	case OpMIPSXOR:
   331  		return rewriteValueMIPS_OpMIPSXOR_0(v)
   332  	case OpMIPSXORconst:
   333  		return rewriteValueMIPS_OpMIPSXORconst_0(v)
   334  	case OpMod16:
   335  		return rewriteValueMIPS_OpMod16_0(v)
   336  	case OpMod16u:
   337  		return rewriteValueMIPS_OpMod16u_0(v)
   338  	case OpMod32:
   339  		return rewriteValueMIPS_OpMod32_0(v)
   340  	case OpMod32u:
   341  		return rewriteValueMIPS_OpMod32u_0(v)
   342  	case OpMod8:
   343  		return rewriteValueMIPS_OpMod8_0(v)
   344  	case OpMod8u:
   345  		return rewriteValueMIPS_OpMod8u_0(v)
   346  	case OpMove:
   347  		return rewriteValueMIPS_OpMove_0(v) || rewriteValueMIPS_OpMove_10(v)
   348  	case OpMul16:
   349  		return rewriteValueMIPS_OpMul16_0(v)
   350  	case OpMul32:
   351  		return rewriteValueMIPS_OpMul32_0(v)
   352  	case OpMul32F:
   353  		return rewriteValueMIPS_OpMul32F_0(v)
   354  	case OpMul32uhilo:
   355  		return rewriteValueMIPS_OpMul32uhilo_0(v)
   356  	case OpMul64F:
   357  		return rewriteValueMIPS_OpMul64F_0(v)
   358  	case OpMul8:
   359  		return rewriteValueMIPS_OpMul8_0(v)
   360  	case OpNeg16:
   361  		return rewriteValueMIPS_OpNeg16_0(v)
   362  	case OpNeg32:
   363  		return rewriteValueMIPS_OpNeg32_0(v)
   364  	case OpNeg32F:
   365  		return rewriteValueMIPS_OpNeg32F_0(v)
   366  	case OpNeg64F:
   367  		return rewriteValueMIPS_OpNeg64F_0(v)
   368  	case OpNeg8:
   369  		return rewriteValueMIPS_OpNeg8_0(v)
   370  	case OpNeq16:
   371  		return rewriteValueMIPS_OpNeq16_0(v)
   372  	case OpNeq32:
   373  		return rewriteValueMIPS_OpNeq32_0(v)
   374  	case OpNeq32F:
   375  		return rewriteValueMIPS_OpNeq32F_0(v)
   376  	case OpNeq64F:
   377  		return rewriteValueMIPS_OpNeq64F_0(v)
   378  	case OpNeq8:
   379  		return rewriteValueMIPS_OpNeq8_0(v)
   380  	case OpNeqB:
   381  		return rewriteValueMIPS_OpNeqB_0(v)
   382  	case OpNeqPtr:
   383  		return rewriteValueMIPS_OpNeqPtr_0(v)
   384  	case OpNilCheck:
   385  		return rewriteValueMIPS_OpNilCheck_0(v)
   386  	case OpNot:
   387  		return rewriteValueMIPS_OpNot_0(v)
   388  	case OpOffPtr:
   389  		return rewriteValueMIPS_OpOffPtr_0(v)
   390  	case OpOr16:
   391  		return rewriteValueMIPS_OpOr16_0(v)
   392  	case OpOr32:
   393  		return rewriteValueMIPS_OpOr32_0(v)
   394  	case OpOr8:
   395  		return rewriteValueMIPS_OpOr8_0(v)
   396  	case OpOrB:
   397  		return rewriteValueMIPS_OpOrB_0(v)
   398  	case OpRound32F:
   399  		return rewriteValueMIPS_OpRound32F_0(v)
   400  	case OpRound64F:
   401  		return rewriteValueMIPS_OpRound64F_0(v)
   402  	case OpRsh16Ux16:
   403  		return rewriteValueMIPS_OpRsh16Ux16_0(v)
   404  	case OpRsh16Ux32:
   405  		return rewriteValueMIPS_OpRsh16Ux32_0(v)
   406  	case OpRsh16Ux64:
   407  		return rewriteValueMIPS_OpRsh16Ux64_0(v)
   408  	case OpRsh16Ux8:
   409  		return rewriteValueMIPS_OpRsh16Ux8_0(v)
   410  	case OpRsh16x16:
   411  		return rewriteValueMIPS_OpRsh16x16_0(v)
   412  	case OpRsh16x32:
   413  		return rewriteValueMIPS_OpRsh16x32_0(v)
   414  	case OpRsh16x64:
   415  		return rewriteValueMIPS_OpRsh16x64_0(v)
   416  	case OpRsh16x8:
   417  		return rewriteValueMIPS_OpRsh16x8_0(v)
   418  	case OpRsh32Ux16:
   419  		return rewriteValueMIPS_OpRsh32Ux16_0(v)
   420  	case OpRsh32Ux32:
   421  		return rewriteValueMIPS_OpRsh32Ux32_0(v)
   422  	case OpRsh32Ux64:
   423  		return rewriteValueMIPS_OpRsh32Ux64_0(v)
   424  	case OpRsh32Ux8:
   425  		return rewriteValueMIPS_OpRsh32Ux8_0(v)
   426  	case OpRsh32x16:
   427  		return rewriteValueMIPS_OpRsh32x16_0(v)
   428  	case OpRsh32x32:
   429  		return rewriteValueMIPS_OpRsh32x32_0(v)
   430  	case OpRsh32x64:
   431  		return rewriteValueMIPS_OpRsh32x64_0(v)
   432  	case OpRsh32x8:
   433  		return rewriteValueMIPS_OpRsh32x8_0(v)
   434  	case OpRsh8Ux16:
   435  		return rewriteValueMIPS_OpRsh8Ux16_0(v)
   436  	case OpRsh8Ux32:
   437  		return rewriteValueMIPS_OpRsh8Ux32_0(v)
   438  	case OpRsh8Ux64:
   439  		return rewriteValueMIPS_OpRsh8Ux64_0(v)
   440  	case OpRsh8Ux8:
   441  		return rewriteValueMIPS_OpRsh8Ux8_0(v)
   442  	case OpRsh8x16:
   443  		return rewriteValueMIPS_OpRsh8x16_0(v)
   444  	case OpRsh8x32:
   445  		return rewriteValueMIPS_OpRsh8x32_0(v)
   446  	case OpRsh8x64:
   447  		return rewriteValueMIPS_OpRsh8x64_0(v)
   448  	case OpRsh8x8:
   449  		return rewriteValueMIPS_OpRsh8x8_0(v)
   450  	case OpSelect0:
   451  		return rewriteValueMIPS_OpSelect0_0(v) || rewriteValueMIPS_OpSelect0_10(v)
   452  	case OpSelect1:
   453  		return rewriteValueMIPS_OpSelect1_0(v) || rewriteValueMIPS_OpSelect1_10(v)
   454  	case OpSignExt16to32:
   455  		return rewriteValueMIPS_OpSignExt16to32_0(v)
   456  	case OpSignExt8to16:
   457  		return rewriteValueMIPS_OpSignExt8to16_0(v)
   458  	case OpSignExt8to32:
   459  		return rewriteValueMIPS_OpSignExt8to32_0(v)
   460  	case OpSignmask:
   461  		return rewriteValueMIPS_OpSignmask_0(v)
   462  	case OpSlicemask:
   463  		return rewriteValueMIPS_OpSlicemask_0(v)
   464  	case OpSqrt:
   465  		return rewriteValueMIPS_OpSqrt_0(v)
   466  	case OpStaticCall:
   467  		return rewriteValueMIPS_OpStaticCall_0(v)
   468  	case OpStore:
   469  		return rewriteValueMIPS_OpStore_0(v)
   470  	case OpSub16:
   471  		return rewriteValueMIPS_OpSub16_0(v)
   472  	case OpSub32:
   473  		return rewriteValueMIPS_OpSub32_0(v)
   474  	case OpSub32F:
   475  		return rewriteValueMIPS_OpSub32F_0(v)
   476  	case OpSub32withcarry:
   477  		return rewriteValueMIPS_OpSub32withcarry_0(v)
   478  	case OpSub64F:
   479  		return rewriteValueMIPS_OpSub64F_0(v)
   480  	case OpSub8:
   481  		return rewriteValueMIPS_OpSub8_0(v)
   482  	case OpSubPtr:
   483  		return rewriteValueMIPS_OpSubPtr_0(v)
   484  	case OpTrunc16to8:
   485  		return rewriteValueMIPS_OpTrunc16to8_0(v)
   486  	case OpTrunc32to16:
   487  		return rewriteValueMIPS_OpTrunc32to16_0(v)
   488  	case OpTrunc32to8:
   489  		return rewriteValueMIPS_OpTrunc32to8_0(v)
   490  	case OpXor16:
   491  		return rewriteValueMIPS_OpXor16_0(v)
   492  	case OpXor32:
   493  		return rewriteValueMIPS_OpXor32_0(v)
   494  	case OpXor8:
   495  		return rewriteValueMIPS_OpXor8_0(v)
   496  	case OpZero:
   497  		return rewriteValueMIPS_OpZero_0(v) || rewriteValueMIPS_OpZero_10(v)
   498  	case OpZeroExt16to32:
   499  		return rewriteValueMIPS_OpZeroExt16to32_0(v)
   500  	case OpZeroExt8to16:
   501  		return rewriteValueMIPS_OpZeroExt8to16_0(v)
   502  	case OpZeroExt8to32:
   503  		return rewriteValueMIPS_OpZeroExt8to32_0(v)
   504  	case OpZeromask:
   505  		return rewriteValueMIPS_OpZeromask_0(v)
   506  	}
   507  	return false
   508  }
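         // Every helper below has the same generated shape: the match/cond/result
         // comments restate the rule from gen/MIPS.rules, the for{} wrapper exists only
         // so a failed condition can break out and fall through to the next rule,
         // v.reset(op) turns the matched value into the result op, AddArg rebuilds its
         // arguments, and b.NewValue0 creates any intermediate values the result needs.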
   509  func rewriteValueMIPS_OpAdd16_0(v *Value) bool {
   510  	// match: (Add16 x y)
   511  	// cond:
   512  	// result: (ADD x y)
   513  	for {
   514  		x := v.Args[0]
   515  		y := v.Args[1]
   516  		v.reset(OpMIPSADD)
   517  		v.AddArg(x)
   518  		v.AddArg(y)
   519  		return true
   520  	}
   521  }
   522  func rewriteValueMIPS_OpAdd32_0(v *Value) bool {
   523  	// match: (Add32 x y)
   524  	// cond:
   525  	// result: (ADD x y)
   526  	for {
   527  		x := v.Args[0]
   528  		y := v.Args[1]
   529  		v.reset(OpMIPSADD)
   530  		v.AddArg(x)
   531  		v.AddArg(y)
   532  		return true
   533  	}
   534  }
   535  func rewriteValueMIPS_OpAdd32F_0(v *Value) bool {
   536  	// match: (Add32F x y)
   537  	// cond:
   538  	// result: (ADDF x y)
   539  	for {
   540  		x := v.Args[0]
   541  		y := v.Args[1]
   542  		v.reset(OpMIPSADDF)
   543  		v.AddArg(x)
   544  		v.AddArg(y)
   545  		return true
   546  	}
   547  }
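         // Add32withcarry comes from decomposing 64-bit arithmetic on this 32-bit
         // target: it is understood to add x, y and a carry value c produced by the
         // low-word addition, and here it is lowered to nothing more than two ADDs.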
   548  func rewriteValueMIPS_OpAdd32withcarry_0(v *Value) bool {
   549  	b := v.Block
   550  	_ = b
   551  	// match: (Add32withcarry <t> x y c)
   552  	// cond:
   553  	// result: (ADD c (ADD <t> x y))
   554  	for {
   555  		t := v.Type
   556  		x := v.Args[0]
   557  		y := v.Args[1]
   558  		c := v.Args[2]
   559  		v.reset(OpMIPSADD)
   560  		v.AddArg(c)
   561  		v0 := b.NewValue0(v.Pos, OpMIPSADD, t)
   562  		v0.AddArg(x)
   563  		v0.AddArg(y)
   564  		v.AddArg(v0)
   565  		return true
   566  	}
   567  }
   568  func rewriteValueMIPS_OpAdd64F_0(v *Value) bool {
   569  	// match: (Add64F x y)
   570  	// cond:
   571  	// result: (ADDD x y)
   572  	for {
   573  		x := v.Args[0]
   574  		y := v.Args[1]
   575  		v.reset(OpMIPSADDD)
   576  		v.AddArg(x)
   577  		v.AddArg(y)
   578  		return true
   579  	}
   580  }
   581  func rewriteValueMIPS_OpAdd8_0(v *Value) bool {
   582  	// match: (Add8 x y)
   583  	// cond:
   584  	// result: (ADD x y)
   585  	for {
   586  		x := v.Args[0]
   587  		y := v.Args[1]
   588  		v.reset(OpMIPSADD)
   589  		v.AddArg(x)
   590  		v.AddArg(y)
   591  		return true
   592  	}
   593  }
   594  func rewriteValueMIPS_OpAddPtr_0(v *Value) bool {
   595  	// match: (AddPtr x y)
   596  	// cond:
   597  	// result: (ADD x y)
   598  	for {
   599  		x := v.Args[0]
   600  		y := v.Args[1]
   601  		v.reset(OpMIPSADD)
   602  		v.AddArg(x)
   603  		v.AddArg(y)
   604  		return true
   605  	}
   606  }
   607  func rewriteValueMIPS_OpAddr_0(v *Value) bool {
   608  	// match: (Addr {sym} base)
   609  	// cond:
   610  	// result: (MOVWaddr {sym} base)
   611  	for {
   612  		sym := v.Aux
   613  		base := v.Args[0]
   614  		v.reset(OpMIPSMOVWaddr)
   615  		v.Aux = sym
   616  		v.AddArg(base)
   617  		return true
   618  	}
   619  }
   620  func rewriteValueMIPS_OpAnd16_0(v *Value) bool {
   621  	// match: (And16 x y)
   622  	// cond:
   623  	// result: (AND x y)
   624  	for {
   625  		x := v.Args[0]
   626  		y := v.Args[1]
   627  		v.reset(OpMIPSAND)
   628  		v.AddArg(x)
   629  		v.AddArg(y)
   630  		return true
   631  	}
   632  }
   633  func rewriteValueMIPS_OpAnd32_0(v *Value) bool {
   634  	// match: (And32 x y)
   635  	// cond:
   636  	// result: (AND x y)
   637  	for {
   638  		x := v.Args[0]
   639  		y := v.Args[1]
   640  		v.reset(OpMIPSAND)
   641  		v.AddArg(x)
   642  		v.AddArg(y)
   643  		return true
   644  	}
   645  }
   646  func rewriteValueMIPS_OpAnd8_0(v *Value) bool {
   647  	// match: (And8 x y)
   648  	// cond:
   649  	// result: (AND x y)
   650  	for {
   651  		x := v.Args[0]
   652  		y := v.Args[1]
   653  		v.reset(OpMIPSAND)
   654  		v.AddArg(x)
   655  		v.AddArg(y)
   656  		return true
   657  	}
   658  }
   659  func rewriteValueMIPS_OpAndB_0(v *Value) bool {
   660  	// match: (AndB x y)
   661  	// cond:
   662  	// result: (AND x y)
   663  	for {
   664  		x := v.Args[0]
   665  		y := v.Args[1]
   666  		v.reset(OpMIPSAND)
   667  		v.AddArg(x)
   668  		v.AddArg(y)
   669  		return true
   670  	}
   671  }
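         // Word-sized atomics map one-for-one onto LoweredAtomic* pseudo-ops; the MIPS
         // back end is expected to expand those into LL/SC (load-linked /
         // store-conditional) loops when the final assembly is generated.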
   672  func rewriteValueMIPS_OpAtomicAdd32_0(v *Value) bool {
   673  	// match: (AtomicAdd32 ptr val mem)
   674  	// cond:
   675  	// result: (LoweredAtomicAdd ptr val mem)
   676  	for {
   677  		ptr := v.Args[0]
   678  		val := v.Args[1]
   679  		mem := v.Args[2]
   680  		v.reset(OpMIPSLoweredAtomicAdd)
   681  		v.AddArg(ptr)
   682  		v.AddArg(val)
   683  		v.AddArg(mem)
   684  		return true
   685  	}
   686  }
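         // There is no byte-sized LL/SC on MIPS, so AtomicAnd8 is rewritten as a
         // word-sized LoweredAtomicAnd on the containing aligned word (ptr &^ 3).
         // The AND operand is built as (val << (8*(ptr&3))) OR-ed with the complement
         // (NORconst [0]) of (0xff << (8*(ptr&3))), so every other byte lane is all
         // ones and is left unchanged. The big-endian variant below flips the lane
         // index with XORconst [3], since byte 0 then lives in the most significant lane.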
   687  func rewriteValueMIPS_OpAtomicAnd8_0(v *Value) bool {
   688  	b := v.Block
   689  	_ = b
   690  	config := b.Func.Config
   691  	_ = config
   692  	types := &b.Func.Config.Types
   693  	_ = types
   694  	// match: (AtomicAnd8 ptr val mem)
   695  	// cond: !config.BigEndian
   696  	// result: (LoweredAtomicAnd (AND <types.UInt32Ptr> (MOVWconst [^3]) ptr) 		(OR <types.UInt32> (SLL <types.UInt32> (ZeroExt8to32 val) 			(SLLconst <types.UInt32> [3] 				(ANDconst  <types.UInt32> [3] ptr))) 		(NORconst [0] <types.UInt32> (SLL <types.UInt32> 			(MOVWconst [0xff]) (SLLconst <types.UInt32> [3] 				(ANDconst <types.UInt32> [3] ptr))))) mem)
   697  	for {
   698  		ptr := v.Args[0]
   699  		val := v.Args[1]
   700  		mem := v.Args[2]
   701  		if !(!config.BigEndian) {
   702  			break
   703  		}
   704  		v.reset(OpMIPSLoweredAtomicAnd)
   705  		v0 := b.NewValue0(v.Pos, OpMIPSAND, types.UInt32Ptr)
   706  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
   707  		v1.AuxInt = ^3
   708  		v0.AddArg(v1)
   709  		v0.AddArg(ptr)
   710  		v.AddArg(v0)
   711  		v2 := b.NewValue0(v.Pos, OpMIPSOR, types.UInt32)
   712  		v3 := b.NewValue0(v.Pos, OpMIPSSLL, types.UInt32)
   713  		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
   714  		v4.AddArg(val)
   715  		v3.AddArg(v4)
   716  		v5 := b.NewValue0(v.Pos, OpMIPSSLLconst, types.UInt32)
   717  		v5.AuxInt = 3
   718  		v6 := b.NewValue0(v.Pos, OpMIPSANDconst, types.UInt32)
   719  		v6.AuxInt = 3
   720  		v6.AddArg(ptr)
   721  		v5.AddArg(v6)
   722  		v3.AddArg(v5)
   723  		v2.AddArg(v3)
   724  		v7 := b.NewValue0(v.Pos, OpMIPSNORconst, types.UInt32)
   725  		v7.AuxInt = 0
   726  		v8 := b.NewValue0(v.Pos, OpMIPSSLL, types.UInt32)
   727  		v9 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
   728  		v9.AuxInt = 0xff
   729  		v8.AddArg(v9)
   730  		v10 := b.NewValue0(v.Pos, OpMIPSSLLconst, types.UInt32)
   731  		v10.AuxInt = 3
   732  		v11 := b.NewValue0(v.Pos, OpMIPSANDconst, types.UInt32)
   733  		v11.AuxInt = 3
   734  		v11.AddArg(ptr)
   735  		v10.AddArg(v11)
   736  		v8.AddArg(v10)
   737  		v7.AddArg(v8)
   738  		v2.AddArg(v7)
   739  		v.AddArg(v2)
   740  		v.AddArg(mem)
   741  		return true
   742  	}
   743  	// match: (AtomicAnd8 ptr val mem)
   744  	// cond: config.BigEndian
   745  	// result: (LoweredAtomicAnd (AND <types.UInt32Ptr> (MOVWconst [^3]) ptr) 		(OR <types.UInt32> (SLL <types.UInt32> (ZeroExt8to32 val) 			(SLLconst <types.UInt32> [3] 				(ANDconst  <types.UInt32> [3] 					(XORconst <types.UInt32> [3] ptr)))) 		(NORconst [0] <types.UInt32> (SLL <types.UInt32> 			(MOVWconst [0xff]) (SLLconst <types.UInt32> [3] 				(ANDconst <types.UInt32> [3] 					(XORconst <types.UInt32> [3] ptr)))))) mem)
   746  	for {
   747  		ptr := v.Args[0]
   748  		val := v.Args[1]
   749  		mem := v.Args[2]
   750  		if !(config.BigEndian) {
   751  			break
   752  		}
   753  		v.reset(OpMIPSLoweredAtomicAnd)
   754  		v0 := b.NewValue0(v.Pos, OpMIPSAND, types.UInt32Ptr)
   755  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
   756  		v1.AuxInt = ^3
   757  		v0.AddArg(v1)
   758  		v0.AddArg(ptr)
   759  		v.AddArg(v0)
   760  		v2 := b.NewValue0(v.Pos, OpMIPSOR, types.UInt32)
   761  		v3 := b.NewValue0(v.Pos, OpMIPSSLL, types.UInt32)
   762  		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
   763  		v4.AddArg(val)
   764  		v3.AddArg(v4)
   765  		v5 := b.NewValue0(v.Pos, OpMIPSSLLconst, types.UInt32)
   766  		v5.AuxInt = 3
   767  		v6 := b.NewValue0(v.Pos, OpMIPSANDconst, types.UInt32)
   768  		v6.AuxInt = 3
   769  		v7 := b.NewValue0(v.Pos, OpMIPSXORconst, types.UInt32)
   770  		v7.AuxInt = 3
   771  		v7.AddArg(ptr)
   772  		v6.AddArg(v7)
   773  		v5.AddArg(v6)
   774  		v3.AddArg(v5)
   775  		v2.AddArg(v3)
   776  		v8 := b.NewValue0(v.Pos, OpMIPSNORconst, types.UInt32)
   777  		v8.AuxInt = 0
   778  		v9 := b.NewValue0(v.Pos, OpMIPSSLL, types.UInt32)
   779  		v10 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
   780  		v10.AuxInt = 0xff
   781  		v9.AddArg(v10)
   782  		v11 := b.NewValue0(v.Pos, OpMIPSSLLconst, types.UInt32)
   783  		v11.AuxInt = 3
   784  		v12 := b.NewValue0(v.Pos, OpMIPSANDconst, types.UInt32)
   785  		v12.AuxInt = 3
   786  		v13 := b.NewValue0(v.Pos, OpMIPSXORconst, types.UInt32)
   787  		v13.AuxInt = 3
   788  		v13.AddArg(ptr)
   789  		v12.AddArg(v13)
   790  		v11.AddArg(v12)
   791  		v9.AddArg(v11)
   792  		v8.AddArg(v9)
   793  		v2.AddArg(v8)
   794  		v.AddArg(v2)
   795  		v.AddArg(mem)
   796  		return true
   797  	}
   798  	return false
   799  }
   800  func rewriteValueMIPS_OpAtomicCompareAndSwap32_0(v *Value) bool {
   801  	// match: (AtomicCompareAndSwap32 ptr old new_ mem)
   802  	// cond:
   803  	// result: (LoweredAtomicCas ptr old new_ mem)
   804  	for {
   805  		ptr := v.Args[0]
   806  		old := v.Args[1]
   807  		new_ := v.Args[2]
   808  		mem := v.Args[3]
   809  		v.reset(OpMIPSLoweredAtomicCas)
   810  		v.AddArg(ptr)
   811  		v.AddArg(old)
   812  		v.AddArg(new_)
   813  		v.AddArg(mem)
   814  		return true
   815  	}
   816  }
   817  func rewriteValueMIPS_OpAtomicExchange32_0(v *Value) bool {
   818  	// match: (AtomicExchange32 ptr val mem)
   819  	// cond:
   820  	// result: (LoweredAtomicExchange ptr val mem)
   821  	for {
   822  		ptr := v.Args[0]
   823  		val := v.Args[1]
   824  		mem := v.Args[2]
   825  		v.reset(OpMIPSLoweredAtomicExchange)
   826  		v.AddArg(ptr)
   827  		v.AddArg(val)
   828  		v.AddArg(mem)
   829  		return true
   830  	}
   831  }
   832  func rewriteValueMIPS_OpAtomicLoad32_0(v *Value) bool {
   833  	// match: (AtomicLoad32 ptr mem)
   834  	// cond:
   835  	// result: (LoweredAtomicLoad ptr mem)
   836  	for {
   837  		ptr := v.Args[0]
   838  		mem := v.Args[1]
   839  		v.reset(OpMIPSLoweredAtomicLoad)
   840  		v.AddArg(ptr)
   841  		v.AddArg(mem)
   842  		return true
   843  	}
   844  }
   845  func rewriteValueMIPS_OpAtomicLoadPtr_0(v *Value) bool {
   846  	// match: (AtomicLoadPtr ptr mem)
   847  	// cond:
   848  	// result: (LoweredAtomicLoad  ptr mem)
   849  	for {
   850  		ptr := v.Args[0]
   851  		mem := v.Args[1]
   852  		v.reset(OpMIPSLoweredAtomicLoad)
   853  		v.AddArg(ptr)
   854  		v.AddArg(mem)
   855  		return true
   856  	}
   857  }
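         // AtomicOr8 gets the same byte-in-word treatment as AtomicAnd8 above, but the
         // second operand only needs val shifted into its lane: OR-ing the other lanes
         // with zero already leaves them unchanged, so no inverted mask is required.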
   858  func rewriteValueMIPS_OpAtomicOr8_0(v *Value) bool {
   859  	b := v.Block
   860  	_ = b
   861  	config := b.Func.Config
   862  	_ = config
   863  	types := &b.Func.Config.Types
   864  	_ = types
   865  	// match: (AtomicOr8 ptr val mem)
   866  	// cond: !config.BigEndian
   867  	// result: (LoweredAtomicOr (AND <types.UInt32Ptr> (MOVWconst [^3]) ptr) 		(SLL <types.UInt32> (ZeroExt8to32 val) 			(SLLconst <types.UInt32> [3] 				(ANDconst <types.UInt32> [3] ptr))) mem)
   868  	for {
   869  		ptr := v.Args[0]
   870  		val := v.Args[1]
   871  		mem := v.Args[2]
   872  		if !(!config.BigEndian) {
   873  			break
   874  		}
   875  		v.reset(OpMIPSLoweredAtomicOr)
   876  		v0 := b.NewValue0(v.Pos, OpMIPSAND, types.UInt32Ptr)
   877  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
   878  		v1.AuxInt = ^3
   879  		v0.AddArg(v1)
   880  		v0.AddArg(ptr)
   881  		v.AddArg(v0)
   882  		v2 := b.NewValue0(v.Pos, OpMIPSSLL, types.UInt32)
   883  		v3 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
   884  		v3.AddArg(val)
   885  		v2.AddArg(v3)
   886  		v4 := b.NewValue0(v.Pos, OpMIPSSLLconst, types.UInt32)
   887  		v4.AuxInt = 3
   888  		v5 := b.NewValue0(v.Pos, OpMIPSANDconst, types.UInt32)
   889  		v5.AuxInt = 3
   890  		v5.AddArg(ptr)
   891  		v4.AddArg(v5)
   892  		v2.AddArg(v4)
   893  		v.AddArg(v2)
   894  		v.AddArg(mem)
   895  		return true
   896  	}
   897  	// match: (AtomicOr8 ptr val mem)
   898  	// cond: config.BigEndian
   899  	// result: (LoweredAtomicOr (AND <types.UInt32Ptr> (MOVWconst [^3]) ptr) 		(SLL <types.UInt32> (ZeroExt8to32 val) 			(SLLconst <types.UInt32> [3] 				(ANDconst <types.UInt32> [3] 					(XORconst <types.UInt32> [3] ptr)))) mem)
   900  	for {
   901  		ptr := v.Args[0]
   902  		val := v.Args[1]
   903  		mem := v.Args[2]
   904  		if !(config.BigEndian) {
   905  			break
   906  		}
   907  		v.reset(OpMIPSLoweredAtomicOr)
   908  		v0 := b.NewValue0(v.Pos, OpMIPSAND, types.UInt32Ptr)
   909  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
   910  		v1.AuxInt = ^3
   911  		v0.AddArg(v1)
   912  		v0.AddArg(ptr)
   913  		v.AddArg(v0)
   914  		v2 := b.NewValue0(v.Pos, OpMIPSSLL, types.UInt32)
   915  		v3 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
   916  		v3.AddArg(val)
   917  		v2.AddArg(v3)
   918  		v4 := b.NewValue0(v.Pos, OpMIPSSLLconst, types.UInt32)
   919  		v4.AuxInt = 3
   920  		v5 := b.NewValue0(v.Pos, OpMIPSANDconst, types.UInt32)
   921  		v5.AuxInt = 3
   922  		v6 := b.NewValue0(v.Pos, OpMIPSXORconst, types.UInt32)
   923  		v6.AuxInt = 3
   924  		v6.AddArg(ptr)
   925  		v5.AddArg(v6)
   926  		v4.AddArg(v5)
   927  		v2.AddArg(v4)
   928  		v.AddArg(v2)
   929  		v.AddArg(mem)
   930  		return true
   931  	}
   932  	return false
   933  }
   934  func rewriteValueMIPS_OpAtomicStore32_0(v *Value) bool {
   935  	// match: (AtomicStore32 ptr val mem)
   936  	// cond:
   937  	// result: (LoweredAtomicStore ptr val mem)
   938  	for {
   939  		ptr := v.Args[0]
   940  		val := v.Args[1]
   941  		mem := v.Args[2]
   942  		v.reset(OpMIPSLoweredAtomicStore)
   943  		v.AddArg(ptr)
   944  		v.AddArg(val)
   945  		v.AddArg(mem)
   946  		return true
   947  	}
   948  }
   949  func rewriteValueMIPS_OpAtomicStorePtrNoWB_0(v *Value) bool {
   950  	// match: (AtomicStorePtrNoWB ptr val mem)
   951  	// cond:
   952  	// result: (LoweredAtomicStore  ptr val mem)
   953  	for {
   954  		ptr := v.Args[0]
   955  		val := v.Args[1]
   956  		mem := v.Args[2]
   957  		v.reset(OpMIPSLoweredAtomicStore)
   958  		v.AddArg(ptr)
   959  		v.AddArg(val)
   960  		v.AddArg(mem)
   961  		return true
   962  	}
   963  }
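         // Avg32u computes the unsigned average as y + (x-y)>>1, which equals
         // floor((x+y)/2) without the sum x+y ever overflowing 32 bits. The identity
         // needs x >= y, which the generic division rules that introduce Avg32u are
         // understood to guarantee.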
   964  func rewriteValueMIPS_OpAvg32u_0(v *Value) bool {
   965  	b := v.Block
   966  	_ = b
   967  	// match: (Avg32u <t> x y)
   968  	// cond:
   969  	// result: (ADD (SRLconst <t> (SUB <t> x y) [1]) y)
   970  	for {
   971  		t := v.Type
   972  		x := v.Args[0]
   973  		y := v.Args[1]
   974  		v.reset(OpMIPSADD)
   975  		v0 := b.NewValue0(v.Pos, OpMIPSSRLconst, t)
   976  		v0.AuxInt = 1
   977  		v1 := b.NewValue0(v.Pos, OpMIPSSUB, t)
   978  		v1.AddArg(x)
   979  		v1.AddArg(y)
   980  		v0.AddArg(v1)
   981  		v.AddArg(v0)
   982  		v.AddArg(y)
   983  		return true
   984  	}
   985  }
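         // BitLen32(x) = 32 - CLZ(x): the leading-zero count directly gives the
         // position just above the highest set bit, and BitLen of 0 comes out as 0.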
   986  func rewriteValueMIPS_OpBitLen32_0(v *Value) bool {
   987  	b := v.Block
   988  	_ = b
   989  	types := &b.Func.Config.Types
   990  	_ = types
   991  	// match: (BitLen32 <t> x)
   992  	// cond:
   993  	// result: (SUB (MOVWconst [32]) (CLZ <t> x))
   994  	for {
   995  		t := v.Type
   996  		x := v.Args[0]
   997  		v.reset(OpMIPSSUB)
   998  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
   999  		v0.AuxInt = 32
  1000  		v.AddArg(v0)
  1001  		v1 := b.NewValue0(v.Pos, OpMIPSCLZ, t)
  1002  		v1.AddArg(x)
  1003  		v.AddArg(v1)
  1004  		return true
  1005  	}
  1006  }
  1007  func rewriteValueMIPS_OpClosureCall_0(v *Value) bool {
  1008  	// match: (ClosureCall [argwid] entry closure mem)
  1009  	// cond:
  1010  	// result: (CALLclosure [argwid] entry closure mem)
  1011  	for {
  1012  		argwid := v.AuxInt
  1013  		entry := v.Args[0]
  1014  		closure := v.Args[1]
  1015  		mem := v.Args[2]
  1016  		v.reset(OpMIPSCALLclosure)
  1017  		v.AuxInt = argwid
  1018  		v.AddArg(entry)
  1019  		v.AddArg(closure)
  1020  		v.AddArg(mem)
  1021  		return true
  1022  	}
  1023  }
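         // MIPS has no dedicated NOT instruction, so the Com* (bitwise complement) ops
         // lower to NORconst [0] x, using nor(x, 0) = ^(x|0) = ^x.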
  1024  func rewriteValueMIPS_OpCom16_0(v *Value) bool {
  1025  	// match: (Com16 x)
  1026  	// cond:
  1027  	// result: (NORconst [0] x)
  1028  	for {
  1029  		x := v.Args[0]
  1030  		v.reset(OpMIPSNORconst)
  1031  		v.AuxInt = 0
  1032  		v.AddArg(x)
  1033  		return true
  1034  	}
  1035  }
  1036  func rewriteValueMIPS_OpCom32_0(v *Value) bool {
  1037  	// match: (Com32 x)
  1038  	// cond:
  1039  	// result: (NORconst [0] x)
  1040  	for {
  1041  		x := v.Args[0]
  1042  		v.reset(OpMIPSNORconst)
  1043  		v.AuxInt = 0
  1044  		v.AddArg(x)
  1045  		return true
  1046  	}
  1047  }
  1048  func rewriteValueMIPS_OpCom8_0(v *Value) bool {
  1049  	// match: (Com8 x)
  1050  	// cond:
  1051  	// result: (NORconst [0] x)
  1052  	for {
  1053  		x := v.Args[0]
  1054  		v.reset(OpMIPSNORconst)
  1055  		v.AuxInt = 0
  1056  		v.AddArg(x)
  1057  		return true
  1058  	}
  1059  }
  1060  func rewriteValueMIPS_OpConst16_0(v *Value) bool {
  1061  	// match: (Const16 [val])
  1062  	// cond:
  1063  	// result: (MOVWconst [val])
  1064  	for {
  1065  		val := v.AuxInt
  1066  		v.reset(OpMIPSMOVWconst)
  1067  		v.AuxInt = val
  1068  		return true
  1069  	}
  1070  }
  1071  func rewriteValueMIPS_OpConst32_0(v *Value) bool {
  1072  	// match: (Const32 [val])
  1073  	// cond:
  1074  	// result: (MOVWconst [val])
  1075  	for {
  1076  		val := v.AuxInt
  1077  		v.reset(OpMIPSMOVWconst)
  1078  		v.AuxInt = val
  1079  		return true
  1080  	}
  1081  }
  1082  func rewriteValueMIPS_OpConst32F_0(v *Value) bool {
  1083  	// match: (Const32F [val])
  1084  	// cond:
  1085  	// result: (MOVFconst [val])
  1086  	for {
  1087  		val := v.AuxInt
  1088  		v.reset(OpMIPSMOVFconst)
  1089  		v.AuxInt = val
  1090  		return true
  1091  	}
  1092  }
  1093  func rewriteValueMIPS_OpConst64F_0(v *Value) bool {
  1094  	// match: (Const64F [val])
  1095  	// cond:
  1096  	// result: (MOVDconst [val])
  1097  	for {
  1098  		val := v.AuxInt
  1099  		v.reset(OpMIPSMOVDconst)
  1100  		v.AuxInt = val
  1101  		return true
  1102  	}
  1103  }
  1104  func rewriteValueMIPS_OpConst8_0(v *Value) bool {
  1105  	// match: (Const8 [val])
  1106  	// cond:
  1107  	// result: (MOVWconst [val])
  1108  	for {
  1109  		val := v.AuxInt
  1110  		v.reset(OpMIPSMOVWconst)
  1111  		v.AuxInt = val
  1112  		return true
  1113  	}
  1114  }
  1115  func rewriteValueMIPS_OpConstBool_0(v *Value) bool {
  1116  	// match: (ConstBool [b])
  1117  	// cond:
  1118  	// result: (MOVWconst [b])
  1119  	for {
  1120  		b := v.AuxInt
  1121  		v.reset(OpMIPSMOVWconst)
  1122  		v.AuxInt = b
  1123  		return true
  1124  	}
  1125  }
  1126  func rewriteValueMIPS_OpConstNil_0(v *Value) bool {
  1127  	// match: (ConstNil)
  1128  	// cond:
  1129  	// result: (MOVWconst [0])
  1130  	for {
  1131  		v.reset(OpMIPSMOVWconst)
  1132  		v.AuxInt = 0
  1133  		return true
  1134  	}
  1135  }
  1136  func rewriteValueMIPS_OpConvert_0(v *Value) bool {
  1137  	// match: (Convert x mem)
  1138  	// cond:
  1139  	// result: (MOVWconvert x mem)
  1140  	for {
  1141  		x := v.Args[0]
  1142  		mem := v.Args[1]
  1143  		v.reset(OpMIPSMOVWconvert)
  1144  		v.AddArg(x)
  1145  		v.AddArg(mem)
  1146  		return true
  1147  	}
  1148  }
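         // Ctz32 uses the isolate-lowest-bit trick: x & -x keeps only the lowest set
         // bit, subtracting 1 turns it into a mask of the trailing zeros, and
         // 32 - CLZ(mask) counts them. For example x = 0b1000: x & -x = 0b1000,
         // minus 1 = 0b0111, CLZ = 29, so Ctz = 32-29 = 3. x = 0 yields 32 as required.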
  1149  func rewriteValueMIPS_OpCtz32_0(v *Value) bool {
  1150  	b := v.Block
  1151  	_ = b
  1152  	types := &b.Func.Config.Types
  1153  	_ = types
  1154  	// match: (Ctz32 <t> x)
  1155  	// cond:
  1156  	// result: (SUB (MOVWconst [32]) (CLZ <t> (SUBconst <t> [1] (AND <t> x (NEG <t> x)))))
  1157  	for {
  1158  		t := v.Type
  1159  		x := v.Args[0]
  1160  		v.reset(OpMIPSSUB)
  1161  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  1162  		v0.AuxInt = 32
  1163  		v.AddArg(v0)
  1164  		v1 := b.NewValue0(v.Pos, OpMIPSCLZ, t)
  1165  		v2 := b.NewValue0(v.Pos, OpMIPSSUBconst, t)
  1166  		v2.AuxInt = 1
  1167  		v3 := b.NewValue0(v.Pos, OpMIPSAND, t)
  1168  		v3.AddArg(x)
  1169  		v4 := b.NewValue0(v.Pos, OpMIPSNEG, t)
  1170  		v4.AddArg(x)
  1171  		v3.AddArg(v4)
  1172  		v2.AddArg(v3)
  1173  		v1.AddArg(v2)
  1174  		v.AddArg(v1)
  1175  		return true
  1176  	}
  1177  }
  1178  func rewriteValueMIPS_OpCvt32Fto32_0(v *Value) bool {
  1179  	// match: (Cvt32Fto32 x)
  1180  	// cond:
  1181  	// result: (TRUNCFW x)
  1182  	for {
  1183  		x := v.Args[0]
  1184  		v.reset(OpMIPSTRUNCFW)
  1185  		v.AddArg(x)
  1186  		return true
  1187  	}
  1188  }
  1189  func rewriteValueMIPS_OpCvt32Fto64F_0(v *Value) bool {
  1190  	// match: (Cvt32Fto64F x)
  1191  	// cond:
  1192  	// result: (MOVFD x)
  1193  	for {
  1194  		x := v.Args[0]
  1195  		v.reset(OpMIPSMOVFD)
  1196  		v.AddArg(x)
  1197  		return true
  1198  	}
  1199  }
  1200  func rewriteValueMIPS_OpCvt32to32F_0(v *Value) bool {
  1201  	// match: (Cvt32to32F x)
  1202  	// cond:
  1203  	// result: (MOVWF x)
  1204  	for {
  1205  		x := v.Args[0]
  1206  		v.reset(OpMIPSMOVWF)
  1207  		v.AddArg(x)
  1208  		return true
  1209  	}
  1210  }
  1211  func rewriteValueMIPS_OpCvt32to64F_0(v *Value) bool {
  1212  	// match: (Cvt32to64F x)
  1213  	// cond:
  1214  	// result: (MOVWD x)
  1215  	for {
  1216  		x := v.Args[0]
  1217  		v.reset(OpMIPSMOVWD)
  1218  		v.AddArg(x)
  1219  		return true
  1220  	}
  1221  }
  1222  func rewriteValueMIPS_OpCvt64Fto32_0(v *Value) bool {
  1223  	// match: (Cvt64Fto32 x)
  1224  	// cond:
  1225  	// result: (TRUNCDW x)
  1226  	for {
  1227  		x := v.Args[0]
  1228  		v.reset(OpMIPSTRUNCDW)
  1229  		v.AddArg(x)
  1230  		return true
  1231  	}
  1232  }
  1233  func rewriteValueMIPS_OpCvt64Fto32F_0(v *Value) bool {
  1234  	// match: (Cvt64Fto32F x)
  1235  	// cond:
  1236  	// result: (MOVDF x)
  1237  	for {
  1238  		x := v.Args[0]
  1239  		v.reset(OpMIPSMOVDF)
  1240  		v.AddArg(x)
  1241  		return true
  1242  	}
  1243  }
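         // The DIV/DIVU (and MULT/MULTU) ops produce a (HI, LO) tuple; by the
         // convention used throughout this file, Select0 picks HI (the remainder, or
         // the high word of a product) and Select1 picks LO (the quotient, or the low
         // word). Sub-word divisions sign- or zero-extend both operands to 32 bits first.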
  1244  func rewriteValueMIPS_OpDiv16_0(v *Value) bool {
  1245  	b := v.Block
  1246  	_ = b
  1247  	types := &b.Func.Config.Types
  1248  	_ = types
  1249  	// match: (Div16 x y)
  1250  	// cond:
  1251  	// result: (Select1 (DIV (SignExt16to32 x) (SignExt16to32 y)))
  1252  	for {
  1253  		x := v.Args[0]
  1254  		y := v.Args[1]
  1255  		v.reset(OpSelect1)
  1256  		v0 := b.NewValue0(v.Pos, OpMIPSDIV, MakeTuple(types.Int32, types.Int32))
  1257  		v1 := b.NewValue0(v.Pos, OpSignExt16to32, types.Int32)
  1258  		v1.AddArg(x)
  1259  		v0.AddArg(v1)
  1260  		v2 := b.NewValue0(v.Pos, OpSignExt16to32, types.Int32)
  1261  		v2.AddArg(y)
  1262  		v0.AddArg(v2)
  1263  		v.AddArg(v0)
  1264  		return true
  1265  	}
  1266  }
  1267  func rewriteValueMIPS_OpDiv16u_0(v *Value) bool {
  1268  	b := v.Block
  1269  	_ = b
  1270  	types := &b.Func.Config.Types
  1271  	_ = types
  1272  	// match: (Div16u x y)
  1273  	// cond:
  1274  	// result: (Select1 (DIVU (ZeroExt16to32 x) (ZeroExt16to32 y)))
  1275  	for {
  1276  		x := v.Args[0]
  1277  		y := v.Args[1]
  1278  		v.reset(OpSelect1)
  1279  		v0 := b.NewValue0(v.Pos, OpMIPSDIVU, MakeTuple(types.UInt32, types.UInt32))
  1280  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
  1281  		v1.AddArg(x)
  1282  		v0.AddArg(v1)
  1283  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
  1284  		v2.AddArg(y)
  1285  		v0.AddArg(v2)
  1286  		v.AddArg(v0)
  1287  		return true
  1288  	}
  1289  }
  1290  func rewriteValueMIPS_OpDiv32_0(v *Value) bool {
  1291  	b := v.Block
  1292  	_ = b
  1293  	types := &b.Func.Config.Types
  1294  	_ = types
  1295  	// match: (Div32 x y)
  1296  	// cond:
  1297  	// result: (Select1 (DIV x y))
  1298  	for {
  1299  		x := v.Args[0]
  1300  		y := v.Args[1]
  1301  		v.reset(OpSelect1)
  1302  		v0 := b.NewValue0(v.Pos, OpMIPSDIV, MakeTuple(types.Int32, types.Int32))
  1303  		v0.AddArg(x)
  1304  		v0.AddArg(y)
  1305  		v.AddArg(v0)
  1306  		return true
  1307  	}
  1308  }
  1309  func rewriteValueMIPS_OpDiv32F_0(v *Value) bool {
  1310  	// match: (Div32F x y)
  1311  	// cond:
  1312  	// result: (DIVF x y)
  1313  	for {
  1314  		x := v.Args[0]
  1315  		y := v.Args[1]
  1316  		v.reset(OpMIPSDIVF)
  1317  		v.AddArg(x)
  1318  		v.AddArg(y)
  1319  		return true
  1320  	}
  1321  }
  1322  func rewriteValueMIPS_OpDiv32u_0(v *Value) bool {
  1323  	b := v.Block
  1324  	_ = b
  1325  	types := &b.Func.Config.Types
  1326  	_ = types
  1327  	// match: (Div32u x y)
  1328  	// cond:
  1329  	// result: (Select1 (DIVU x y))
  1330  	for {
  1331  		x := v.Args[0]
  1332  		y := v.Args[1]
  1333  		v.reset(OpSelect1)
  1334  		v0 := b.NewValue0(v.Pos, OpMIPSDIVU, MakeTuple(types.UInt32, types.UInt32))
  1335  		v0.AddArg(x)
  1336  		v0.AddArg(y)
  1337  		v.AddArg(v0)
  1338  		return true
  1339  	}
  1340  }
  1341  func rewriteValueMIPS_OpDiv64F_0(v *Value) bool {
  1342  	// match: (Div64F x y)
  1343  	// cond:
  1344  	// result: (DIVD x y)
  1345  	for {
  1346  		x := v.Args[0]
  1347  		y := v.Args[1]
  1348  		v.reset(OpMIPSDIVD)
  1349  		v.AddArg(x)
  1350  		v.AddArg(y)
  1351  		return true
  1352  	}
  1353  }
  1354  func rewriteValueMIPS_OpDiv8_0(v *Value) bool {
  1355  	b := v.Block
  1356  	_ = b
  1357  	types := &b.Func.Config.Types
  1358  	_ = types
  1359  	// match: (Div8 x y)
  1360  	// cond:
  1361  	// result: (Select1 (DIV (SignExt8to32 x) (SignExt8to32 y)))
  1362  	for {
  1363  		x := v.Args[0]
  1364  		y := v.Args[1]
  1365  		v.reset(OpSelect1)
  1366  		v0 := b.NewValue0(v.Pos, OpMIPSDIV, MakeTuple(types.Int32, types.Int32))
  1367  		v1 := b.NewValue0(v.Pos, OpSignExt8to32, types.Int32)
  1368  		v1.AddArg(x)
  1369  		v0.AddArg(v1)
  1370  		v2 := b.NewValue0(v.Pos, OpSignExt8to32, types.Int32)
  1371  		v2.AddArg(y)
  1372  		v0.AddArg(v2)
  1373  		v.AddArg(v0)
  1374  		return true
  1375  	}
  1376  }
  1377  func rewriteValueMIPS_OpDiv8u_0(v *Value) bool {
  1378  	b := v.Block
  1379  	_ = b
  1380  	types := &b.Func.Config.Types
  1381  	_ = types
  1382  	// match: (Div8u x y)
  1383  	// cond:
  1384  	// result: (Select1 (DIVU (ZeroExt8to32 x) (ZeroExt8to32 y)))
  1385  	for {
  1386  		x := v.Args[0]
  1387  		y := v.Args[1]
  1388  		v.reset(OpSelect1)
  1389  		v0 := b.NewValue0(v.Pos, OpMIPSDIVU, MakeTuple(types.UInt32, types.UInt32))
  1390  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
  1391  		v1.AddArg(x)
  1392  		v0.AddArg(v1)
  1393  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
  1394  		v2.AddArg(y)
  1395  		v0.AddArg(v2)
  1396  		v.AddArg(v0)
  1397  		return true
  1398  	}
  1399  }
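         // Integer equality has no direct MIPS instruction, so x == y is computed as
         // 1 >u (x ^ y) via SGTUconst [1]: the XOR is zero exactly when the operands
         // are equal, and only zero is unsigned-less-than 1. The 8- and 16-bit forms
         // zero-extend their operands first.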
  1400  func rewriteValueMIPS_OpEq16_0(v *Value) bool {
  1401  	b := v.Block
  1402  	_ = b
  1403  	types := &b.Func.Config.Types
  1404  	_ = types
  1405  	// match: (Eq16 x y)
  1406  	// cond:
  1407  	// result: (SGTUconst [1] (XOR (ZeroExt16to32 x) (ZeroExt16to32 y)))
  1408  	for {
  1409  		x := v.Args[0]
  1410  		y := v.Args[1]
  1411  		v.reset(OpMIPSSGTUconst)
  1412  		v.AuxInt = 1
  1413  		v0 := b.NewValue0(v.Pos, OpMIPSXOR, types.UInt32)
  1414  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
  1415  		v1.AddArg(x)
  1416  		v0.AddArg(v1)
  1417  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
  1418  		v2.AddArg(y)
  1419  		v0.AddArg(v2)
  1420  		v.AddArg(v0)
  1421  		return true
  1422  	}
  1423  }
  1424  func rewriteValueMIPS_OpEq32_0(v *Value) bool {
  1425  	b := v.Block
  1426  	_ = b
  1427  	types := &b.Func.Config.Types
  1428  	_ = types
  1429  	// match: (Eq32 x y)
  1430  	// cond:
  1431  	// result: (SGTUconst [1] (XOR x y))
  1432  	for {
  1433  		x := v.Args[0]
  1434  		y := v.Args[1]
  1435  		v.reset(OpMIPSSGTUconst)
  1436  		v.AuxInt = 1
  1437  		v0 := b.NewValue0(v.Pos, OpMIPSXOR, types.UInt32)
  1438  		v0.AddArg(x)
  1439  		v0.AddArg(y)
  1440  		v.AddArg(v0)
  1441  		return true
  1442  	}
  1443  }
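         // Floating-point compares lower to a CMP* op that sets the FP condition flag;
         // FPFlagTrue materializes that flag as a 0/1 boolean, and the Neq* rules later
         // in the file are expected to use the inverted FPFlagFalse form.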
  1444  func rewriteValueMIPS_OpEq32F_0(v *Value) bool {
  1445  	b := v.Block
  1446  	_ = b
  1447  	// match: (Eq32F x y)
  1448  	// cond:
  1449  	// result: (FPFlagTrue (CMPEQF x y))
  1450  	for {
  1451  		x := v.Args[0]
  1452  		y := v.Args[1]
  1453  		v.reset(OpMIPSFPFlagTrue)
  1454  		v0 := b.NewValue0(v.Pos, OpMIPSCMPEQF, TypeFlags)
  1455  		v0.AddArg(x)
  1456  		v0.AddArg(y)
  1457  		v.AddArg(v0)
  1458  		return true
  1459  	}
  1460  }
  1461  func rewriteValueMIPS_OpEq64F_0(v *Value) bool {
  1462  	b := v.Block
  1463  	_ = b
  1464  	// match: (Eq64F x y)
  1465  	// cond:
  1466  	// result: (FPFlagTrue (CMPEQD x y))
  1467  	for {
  1468  		x := v.Args[0]
  1469  		y := v.Args[1]
  1470  		v.reset(OpMIPSFPFlagTrue)
  1471  		v0 := b.NewValue0(v.Pos, OpMIPSCMPEQD, TypeFlags)
  1472  		v0.AddArg(x)
  1473  		v0.AddArg(y)
  1474  		v.AddArg(v0)
  1475  		return true
  1476  	}
  1477  }
  1478  func rewriteValueMIPS_OpEq8_0(v *Value) bool {
  1479  	b := v.Block
  1480  	_ = b
  1481  	types := &b.Func.Config.Types
  1482  	_ = types
  1483  	// match: (Eq8 x y)
  1484  	// cond:
  1485  	// result: (SGTUconst [1] (XOR (ZeroExt8to32 x) (ZeroExt8to32 y)))
  1486  	for {
  1487  		x := v.Args[0]
  1488  		y := v.Args[1]
  1489  		v.reset(OpMIPSSGTUconst)
  1490  		v.AuxInt = 1
  1491  		v0 := b.NewValue0(v.Pos, OpMIPSXOR, types.UInt32)
  1492  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
  1493  		v1.AddArg(x)
  1494  		v0.AddArg(v1)
  1495  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
  1496  		v2.AddArg(y)
  1497  		v0.AddArg(v2)
  1498  		v.AddArg(v0)
  1499  		return true
  1500  	}
  1501  }
  1502  func rewriteValueMIPS_OpEqB_0(v *Value) bool {
  1503  	b := v.Block
  1504  	_ = b
  1505  	types := &b.Func.Config.Types
  1506  	_ = types
  1507  	// match: (EqB x y)
  1508  	// cond:
  1509  	// result: (XORconst [1] (XOR <types.Bool> x y))
  1510  	for {
  1511  		x := v.Args[0]
  1512  		y := v.Args[1]
  1513  		v.reset(OpMIPSXORconst)
  1514  		v.AuxInt = 1
  1515  		v0 := b.NewValue0(v.Pos, OpMIPSXOR, types.Bool)
  1516  		v0.AddArg(x)
  1517  		v0.AddArg(y)
  1518  		v.AddArg(v0)
  1519  		return true
  1520  	}
  1521  }
  1522  func rewriteValueMIPS_OpEqPtr_0(v *Value) bool {
  1523  	b := v.Block
  1524  	_ = b
  1525  	types := &b.Func.Config.Types
  1526  	_ = types
  1527  	// match: (EqPtr x y)
  1528  	// cond:
  1529  	// result: (SGTUconst [1] (XOR x y))
  1530  	for {
  1531  		x := v.Args[0]
  1532  		y := v.Args[1]
  1533  		v.reset(OpMIPSSGTUconst)
  1534  		v.AuxInt = 1
  1535  		v0 := b.NewValue0(v.Pos, OpMIPSXOR, types.UInt32)
  1536  		v0.AddArg(x)
  1537  		v0.AddArg(y)
  1538  		v.AddArg(v0)
  1539  		return true
  1540  	}
  1541  }
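         // Only "set on greater than" exists (SGT/SGTU), so the ordered comparisons are
         // all derived from it: x >= y becomes the boolean negation (XORconst [1]) of
         // y > x, and x < y is handled the same way with the operands swapped.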
  1542  func rewriteValueMIPS_OpGeq16_0(v *Value) bool {
  1543  	b := v.Block
  1544  	_ = b
  1545  	types := &b.Func.Config.Types
  1546  	_ = types
  1547  	// match: (Geq16 x y)
  1548  	// cond:
  1549  	// result: (XORconst [1] (SGT (SignExt16to32 y) (SignExt16to32 x)))
  1550  	for {
  1551  		x := v.Args[0]
  1552  		y := v.Args[1]
  1553  		v.reset(OpMIPSXORconst)
  1554  		v.AuxInt = 1
  1555  		v0 := b.NewValue0(v.Pos, OpMIPSSGT, types.Bool)
  1556  		v1 := b.NewValue0(v.Pos, OpSignExt16to32, types.Int32)
  1557  		v1.AddArg(y)
  1558  		v0.AddArg(v1)
  1559  		v2 := b.NewValue0(v.Pos, OpSignExt16to32, types.Int32)
  1560  		v2.AddArg(x)
  1561  		v0.AddArg(v2)
  1562  		v.AddArg(v0)
  1563  		return true
  1564  	}
  1565  }
  1566  func rewriteValueMIPS_OpGeq16U_0(v *Value) bool {
  1567  	b := v.Block
  1568  	_ = b
  1569  	types := &b.Func.Config.Types
  1570  	_ = types
  1571  	// match: (Geq16U x y)
  1572  	// cond:
  1573  	// result: (XORconst [1] (SGTU (ZeroExt16to32 y) (ZeroExt16to32 x)))
  1574  	for {
  1575  		x := v.Args[0]
  1576  		y := v.Args[1]
  1577  		v.reset(OpMIPSXORconst)
  1578  		v.AuxInt = 1
  1579  		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, types.Bool)
  1580  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
  1581  		v1.AddArg(y)
  1582  		v0.AddArg(v1)
  1583  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
  1584  		v2.AddArg(x)
  1585  		v0.AddArg(v2)
  1586  		v.AddArg(v0)
  1587  		return true
  1588  	}
  1589  }
  1590  func rewriteValueMIPS_OpGeq32_0(v *Value) bool {
  1591  	b := v.Block
  1592  	_ = b
  1593  	types := &b.Func.Config.Types
  1594  	_ = types
  1595  	// match: (Geq32 x y)
  1596  	// cond:
  1597  	// result: (XORconst [1] (SGT y x))
  1598  	for {
  1599  		x := v.Args[0]
  1600  		y := v.Args[1]
  1601  		v.reset(OpMIPSXORconst)
  1602  		v.AuxInt = 1
  1603  		v0 := b.NewValue0(v.Pos, OpMIPSSGT, types.Bool)
  1604  		v0.AddArg(y)
  1605  		v0.AddArg(x)
  1606  		v.AddArg(v0)
  1607  		return true
  1608  	}
  1609  }
  1610  func rewriteValueMIPS_OpGeq32F_0(v *Value) bool {
  1611  	b := v.Block
  1612  	_ = b
  1613  	// match: (Geq32F x y)
  1614  	// cond:
  1615  	// result: (FPFlagTrue (CMPGEF x y))
  1616  	for {
  1617  		x := v.Args[0]
  1618  		y := v.Args[1]
  1619  		v.reset(OpMIPSFPFlagTrue)
  1620  		v0 := b.NewValue0(v.Pos, OpMIPSCMPGEF, TypeFlags)
  1621  		v0.AddArg(x)
  1622  		v0.AddArg(y)
  1623  		v.AddArg(v0)
  1624  		return true
  1625  	}
  1626  }
  1627  func rewriteValueMIPS_OpGeq32U_0(v *Value) bool {
  1628  	b := v.Block
  1629  	_ = b
  1630  	types := &b.Func.Config.Types
  1631  	_ = types
  1632  	// match: (Geq32U x y)
  1633  	// cond:
  1634  	// result: (XORconst [1] (SGTU y x))
  1635  	for {
  1636  		x := v.Args[0]
  1637  		y := v.Args[1]
  1638  		v.reset(OpMIPSXORconst)
  1639  		v.AuxInt = 1
  1640  		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, types.Bool)
  1641  		v0.AddArg(y)
  1642  		v0.AddArg(x)
  1643  		v.AddArg(v0)
  1644  		return true
  1645  	}
  1646  }
  1647  func rewriteValueMIPS_OpGeq64F_0(v *Value) bool {
  1648  	b := v.Block
  1649  	_ = b
  1650  	// match: (Geq64F x y)
  1651  	// cond:
  1652  	// result: (FPFlagTrue (CMPGED x y))
  1653  	for {
  1654  		x := v.Args[0]
  1655  		y := v.Args[1]
  1656  		v.reset(OpMIPSFPFlagTrue)
  1657  		v0 := b.NewValue0(v.Pos, OpMIPSCMPGED, TypeFlags)
  1658  		v0.AddArg(x)
  1659  		v0.AddArg(y)
  1660  		v.AddArg(v0)
  1661  		return true
  1662  	}
  1663  }
  1664  func rewriteValueMIPS_OpGeq8_0(v *Value) bool {
  1665  	b := v.Block
  1666  	_ = b
  1667  	types := &b.Func.Config.Types
  1668  	_ = types
  1669  	// match: (Geq8 x y)
  1670  	// cond:
  1671  	// result: (XORconst [1] (SGT (SignExt8to32 y) (SignExt8to32 x)))
  1672  	for {
  1673  		x := v.Args[0]
  1674  		y := v.Args[1]
  1675  		v.reset(OpMIPSXORconst)
  1676  		v.AuxInt = 1
  1677  		v0 := b.NewValue0(v.Pos, OpMIPSSGT, types.Bool)
  1678  		v1 := b.NewValue0(v.Pos, OpSignExt8to32, types.Int32)
  1679  		v1.AddArg(y)
  1680  		v0.AddArg(v1)
  1681  		v2 := b.NewValue0(v.Pos, OpSignExt8to32, types.Int32)
  1682  		v2.AddArg(x)
  1683  		v0.AddArg(v2)
  1684  		v.AddArg(v0)
  1685  		return true
  1686  	}
  1687  }
  1688  func rewriteValueMIPS_OpGeq8U_0(v *Value) bool {
  1689  	b := v.Block
  1690  	_ = b
  1691  	types := &b.Func.Config.Types
  1692  	_ = types
  1693  	// match: (Geq8U x y)
  1694  	// cond:
  1695  	// result: (XORconst [1] (SGTU (ZeroExt8to32 y) (ZeroExt8to32 x)))
  1696  	for {
  1697  		x := v.Args[0]
  1698  		y := v.Args[1]
  1699  		v.reset(OpMIPSXORconst)
  1700  		v.AuxInt = 1
  1701  		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, types.Bool)
  1702  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
  1703  		v1.AddArg(y)
  1704  		v0.AddArg(v1)
  1705  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
  1706  		v2.AddArg(x)
  1707  		v0.AddArg(v2)
  1708  		v.AddArg(v0)
  1709  		return true
  1710  	}
  1711  }
  1712  func rewriteValueMIPS_OpGetClosurePtr_0(v *Value) bool {
  1713  	// match: (GetClosurePtr)
  1714  	// cond:
  1715  	// result: (LoweredGetClosurePtr)
  1716  	for {
  1717  		v.reset(OpMIPSLoweredGetClosurePtr)
  1718  		return true
  1719  	}
  1720  }
  1721  func rewriteValueMIPS_OpGreater16_0(v *Value) bool {
  1722  	b := v.Block
  1723  	_ = b
  1724  	types := &b.Func.Config.Types
  1725  	_ = types
  1726  	// match: (Greater16 x y)
  1727  	// cond:
  1728  	// result: (SGT (SignExt16to32 x) (SignExt16to32 y))
  1729  	for {
  1730  		x := v.Args[0]
  1731  		y := v.Args[1]
  1732  		v.reset(OpMIPSSGT)
  1733  		v0 := b.NewValue0(v.Pos, OpSignExt16to32, types.Int32)
  1734  		v0.AddArg(x)
  1735  		v.AddArg(v0)
  1736  		v1 := b.NewValue0(v.Pos, OpSignExt16to32, types.Int32)
  1737  		v1.AddArg(y)
  1738  		v.AddArg(v1)
  1739  		return true
  1740  	}
  1741  }
  1742  func rewriteValueMIPS_OpGreater16U_0(v *Value) bool {
  1743  	b := v.Block
  1744  	_ = b
  1745  	types := &b.Func.Config.Types
  1746  	_ = types
  1747  	// match: (Greater16U x y)
  1748  	// cond:
  1749  	// result: (SGTU (ZeroExt16to32 x) (ZeroExt16to32 y))
  1750  	for {
  1751  		x := v.Args[0]
  1752  		y := v.Args[1]
  1753  		v.reset(OpMIPSSGTU)
  1754  		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
  1755  		v0.AddArg(x)
  1756  		v.AddArg(v0)
  1757  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
  1758  		v1.AddArg(y)
  1759  		v.AddArg(v1)
  1760  		return true
  1761  	}
  1762  }
  1763  func rewriteValueMIPS_OpGreater32_0(v *Value) bool {
  1764  	// match: (Greater32 x y)
  1765  	// cond:
  1766  	// result: (SGT x y)
  1767  	for {
  1768  		x := v.Args[0]
  1769  		y := v.Args[1]
  1770  		v.reset(OpMIPSSGT)
  1771  		v.AddArg(x)
  1772  		v.AddArg(y)
  1773  		return true
  1774  	}
  1775  }
  1776  func rewriteValueMIPS_OpGreater32F_0(v *Value) bool {
  1777  	b := v.Block
  1778  	_ = b
  1779  	// match: (Greater32F x y)
  1780  	// cond:
  1781  	// result: (FPFlagTrue (CMPGTF x y))
  1782  	for {
  1783  		x := v.Args[0]
  1784  		y := v.Args[1]
  1785  		v.reset(OpMIPSFPFlagTrue)
  1786  		v0 := b.NewValue0(v.Pos, OpMIPSCMPGTF, TypeFlags)
  1787  		v0.AddArg(x)
  1788  		v0.AddArg(y)
  1789  		v.AddArg(v0)
  1790  		return true
  1791  	}
  1792  }
  1793  func rewriteValueMIPS_OpGreater32U_0(v *Value) bool {
  1794  	// match: (Greater32U x y)
  1795  	// cond:
  1796  	// result: (SGTU x y)
  1797  	for {
  1798  		x := v.Args[0]
  1799  		y := v.Args[1]
  1800  		v.reset(OpMIPSSGTU)
  1801  		v.AddArg(x)
  1802  		v.AddArg(y)
  1803  		return true
  1804  	}
  1805  }
  1806  func rewriteValueMIPS_OpGreater64F_0(v *Value) bool {
  1807  	b := v.Block
  1808  	_ = b
  1809  	// match: (Greater64F x y)
  1810  	// cond:
  1811  	// result: (FPFlagTrue (CMPGTD x y))
  1812  	for {
  1813  		x := v.Args[0]
  1814  		y := v.Args[1]
  1815  		v.reset(OpMIPSFPFlagTrue)
  1816  		v0 := b.NewValue0(v.Pos, OpMIPSCMPGTD, TypeFlags)
  1817  		v0.AddArg(x)
  1818  		v0.AddArg(y)
  1819  		v.AddArg(v0)
  1820  		return true
  1821  	}
  1822  }
  1823  func rewriteValueMIPS_OpGreater8_0(v *Value) bool {
  1824  	b := v.Block
  1825  	_ = b
  1826  	types := &b.Func.Config.Types
  1827  	_ = types
  1828  	// match: (Greater8 x y)
  1829  	// cond:
  1830  	// result: (SGT (SignExt8to32 x) (SignExt8to32 y))
  1831  	for {
  1832  		x := v.Args[0]
  1833  		y := v.Args[1]
  1834  		v.reset(OpMIPSSGT)
  1835  		v0 := b.NewValue0(v.Pos, OpSignExt8to32, types.Int32)
  1836  		v0.AddArg(x)
  1837  		v.AddArg(v0)
  1838  		v1 := b.NewValue0(v.Pos, OpSignExt8to32, types.Int32)
  1839  		v1.AddArg(y)
  1840  		v.AddArg(v1)
  1841  		return true
  1842  	}
  1843  }
  1844  func rewriteValueMIPS_OpGreater8U_0(v *Value) bool {
  1845  	b := v.Block
  1846  	_ = b
  1847  	types := &b.Func.Config.Types
  1848  	_ = types
  1849  	// match: (Greater8U x y)
  1850  	// cond:
  1851  	// result: (SGTU (ZeroExt8to32 x) (ZeroExt8to32 y))
  1852  	for {
  1853  		x := v.Args[0]
  1854  		y := v.Args[1]
  1855  		v.reset(OpMIPSSGTU)
  1856  		v0 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
  1857  		v0.AddArg(x)
  1858  		v.AddArg(v0)
  1859  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
  1860  		v1.AddArg(y)
  1861  		v.AddArg(v1)
  1862  		return true
  1863  	}
  1864  }
  1865  func rewriteValueMIPS_OpHmul32_0(v *Value) bool {
  1866  	b := v.Block
  1867  	_ = b
  1868  	types := &b.Func.Config.Types
  1869  	_ = types
  1870  	// match: (Hmul32 x y)
  1871  	// cond:
  1872  	// result: (Select0 (MULT x y))
  1873  	for {
  1874  		x := v.Args[0]
  1875  		y := v.Args[1]
  1876  		v.reset(OpSelect0)
  1877  		v0 := b.NewValue0(v.Pos, OpMIPSMULT, MakeTuple(types.Int32, types.Int32))
  1878  		v0.AddArg(x)
  1879  		v0.AddArg(y)
  1880  		v.AddArg(v0)
  1881  		return true
  1882  	}
  1883  }
  1884  func rewriteValueMIPS_OpHmul32u_0(v *Value) bool {
  1885  	b := v.Block
  1886  	_ = b
  1887  	types := &b.Func.Config.Types
  1888  	_ = types
  1889  	// match: (Hmul32u x y)
  1890  	// cond:
  1891  	// result: (Select0 (MULTU x y))
  1892  	for {
  1893  		x := v.Args[0]
  1894  		y := v.Args[1]
  1895  		v.reset(OpSelect0)
  1896  		v0 := b.NewValue0(v.Pos, OpMIPSMULTU, MakeTuple(types.UInt32, types.UInt32))
  1897  		v0.AddArg(x)
  1898  		v0.AddArg(y)
  1899  		v.AddArg(v0)
  1900  		return true
  1901  	}
  1902  }
  1903  func rewriteValueMIPS_OpInterCall_0(v *Value) bool {
  1904  	// match: (InterCall [argwid] entry mem)
  1905  	// cond:
  1906  	// result: (CALLinter [argwid] entry mem)
  1907  	for {
  1908  		argwid := v.AuxInt
  1909  		entry := v.Args[0]
  1910  		mem := v.Args[1]
  1911  		v.reset(OpMIPSCALLinter)
  1912  		v.AuxInt = argwid
  1913  		v.AddArg(entry)
  1914  		v.AddArg(mem)
  1915  		return true
  1916  	}
  1917  }
  1918  func rewriteValueMIPS_OpIsInBounds_0(v *Value) bool {
  1919  	// match: (IsInBounds idx len)
  1920  	// cond:
  1921  	// result: (SGTU len idx)
  1922  	for {
  1923  		idx := v.Args[0]
  1924  		len := v.Args[1]
  1925  		v.reset(OpMIPSSGTU)
  1926  		v.AddArg(len)
  1927  		v.AddArg(idx)
  1928  		return true
  1929  	}
  1930  }
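// Editorial sketch, not generated from gen/MIPS.rules: the IsInBounds rule
// above lowers a bounds check to one unsigned compare, (SGTU len idx). Using
// an unsigned comparison makes a negative index look like a huge value, so
// the single test covers both 0 <= idx and idx < len. Hypothetical model:
func isInBoundsModel(idx, length int32) bool {
	return uint32(length) > uint32(idx) // models (SGTU len idx)
}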
  1931  func rewriteValueMIPS_OpIsNonNil_0(v *Value) bool {
  1932  	b := v.Block
  1933  	_ = b
  1934  	types := &b.Func.Config.Types
  1935  	_ = types
  1936  	// match: (IsNonNil ptr)
  1937  	// cond:
  1938  	// result: (SGTU ptr (MOVWconst [0]))
  1939  	for {
  1940  		ptr := v.Args[0]
  1941  		v.reset(OpMIPSSGTU)
  1942  		v.AddArg(ptr)
  1943  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  1944  		v0.AuxInt = 0
  1945  		v.AddArg(v0)
  1946  		return true
  1947  	}
  1948  }
  1949  func rewriteValueMIPS_OpIsSliceInBounds_0(v *Value) bool {
  1950  	b := v.Block
  1951  	_ = b
  1952  	types := &b.Func.Config.Types
  1953  	_ = types
  1954  	// match: (IsSliceInBounds idx len)
  1955  	// cond:
  1956  	// result: (XORconst [1] (SGTU idx len))
  1957  	for {
  1958  		idx := v.Args[0]
  1959  		len := v.Args[1]
  1960  		v.reset(OpMIPSXORconst)
  1961  		v.AuxInt = 1
  1962  		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, types.Bool)
  1963  		v0.AddArg(idx)
  1964  		v0.AddArg(len)
  1965  		v.AddArg(v0)
  1966  		return true
  1967  	}
  1968  }
  1969  func rewriteValueMIPS_OpLeq16_0(v *Value) bool {
  1970  	b := v.Block
  1971  	_ = b
  1972  	types := &b.Func.Config.Types
  1973  	_ = types
  1974  	// match: (Leq16 x y)
  1975  	// cond:
  1976  	// result: (XORconst [1] (SGT (SignExt16to32 x) (SignExt16to32 y)))
  1977  	for {
  1978  		x := v.Args[0]
  1979  		y := v.Args[1]
  1980  		v.reset(OpMIPSXORconst)
  1981  		v.AuxInt = 1
  1982  		v0 := b.NewValue0(v.Pos, OpMIPSSGT, types.Bool)
  1983  		v1 := b.NewValue0(v.Pos, OpSignExt16to32, types.Int32)
  1984  		v1.AddArg(x)
  1985  		v0.AddArg(v1)
  1986  		v2 := b.NewValue0(v.Pos, OpSignExt16to32, types.Int32)
  1987  		v2.AddArg(y)
  1988  		v0.AddArg(v2)
  1989  		v.AddArg(v0)
  1990  		return true
  1991  	}
  1992  }
  1993  func rewriteValueMIPS_OpLeq16U_0(v *Value) bool {
  1994  	b := v.Block
  1995  	_ = b
  1996  	types := &b.Func.Config.Types
  1997  	_ = types
  1998  	// match: (Leq16U x y)
  1999  	// cond:
  2000  	// result: (XORconst [1] (SGTU (ZeroExt16to32 x) (ZeroExt16to32 y)))
  2001  	for {
  2002  		x := v.Args[0]
  2003  		y := v.Args[1]
  2004  		v.reset(OpMIPSXORconst)
  2005  		v.AuxInt = 1
  2006  		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, types.Bool)
  2007  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
  2008  		v1.AddArg(x)
  2009  		v0.AddArg(v1)
  2010  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
  2011  		v2.AddArg(y)
  2012  		v0.AddArg(v2)
  2013  		v.AddArg(v0)
  2014  		return true
  2015  	}
  2016  }
  2017  func rewriteValueMIPS_OpLeq32_0(v *Value) bool {
  2018  	b := v.Block
  2019  	_ = b
  2020  	types := &b.Func.Config.Types
  2021  	_ = types
  2022  	// match: (Leq32 x y)
  2023  	// cond:
  2024  	// result: (XORconst [1] (SGT x y))
  2025  	for {
  2026  		x := v.Args[0]
  2027  		y := v.Args[1]
  2028  		v.reset(OpMIPSXORconst)
  2029  		v.AuxInt = 1
  2030  		v0 := b.NewValue0(v.Pos, OpMIPSSGT, types.Bool)
  2031  		v0.AddArg(x)
  2032  		v0.AddArg(y)
  2033  		v.AddArg(v0)
  2034  		return true
  2035  	}
  2036  }
  2037  func rewriteValueMIPS_OpLeq32F_0(v *Value) bool {
  2038  	b := v.Block
  2039  	_ = b
  2040  	// match: (Leq32F x y)
  2041  	// cond:
  2042  	// result: (FPFlagTrue (CMPGEF y x))
  2043  	for {
  2044  		x := v.Args[0]
  2045  		y := v.Args[1]
  2046  		v.reset(OpMIPSFPFlagTrue)
  2047  		v0 := b.NewValue0(v.Pos, OpMIPSCMPGEF, TypeFlags)
  2048  		v0.AddArg(y)
  2049  		v0.AddArg(x)
  2050  		v.AddArg(v0)
  2051  		return true
  2052  	}
  2053  }
  2054  func rewriteValueMIPS_OpLeq32U_0(v *Value) bool {
  2055  	b := v.Block
  2056  	_ = b
  2057  	types := &b.Func.Config.Types
  2058  	_ = types
  2059  	// match: (Leq32U x y)
  2060  	// cond:
  2061  	// result: (XORconst [1] (SGTU x y))
  2062  	for {
  2063  		x := v.Args[0]
  2064  		y := v.Args[1]
  2065  		v.reset(OpMIPSXORconst)
  2066  		v.AuxInt = 1
  2067  		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, types.Bool)
  2068  		v0.AddArg(x)
  2069  		v0.AddArg(y)
  2070  		v.AddArg(v0)
  2071  		return true
  2072  	}
  2073  }
  2074  func rewriteValueMIPS_OpLeq64F_0(v *Value) bool {
  2075  	b := v.Block
  2076  	_ = b
  2077  	// match: (Leq64F x y)
  2078  	// cond:
  2079  	// result: (FPFlagTrue (CMPGED y x))
  2080  	for {
  2081  		x := v.Args[0]
  2082  		y := v.Args[1]
  2083  		v.reset(OpMIPSFPFlagTrue)
  2084  		v0 := b.NewValue0(v.Pos, OpMIPSCMPGED, TypeFlags)
  2085  		v0.AddArg(y)
  2086  		v0.AddArg(x)
  2087  		v.AddArg(v0)
  2088  		return true
  2089  	}
  2090  }
  2091  func rewriteValueMIPS_OpLeq8_0(v *Value) bool {
  2092  	b := v.Block
  2093  	_ = b
  2094  	types := &b.Func.Config.Types
  2095  	_ = types
  2096  	// match: (Leq8 x y)
  2097  	// cond:
  2098  	// result: (XORconst [1] (SGT (SignExt8to32 x) (SignExt8to32 y)))
  2099  	for {
  2100  		x := v.Args[0]
  2101  		y := v.Args[1]
  2102  		v.reset(OpMIPSXORconst)
  2103  		v.AuxInt = 1
  2104  		v0 := b.NewValue0(v.Pos, OpMIPSSGT, types.Bool)
  2105  		v1 := b.NewValue0(v.Pos, OpSignExt8to32, types.Int32)
  2106  		v1.AddArg(x)
  2107  		v0.AddArg(v1)
  2108  		v2 := b.NewValue0(v.Pos, OpSignExt8to32, types.Int32)
  2109  		v2.AddArg(y)
  2110  		v0.AddArg(v2)
  2111  		v.AddArg(v0)
  2112  		return true
  2113  	}
  2114  }
  2115  func rewriteValueMIPS_OpLeq8U_0(v *Value) bool {
  2116  	b := v.Block
  2117  	_ = b
  2118  	types := &b.Func.Config.Types
  2119  	_ = types
  2120  	// match: (Leq8U x y)
  2121  	// cond:
  2122  	// result: (XORconst [1] (SGTU (ZeroExt8to32 x) (ZeroExt8to32 y)))
  2123  	for {
  2124  		x := v.Args[0]
  2125  		y := v.Args[1]
  2126  		v.reset(OpMIPSXORconst)
  2127  		v.AuxInt = 1
  2128  		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, types.Bool)
  2129  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
  2130  		v1.AddArg(x)
  2131  		v0.AddArg(v1)
  2132  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
  2133  		v2.AddArg(y)
  2134  		v0.AddArg(v2)
  2135  		v.AddArg(v0)
  2136  		return true
  2137  	}
  2138  }
  2139  func rewriteValueMIPS_OpLess16_0(v *Value) bool {
  2140  	b := v.Block
  2141  	_ = b
  2142  	types := &b.Func.Config.Types
  2143  	_ = types
  2144  	// match: (Less16 x y)
  2145  	// cond:
  2146  	// result: (SGT (SignExt16to32 y) (SignExt16to32 x))
  2147  	for {
  2148  		x := v.Args[0]
  2149  		y := v.Args[1]
  2150  		v.reset(OpMIPSSGT)
  2151  		v0 := b.NewValue0(v.Pos, OpSignExt16to32, types.Int32)
  2152  		v0.AddArg(y)
  2153  		v.AddArg(v0)
  2154  		v1 := b.NewValue0(v.Pos, OpSignExt16to32, types.Int32)
  2155  		v1.AddArg(x)
  2156  		v.AddArg(v1)
  2157  		return true
  2158  	}
  2159  }
  2160  func rewriteValueMIPS_OpLess16U_0(v *Value) bool {
  2161  	b := v.Block
  2162  	_ = b
  2163  	types := &b.Func.Config.Types
  2164  	_ = types
  2165  	// match: (Less16U x y)
  2166  	// cond:
  2167  	// result: (SGTU (ZeroExt16to32 y) (ZeroExt16to32 x))
  2168  	for {
  2169  		x := v.Args[0]
  2170  		y := v.Args[1]
  2171  		v.reset(OpMIPSSGTU)
  2172  		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
  2173  		v0.AddArg(y)
  2174  		v.AddArg(v0)
  2175  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
  2176  		v1.AddArg(x)
  2177  		v.AddArg(v1)
  2178  		return true
  2179  	}
  2180  }
  2181  func rewriteValueMIPS_OpLess32_0(v *Value) bool {
  2182  	// match: (Less32 x y)
  2183  	// cond:
  2184  	// result: (SGT y x)
  2185  	for {
  2186  		x := v.Args[0]
  2187  		y := v.Args[1]
  2188  		v.reset(OpMIPSSGT)
  2189  		v.AddArg(y)
  2190  		v.AddArg(x)
  2191  		return true
  2192  	}
  2193  }
  2194  func rewriteValueMIPS_OpLess32F_0(v *Value) bool {
  2195  	b := v.Block
  2196  	_ = b
  2197  	// match: (Less32F x y)
  2198  	// cond:
  2199  	// result: (FPFlagTrue (CMPGTF y x))
  2200  	for {
  2201  		x := v.Args[0]
  2202  		y := v.Args[1]
  2203  		v.reset(OpMIPSFPFlagTrue)
  2204  		v0 := b.NewValue0(v.Pos, OpMIPSCMPGTF, TypeFlags)
  2205  		v0.AddArg(y)
  2206  		v0.AddArg(x)
  2207  		v.AddArg(v0)
  2208  		return true
  2209  	}
  2210  }
  2211  func rewriteValueMIPS_OpLess32U_0(v *Value) bool {
  2212  	// match: (Less32U x y)
  2213  	// cond:
  2214  	// result: (SGTU y x)
  2215  	for {
  2216  		x := v.Args[0]
  2217  		y := v.Args[1]
  2218  		v.reset(OpMIPSSGTU)
  2219  		v.AddArg(y)
  2220  		v.AddArg(x)
  2221  		return true
  2222  	}
  2223  }
  2224  func rewriteValueMIPS_OpLess64F_0(v *Value) bool {
  2225  	b := v.Block
  2226  	_ = b
  2227  	// match: (Less64F x y)
  2228  	// cond:
  2229  	// result: (FPFlagTrue (CMPGTD y x))
  2230  	for {
  2231  		x := v.Args[0]
  2232  		y := v.Args[1]
  2233  		v.reset(OpMIPSFPFlagTrue)
  2234  		v0 := b.NewValue0(v.Pos, OpMIPSCMPGTD, TypeFlags)
  2235  		v0.AddArg(y)
  2236  		v0.AddArg(x)
  2237  		v.AddArg(v0)
  2238  		return true
  2239  	}
  2240  }
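// Editorial sketch, not generated from gen/MIPS.rules: the floating-point
// Less rules above have no dedicated "less than" compare; they reuse the
// greater-than compare with swapped operands and then read the FP condition
// flag, so x < y is tested as y > x. Hypothetical model of the 64-bit case,
// ignoring the flag plumbing (less64FModel is illustrative only):
func less64FModel(x, y float64) bool {
	return y > x // models (FPFlagTrue (CMPGTD y x)); false for NaN operands, as required
}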
  2241  func rewriteValueMIPS_OpLess8_0(v *Value) bool {
  2242  	b := v.Block
  2243  	_ = b
  2244  	types := &b.Func.Config.Types
  2245  	_ = types
  2246  	// match: (Less8 x y)
  2247  	// cond:
  2248  	// result: (SGT (SignExt8to32 y) (SignExt8to32 x))
  2249  	for {
  2250  		x := v.Args[0]
  2251  		y := v.Args[1]
  2252  		v.reset(OpMIPSSGT)
  2253  		v0 := b.NewValue0(v.Pos, OpSignExt8to32, types.Int32)
  2254  		v0.AddArg(y)
  2255  		v.AddArg(v0)
  2256  		v1 := b.NewValue0(v.Pos, OpSignExt8to32, types.Int32)
  2257  		v1.AddArg(x)
  2258  		v.AddArg(v1)
  2259  		return true
  2260  	}
  2261  }
  2262  func rewriteValueMIPS_OpLess8U_0(v *Value) bool {
  2263  	b := v.Block
  2264  	_ = b
  2265  	types := &b.Func.Config.Types
  2266  	_ = types
  2267  	// match: (Less8U x y)
  2268  	// cond:
  2269  	// result: (SGTU (ZeroExt8to32 y) (ZeroExt8to32 x))
  2270  	for {
  2271  		x := v.Args[0]
  2272  		y := v.Args[1]
  2273  		v.reset(OpMIPSSGTU)
  2274  		v0 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
  2275  		v0.AddArg(y)
  2276  		v.AddArg(v0)
  2277  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
  2278  		v1.AddArg(x)
  2279  		v.AddArg(v1)
  2280  		return true
  2281  	}
  2282  }
  2283  func rewriteValueMIPS_OpLoad_0(v *Value) bool {
  2284  	// match: (Load <t> ptr mem)
  2285  	// cond: t.IsBoolean()
  2286  	// result: (MOVBUload ptr mem)
  2287  	for {
  2288  		t := v.Type
  2289  		ptr := v.Args[0]
  2290  		mem := v.Args[1]
  2291  		if !(t.IsBoolean()) {
  2292  			break
  2293  		}
  2294  		v.reset(OpMIPSMOVBUload)
  2295  		v.AddArg(ptr)
  2296  		v.AddArg(mem)
  2297  		return true
  2298  	}
  2299  	// match: (Load <t> ptr mem)
  2300  	// cond: (is8BitInt(t) && isSigned(t))
  2301  	// result: (MOVBload ptr mem)
  2302  	for {
  2303  		t := v.Type
  2304  		ptr := v.Args[0]
  2305  		mem := v.Args[1]
  2306  		if !(is8BitInt(t) && isSigned(t)) {
  2307  			break
  2308  		}
  2309  		v.reset(OpMIPSMOVBload)
  2310  		v.AddArg(ptr)
  2311  		v.AddArg(mem)
  2312  		return true
  2313  	}
  2314  	// match: (Load <t> ptr mem)
  2315  	// cond: (is8BitInt(t) && !isSigned(t))
  2316  	// result: (MOVBUload ptr mem)
  2317  	for {
  2318  		t := v.Type
  2319  		ptr := v.Args[0]
  2320  		mem := v.Args[1]
  2321  		if !(is8BitInt(t) && !isSigned(t)) {
  2322  			break
  2323  		}
  2324  		v.reset(OpMIPSMOVBUload)
  2325  		v.AddArg(ptr)
  2326  		v.AddArg(mem)
  2327  		return true
  2328  	}
  2329  	// match: (Load <t> ptr mem)
  2330  	// cond: (is16BitInt(t) && isSigned(t))
  2331  	// result: (MOVHload ptr mem)
  2332  	for {
  2333  		t := v.Type
  2334  		ptr := v.Args[0]
  2335  		mem := v.Args[1]
  2336  		if !(is16BitInt(t) && isSigned(t)) {
  2337  			break
  2338  		}
  2339  		v.reset(OpMIPSMOVHload)
  2340  		v.AddArg(ptr)
  2341  		v.AddArg(mem)
  2342  		return true
  2343  	}
  2344  	// match: (Load <t> ptr mem)
  2345  	// cond: (is16BitInt(t) && !isSigned(t))
  2346  	// result: (MOVHUload ptr mem)
  2347  	for {
  2348  		t := v.Type
  2349  		ptr := v.Args[0]
  2350  		mem := v.Args[1]
  2351  		if !(is16BitInt(t) && !isSigned(t)) {
  2352  			break
  2353  		}
  2354  		v.reset(OpMIPSMOVHUload)
  2355  		v.AddArg(ptr)
  2356  		v.AddArg(mem)
  2357  		return true
  2358  	}
  2359  	// match: (Load <t> ptr mem)
  2360  	// cond: (is32BitInt(t) || isPtr(t))
  2361  	// result: (MOVWload ptr mem)
  2362  	for {
  2363  		t := v.Type
  2364  		ptr := v.Args[0]
  2365  		mem := v.Args[1]
  2366  		if !(is32BitInt(t) || isPtr(t)) {
  2367  			break
  2368  		}
  2369  		v.reset(OpMIPSMOVWload)
  2370  		v.AddArg(ptr)
  2371  		v.AddArg(mem)
  2372  		return true
  2373  	}
  2374  	// match: (Load <t> ptr mem)
  2375  	// cond: is32BitFloat(t)
  2376  	// result: (MOVFload ptr mem)
  2377  	for {
  2378  		t := v.Type
  2379  		ptr := v.Args[0]
  2380  		mem := v.Args[1]
  2381  		if !(is32BitFloat(t)) {
  2382  			break
  2383  		}
  2384  		v.reset(OpMIPSMOVFload)
  2385  		v.AddArg(ptr)
  2386  		v.AddArg(mem)
  2387  		return true
  2388  	}
  2389  	// match: (Load <t> ptr mem)
  2390  	// cond: is64BitFloat(t)
  2391  	// result: (MOVDload ptr mem)
  2392  	for {
  2393  		t := v.Type
  2394  		ptr := v.Args[0]
  2395  		mem := v.Args[1]
  2396  		if !(is64BitFloat(t)) {
  2397  			break
  2398  		}
  2399  		v.reset(OpMIPSMOVDload)
  2400  		v.AddArg(ptr)
  2401  		v.AddArg(mem)
  2402  		return true
  2403  	}
  2404  	return false
  2405  }
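// Editorial sketch, not generated from gen/MIPS.rules: the Load rules above
// choose a machine load purely from the Go-level type: the width picks a
// byte/half/word (or FP) load and the signedness picks the sign- versus
// zero-extending variant, with booleans treated as unsigned bytes. A
// hypothetical summary of that dispatch (loadOpModel does not exist in ssa):
func loadOpModel(widthBits int, signed, isFloat bool) string {
	switch {
	case isFloat && widthBits == 32:
		return "MOVFload"
	case isFloat:
		return "MOVDload" // 64-bit float
	case widthBits == 8 && signed:
		return "MOVBload"
	case widthBits == 8:
		return "MOVBUload" // also used for booleans
	case widthBits == 16 && signed:
		return "MOVHload"
	case widthBits == 16:
		return "MOVHUload"
	default:
		return "MOVWload" // 32-bit ints and pointers
	}
}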
  2406  func rewriteValueMIPS_OpLsh16x16_0(v *Value) bool {
  2407  	b := v.Block
  2408  	_ = b
  2409  	types := &b.Func.Config.Types
  2410  	_ = types
  2411  	// match: (Lsh16x16 <t> x y)
  2412  	// cond:
  2413  	// result: (CMOVZ (SLL <t> x (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
  2414  	for {
  2415  		t := v.Type
  2416  		x := v.Args[0]
  2417  		y := v.Args[1]
  2418  		v.reset(OpMIPSCMOVZ)
  2419  		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2420  		v0.AddArg(x)
  2421  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
  2422  		v1.AddArg(y)
  2423  		v0.AddArg(v1)
  2424  		v.AddArg(v0)
  2425  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  2426  		v2.AuxInt = 0
  2427  		v.AddArg(v2)
  2428  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, types.Bool)
  2429  		v3.AuxInt = 32
  2430  		v4 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
  2431  		v4.AddArg(y)
  2432  		v3.AddArg(v4)
  2433  		v.AddArg(v3)
  2434  		return true
  2435  	}
  2436  }
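// Editorial sketch, not generated from gen/MIPS.rules: Go defines a left
// shift of a 32-bit value by 32 or more bits as 0, while the MIPS SLL
// instruction only consumes the low 5 bits of the shift count. The
// variable-shift rules above therefore wrap SLL in a CMOVZ that forces the
// result to 0 whenever the zero-extended count is not below 32. Hypothetical
// model (lshClampModel is illustrative only):
func lshClampModel(x uint32, y uint16) uint32 {
	s := uint32(y)           // models (ZeroExt16to32 y)
	shifted := x << (s & 31) // models (SLL x s): hardware masks the count to 5 bits
	if !(32 > s) {           // models (SGTUconst [32] s), the CMOVZ condition
		return 0 // the (MOVWconst [0]) arm is selected when the condition is 0
	}
	return shifted
}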
  2437  func rewriteValueMIPS_OpLsh16x32_0(v *Value) bool {
  2438  	b := v.Block
  2439  	_ = b
  2440  	types := &b.Func.Config.Types
  2441  	_ = types
  2442  	// match: (Lsh16x32 <t> x y)
  2443  	// cond:
  2444  	// result: (CMOVZ (SLL <t> x y) (MOVWconst [0]) (SGTUconst [32] y))
  2445  	for {
  2446  		t := v.Type
  2447  		x := v.Args[0]
  2448  		y := v.Args[1]
  2449  		v.reset(OpMIPSCMOVZ)
  2450  		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2451  		v0.AddArg(x)
  2452  		v0.AddArg(y)
  2453  		v.AddArg(v0)
  2454  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  2455  		v1.AuxInt = 0
  2456  		v.AddArg(v1)
  2457  		v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, types.Bool)
  2458  		v2.AuxInt = 32
  2459  		v2.AddArg(y)
  2460  		v.AddArg(v2)
  2461  		return true
  2462  	}
  2463  }
  2464  func rewriteValueMIPS_OpLsh16x64_0(v *Value) bool {
  2465  	// match: (Lsh16x64 x (Const64 [c]))
  2466  	// cond: uint32(c) < 16
  2467  	// result: (SLLconst x [c])
  2468  	for {
  2469  		x := v.Args[0]
  2470  		v_1 := v.Args[1]
  2471  		if v_1.Op != OpConst64 {
  2472  			break
  2473  		}
  2474  		c := v_1.AuxInt
  2475  		if !(uint32(c) < 16) {
  2476  			break
  2477  		}
  2478  		v.reset(OpMIPSSLLconst)
  2479  		v.AuxInt = c
  2480  		v.AddArg(x)
  2481  		return true
  2482  	}
  2483  	// match: (Lsh16x64 _ (Const64 [c]))
  2484  	// cond: uint32(c) >= 16
  2485  	// result: (MOVWconst [0])
  2486  	for {
  2487  		v_1 := v.Args[1]
  2488  		if v_1.Op != OpConst64 {
  2489  			break
  2490  		}
  2491  		c := v_1.AuxInt
  2492  		if !(uint32(c) >= 16) {
  2493  			break
  2494  		}
  2495  		v.reset(OpMIPSMOVWconst)
  2496  		v.AuxInt = 0
  2497  		return true
  2498  	}
  2499  	return false
  2500  }
  2501  func rewriteValueMIPS_OpLsh16x8_0(v *Value) bool {
  2502  	b := v.Block
  2503  	_ = b
  2504  	types := &b.Func.Config.Types
  2505  	_ = types
  2506  	// match: (Lsh16x8 <t> x y)
  2507  	// cond:
  2508  	// result: (CMOVZ (SLL <t> x (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
  2509  	for {
  2510  		t := v.Type
  2511  		x := v.Args[0]
  2512  		y := v.Args[1]
  2513  		v.reset(OpMIPSCMOVZ)
  2514  		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2515  		v0.AddArg(x)
  2516  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
  2517  		v1.AddArg(y)
  2518  		v0.AddArg(v1)
  2519  		v.AddArg(v0)
  2520  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  2521  		v2.AuxInt = 0
  2522  		v.AddArg(v2)
  2523  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, types.Bool)
  2524  		v3.AuxInt = 32
  2525  		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
  2526  		v4.AddArg(y)
  2527  		v3.AddArg(v4)
  2528  		v.AddArg(v3)
  2529  		return true
  2530  	}
  2531  }
  2532  func rewriteValueMIPS_OpLsh32x16_0(v *Value) bool {
  2533  	b := v.Block
  2534  	_ = b
  2535  	types := &b.Func.Config.Types
  2536  	_ = types
  2537  	// match: (Lsh32x16 <t> x y)
  2538  	// cond:
  2539  	// result: (CMOVZ (SLL <t> x (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
  2540  	for {
  2541  		t := v.Type
  2542  		x := v.Args[0]
  2543  		y := v.Args[1]
  2544  		v.reset(OpMIPSCMOVZ)
  2545  		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2546  		v0.AddArg(x)
  2547  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
  2548  		v1.AddArg(y)
  2549  		v0.AddArg(v1)
  2550  		v.AddArg(v0)
  2551  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  2552  		v2.AuxInt = 0
  2553  		v.AddArg(v2)
  2554  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, types.Bool)
  2555  		v3.AuxInt = 32
  2556  		v4 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
  2557  		v4.AddArg(y)
  2558  		v3.AddArg(v4)
  2559  		v.AddArg(v3)
  2560  		return true
  2561  	}
  2562  }
  2563  func rewriteValueMIPS_OpLsh32x32_0(v *Value) bool {
  2564  	b := v.Block
  2565  	_ = b
  2566  	types := &b.Func.Config.Types
  2567  	_ = types
  2568  	// match: (Lsh32x32 <t> x y)
  2569  	// cond:
  2570  	// result: (CMOVZ (SLL <t> x y) (MOVWconst [0]) (SGTUconst [32] y))
  2571  	for {
  2572  		t := v.Type
  2573  		x := v.Args[0]
  2574  		y := v.Args[1]
  2575  		v.reset(OpMIPSCMOVZ)
  2576  		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2577  		v0.AddArg(x)
  2578  		v0.AddArg(y)
  2579  		v.AddArg(v0)
  2580  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  2581  		v1.AuxInt = 0
  2582  		v.AddArg(v1)
  2583  		v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, types.Bool)
  2584  		v2.AuxInt = 32
  2585  		v2.AddArg(y)
  2586  		v.AddArg(v2)
  2587  		return true
  2588  	}
  2589  }
  2590  func rewriteValueMIPS_OpLsh32x64_0(v *Value) bool {
  2591  	// match: (Lsh32x64 x (Const64 [c]))
  2592  	// cond: uint32(c) < 32
  2593  	// result: (SLLconst x [c])
  2594  	for {
  2595  		x := v.Args[0]
  2596  		v_1 := v.Args[1]
  2597  		if v_1.Op != OpConst64 {
  2598  			break
  2599  		}
  2600  		c := v_1.AuxInt
  2601  		if !(uint32(c) < 32) {
  2602  			break
  2603  		}
  2604  		v.reset(OpMIPSSLLconst)
  2605  		v.AuxInt = c
  2606  		v.AddArg(x)
  2607  		return true
  2608  	}
  2609  	// match: (Lsh32x64 _ (Const64 [c]))
  2610  	// cond: uint32(c) >= 32
  2611  	// result: (MOVWconst [0])
  2612  	for {
  2613  		v_1 := v.Args[1]
  2614  		if v_1.Op != OpConst64 {
  2615  			break
  2616  		}
  2617  		c := v_1.AuxInt
  2618  		if !(uint32(c) >= 32) {
  2619  			break
  2620  		}
  2621  		v.reset(OpMIPSMOVWconst)
  2622  		v.AuxInt = 0
  2623  		return true
  2624  	}
  2625  	return false
  2626  }
  2627  func rewriteValueMIPS_OpLsh32x8_0(v *Value) bool {
  2628  	b := v.Block
  2629  	_ = b
  2630  	types := &b.Func.Config.Types
  2631  	_ = types
  2632  	// match: (Lsh32x8 <t> x y)
  2633  	// cond:
  2634  	// result: (CMOVZ (SLL <t> x (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
  2635  	for {
  2636  		t := v.Type
  2637  		x := v.Args[0]
  2638  		y := v.Args[1]
  2639  		v.reset(OpMIPSCMOVZ)
  2640  		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2641  		v0.AddArg(x)
  2642  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
  2643  		v1.AddArg(y)
  2644  		v0.AddArg(v1)
  2645  		v.AddArg(v0)
  2646  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  2647  		v2.AuxInt = 0
  2648  		v.AddArg(v2)
  2649  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, types.Bool)
  2650  		v3.AuxInt = 32
  2651  		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
  2652  		v4.AddArg(y)
  2653  		v3.AddArg(v4)
  2654  		v.AddArg(v3)
  2655  		return true
  2656  	}
  2657  }
  2658  func rewriteValueMIPS_OpLsh8x16_0(v *Value) bool {
  2659  	b := v.Block
  2660  	_ = b
  2661  	types := &b.Func.Config.Types
  2662  	_ = types
  2663  	// match: (Lsh8x16 <t> x y)
  2664  	// cond:
  2665  	// result: (CMOVZ (SLL <t> x (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
  2666  	for {
  2667  		t := v.Type
  2668  		x := v.Args[0]
  2669  		y := v.Args[1]
  2670  		v.reset(OpMIPSCMOVZ)
  2671  		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2672  		v0.AddArg(x)
  2673  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
  2674  		v1.AddArg(y)
  2675  		v0.AddArg(v1)
  2676  		v.AddArg(v0)
  2677  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  2678  		v2.AuxInt = 0
  2679  		v.AddArg(v2)
  2680  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, types.Bool)
  2681  		v3.AuxInt = 32
  2682  		v4 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
  2683  		v4.AddArg(y)
  2684  		v3.AddArg(v4)
  2685  		v.AddArg(v3)
  2686  		return true
  2687  	}
  2688  }
  2689  func rewriteValueMIPS_OpLsh8x32_0(v *Value) bool {
  2690  	b := v.Block
  2691  	_ = b
  2692  	types := &b.Func.Config.Types
  2693  	_ = types
  2694  	// match: (Lsh8x32 <t> x y)
  2695  	// cond:
  2696  	// result: (CMOVZ (SLL <t> x y) (MOVWconst [0]) (SGTUconst [32] y))
  2697  	for {
  2698  		t := v.Type
  2699  		x := v.Args[0]
  2700  		y := v.Args[1]
  2701  		v.reset(OpMIPSCMOVZ)
  2702  		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2703  		v0.AddArg(x)
  2704  		v0.AddArg(y)
  2705  		v.AddArg(v0)
  2706  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  2707  		v1.AuxInt = 0
  2708  		v.AddArg(v1)
  2709  		v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, types.Bool)
  2710  		v2.AuxInt = 32
  2711  		v2.AddArg(y)
  2712  		v.AddArg(v2)
  2713  		return true
  2714  	}
  2715  }
  2716  func rewriteValueMIPS_OpLsh8x64_0(v *Value) bool {
  2717  	// match: (Lsh8x64 x (Const64 [c]))
  2718  	// cond: uint32(c) < 8
  2719  	// result: (SLLconst x [c])
  2720  	for {
  2721  		x := v.Args[0]
  2722  		v_1 := v.Args[1]
  2723  		if v_1.Op != OpConst64 {
  2724  			break
  2725  		}
  2726  		c := v_1.AuxInt
  2727  		if !(uint32(c) < 8) {
  2728  			break
  2729  		}
  2730  		v.reset(OpMIPSSLLconst)
  2731  		v.AuxInt = c
  2732  		v.AddArg(x)
  2733  		return true
  2734  	}
  2735  	// match: (Lsh8x64 _ (Const64 [c]))
  2736  	// cond: uint32(c) >= 8
  2737  	// result: (MOVWconst [0])
  2738  	for {
  2739  		v_1 := v.Args[1]
  2740  		if v_1.Op != OpConst64 {
  2741  			break
  2742  		}
  2743  		c := v_1.AuxInt
  2744  		if !(uint32(c) >= 8) {
  2745  			break
  2746  		}
  2747  		v.reset(OpMIPSMOVWconst)
  2748  		v.AuxInt = 0
  2749  		return true
  2750  	}
  2751  	return false
  2752  }
  2753  func rewriteValueMIPS_OpLsh8x8_0(v *Value) bool {
  2754  	b := v.Block
  2755  	_ = b
  2756  	types := &b.Func.Config.Types
  2757  	_ = types
  2758  	// match: (Lsh8x8 <t> x y)
  2759  	// cond:
  2760  	// result: (CMOVZ (SLL <t> x (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
  2761  	for {
  2762  		t := v.Type
  2763  		x := v.Args[0]
  2764  		y := v.Args[1]
  2765  		v.reset(OpMIPSCMOVZ)
  2766  		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2767  		v0.AddArg(x)
  2768  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
  2769  		v1.AddArg(y)
  2770  		v0.AddArg(v1)
  2771  		v.AddArg(v0)
  2772  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  2773  		v2.AuxInt = 0
  2774  		v.AddArg(v2)
  2775  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, types.Bool)
  2776  		v3.AuxInt = 32
  2777  		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
  2778  		v4.AddArg(y)
  2779  		v3.AddArg(v4)
  2780  		v.AddArg(v3)
  2781  		return true
  2782  	}
  2783  }
  2784  func rewriteValueMIPS_OpMIPSADD_0(v *Value) bool {
  2785  	// match: (ADD x (MOVWconst [c]))
  2786  	// cond:
  2787  	// result: (ADDconst [c] x)
  2788  	for {
  2789  		x := v.Args[0]
  2790  		v_1 := v.Args[1]
  2791  		if v_1.Op != OpMIPSMOVWconst {
  2792  			break
  2793  		}
  2794  		c := v_1.AuxInt
  2795  		v.reset(OpMIPSADDconst)
  2796  		v.AuxInt = c
  2797  		v.AddArg(x)
  2798  		return true
  2799  	}
  2800  	// match: (ADD (MOVWconst [c]) x)
  2801  	// cond:
  2802  	// result: (ADDconst [c] x)
  2803  	for {
  2804  		v_0 := v.Args[0]
  2805  		if v_0.Op != OpMIPSMOVWconst {
  2806  			break
  2807  		}
  2808  		c := v_0.AuxInt
  2809  		x := v.Args[1]
  2810  		v.reset(OpMIPSADDconst)
  2811  		v.AuxInt = c
  2812  		v.AddArg(x)
  2813  		return true
  2814  	}
  2815  	// match: (ADD x (NEG y))
  2816  	// cond:
  2817  	// result: (SUB x y)
  2818  	for {
  2819  		x := v.Args[0]
  2820  		v_1 := v.Args[1]
  2821  		if v_1.Op != OpMIPSNEG {
  2822  			break
  2823  		}
  2824  		y := v_1.Args[0]
  2825  		v.reset(OpMIPSSUB)
  2826  		v.AddArg(x)
  2827  		v.AddArg(y)
  2828  		return true
  2829  	}
  2830  	// match: (ADD (NEG y) x)
  2831  	// cond:
  2832  	// result: (SUB x y)
  2833  	for {
  2834  		v_0 := v.Args[0]
  2835  		if v_0.Op != OpMIPSNEG {
  2836  			break
  2837  		}
  2838  		y := v_0.Args[0]
  2839  		x := v.Args[1]
  2840  		v.reset(OpMIPSSUB)
  2841  		v.AddArg(x)
  2842  		v.AddArg(y)
  2843  		return true
  2844  	}
  2845  	return false
  2846  }
  2847  func rewriteValueMIPS_OpMIPSADDconst_0(v *Value) bool {
  2848  	// match: (ADDconst [off1] (MOVWaddr [off2] {sym} ptr))
  2849  	// cond:
  2850  	// result: (MOVWaddr [off1+off2] {sym} ptr)
  2851  	for {
  2852  		off1 := v.AuxInt
  2853  		v_0 := v.Args[0]
  2854  		if v_0.Op != OpMIPSMOVWaddr {
  2855  			break
  2856  		}
  2857  		off2 := v_0.AuxInt
  2858  		sym := v_0.Aux
  2859  		ptr := v_0.Args[0]
  2860  		v.reset(OpMIPSMOVWaddr)
  2861  		v.AuxInt = off1 + off2
  2862  		v.Aux = sym
  2863  		v.AddArg(ptr)
  2864  		return true
  2865  	}
  2866  	// match: (ADDconst [0] x)
  2867  	// cond:
  2868  	// result: x
  2869  	for {
  2870  		if v.AuxInt != 0 {
  2871  			break
  2872  		}
  2873  		x := v.Args[0]
  2874  		v.reset(OpCopy)
  2875  		v.Type = x.Type
  2876  		v.AddArg(x)
  2877  		return true
  2878  	}
  2879  	// match: (ADDconst [c] (MOVWconst [d]))
  2880  	// cond:
  2881  	// result: (MOVWconst [int64(int32(c+d))])
  2882  	for {
  2883  		c := v.AuxInt
  2884  		v_0 := v.Args[0]
  2885  		if v_0.Op != OpMIPSMOVWconst {
  2886  			break
  2887  		}
  2888  		d := v_0.AuxInt
  2889  		v.reset(OpMIPSMOVWconst)
  2890  		v.AuxInt = int64(int32(c + d))
  2891  		return true
  2892  	}
  2893  	// match: (ADDconst [c] (ADDconst [d] x))
  2894  	// cond:
  2895  	// result: (ADDconst [int64(int32(c+d))] x)
  2896  	for {
  2897  		c := v.AuxInt
  2898  		v_0 := v.Args[0]
  2899  		if v_0.Op != OpMIPSADDconst {
  2900  			break
  2901  		}
  2902  		d := v_0.AuxInt
  2903  		x := v_0.Args[0]
  2904  		v.reset(OpMIPSADDconst)
  2905  		v.AuxInt = int64(int32(c + d))
  2906  		v.AddArg(x)
  2907  		return true
  2908  	}
  2909  	// match: (ADDconst [c] (SUBconst [d] x))
  2910  	// cond:
  2911  	// result: (ADDconst [int64(int32(c-d))] x)
  2912  	for {
  2913  		c := v.AuxInt
  2914  		v_0 := v.Args[0]
  2915  		if v_0.Op != OpMIPSSUBconst {
  2916  			break
  2917  		}
  2918  		d := v_0.AuxInt
  2919  		x := v_0.Args[0]
  2920  		v.reset(OpMIPSADDconst)
  2921  		v.AuxInt = int64(int32(c - d))
  2922  		v.AddArg(x)
  2923  		return true
  2924  	}
  2925  	return false
  2926  }
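// Editorial sketch, not generated from gen/MIPS.rules: AuxInt constants are
// stored as int64 even though MIPS words are 32 bits wide, so the ADDconst
// folding rules above pass every folded sum through int32 before widening it
// back, keeping the constant in canonical sign-extended form. Hypothetical
// model of that normalization:
func foldAddconstModel(c, d int64) int64 {
	return int64(int32(c + d)) // wrap to 32 bits, then sign-extend, as the rules do
}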
  2927  func rewriteValueMIPS_OpMIPSAND_0(v *Value) bool {
  2928  	b := v.Block
  2929  	_ = b
  2930  	// match: (AND x (MOVWconst [c]))
  2931  	// cond:
  2932  	// result: (ANDconst [c] x)
  2933  	for {
  2934  		x := v.Args[0]
  2935  		v_1 := v.Args[1]
  2936  		if v_1.Op != OpMIPSMOVWconst {
  2937  			break
  2938  		}
  2939  		c := v_1.AuxInt
  2940  		v.reset(OpMIPSANDconst)
  2941  		v.AuxInt = c
  2942  		v.AddArg(x)
  2943  		return true
  2944  	}
  2945  	// match: (AND (MOVWconst [c]) x)
  2946  	// cond:
  2947  	// result: (ANDconst [c] x)
  2948  	for {
  2949  		v_0 := v.Args[0]
  2950  		if v_0.Op != OpMIPSMOVWconst {
  2951  			break
  2952  		}
  2953  		c := v_0.AuxInt
  2954  		x := v.Args[1]
  2955  		v.reset(OpMIPSANDconst)
  2956  		v.AuxInt = c
  2957  		v.AddArg(x)
  2958  		return true
  2959  	}
  2960  	// match: (AND x x)
  2961  	// cond:
  2962  	// result: x
  2963  	for {
  2964  		x := v.Args[0]
  2965  		if x != v.Args[1] {
  2966  			break
  2967  		}
  2968  		v.reset(OpCopy)
  2969  		v.Type = x.Type
  2970  		v.AddArg(x)
  2971  		return true
  2972  	}
  2973  	// match: (AND (SGTUconst [1] x) (SGTUconst [1] y))
  2974  	// cond:
  2975  	// result: (SGTUconst [1] (OR <x.Type> x y))
  2976  	for {
  2977  		v_0 := v.Args[0]
  2978  		if v_0.Op != OpMIPSSGTUconst {
  2979  			break
  2980  		}
  2981  		if v_0.AuxInt != 1 {
  2982  			break
  2983  		}
  2984  		x := v_0.Args[0]
  2985  		v_1 := v.Args[1]
  2986  		if v_1.Op != OpMIPSSGTUconst {
  2987  			break
  2988  		}
  2989  		if v_1.AuxInt != 1 {
  2990  			break
  2991  		}
  2992  		y := v_1.Args[0]
  2993  		v.reset(OpMIPSSGTUconst)
  2994  		v.AuxInt = 1
  2995  		v0 := b.NewValue0(v.Pos, OpMIPSOR, x.Type)
  2996  		v0.AddArg(x)
  2997  		v0.AddArg(y)
  2998  		v.AddArg(v0)
  2999  		return true
  3000  	}
  3001  	// match: (AND (SGTUconst [1] y) (SGTUconst [1] x))
  3002  	// cond:
  3003  	// result: (SGTUconst [1] (OR <x.Type> x y))
  3004  	for {
  3005  		v_0 := v.Args[0]
  3006  		if v_0.Op != OpMIPSSGTUconst {
  3007  			break
  3008  		}
  3009  		if v_0.AuxInt != 1 {
  3010  			break
  3011  		}
  3012  		y := v_0.Args[0]
  3013  		v_1 := v.Args[1]
  3014  		if v_1.Op != OpMIPSSGTUconst {
  3015  			break
  3016  		}
  3017  		if v_1.AuxInt != 1 {
  3018  			break
  3019  		}
  3020  		x := v_1.Args[0]
  3021  		v.reset(OpMIPSSGTUconst)
  3022  		v.AuxInt = 1
  3023  		v0 := b.NewValue0(v.Pos, OpMIPSOR, x.Type)
  3024  		v0.AddArg(x)
  3025  		v0.AddArg(y)
  3026  		v.AddArg(v0)
  3027  		return true
  3028  	}
  3029  	return false
  3030  }
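// Editorial sketch, not generated from gen/MIPS.rules: (SGTUconst [1] x) is
// the MIPS idiom for "x == 0", since unsigned 1 > x holds only when x is
// zero. The last two AND rules above exploit that to merge two zero tests
// into one: (x == 0) && (y == 0) is the same as (x | y) == 0. Hypothetical
// model:
func andOfZeroTestsModel(x, y uint32) bool {
	return (x | y) == 0 // equivalent to (1 > x) && (1 > y) in unsigned arithmetic
}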
  3031  func rewriteValueMIPS_OpMIPSANDconst_0(v *Value) bool {
  3032  	// match: (ANDconst [0] _)
  3033  	// cond:
  3034  	// result: (MOVWconst [0])
  3035  	for {
  3036  		if v.AuxInt != 0 {
  3037  			break
  3038  		}
  3039  		v.reset(OpMIPSMOVWconst)
  3040  		v.AuxInt = 0
  3041  		return true
  3042  	}
  3043  	// match: (ANDconst [-1] x)
  3044  	// cond:
  3045  	// result: x
  3046  	for {
  3047  		if v.AuxInt != -1 {
  3048  			break
  3049  		}
  3050  		x := v.Args[0]
  3051  		v.reset(OpCopy)
  3052  		v.Type = x.Type
  3053  		v.AddArg(x)
  3054  		return true
  3055  	}
  3056  	// match: (ANDconst [c] (MOVWconst [d]))
  3057  	// cond:
  3058  	// result: (MOVWconst [c&d])
  3059  	for {
  3060  		c := v.AuxInt
  3061  		v_0 := v.Args[0]
  3062  		if v_0.Op != OpMIPSMOVWconst {
  3063  			break
  3064  		}
  3065  		d := v_0.AuxInt
  3066  		v.reset(OpMIPSMOVWconst)
  3067  		v.AuxInt = c & d
  3068  		return true
  3069  	}
  3070  	// match: (ANDconst [c] (ANDconst [d] x))
  3071  	// cond:
  3072  	// result: (ANDconst [c&d] x)
  3073  	for {
  3074  		c := v.AuxInt
  3075  		v_0 := v.Args[0]
  3076  		if v_0.Op != OpMIPSANDconst {
  3077  			break
  3078  		}
  3079  		d := v_0.AuxInt
  3080  		x := v_0.Args[0]
  3081  		v.reset(OpMIPSANDconst)
  3082  		v.AuxInt = c & d
  3083  		v.AddArg(x)
  3084  		return true
  3085  	}
  3086  	return false
  3087  }
  3088  func rewriteValueMIPS_OpMIPSCMOVZ_0(v *Value) bool {
  3089  	b := v.Block
  3090  	_ = b
  3091  	// match: (CMOVZ _ b (MOVWconst [0]))
  3092  	// cond:
  3093  	// result: b
  3094  	for {
  3095  		b := v.Args[1]
  3096  		v_2 := v.Args[2]
  3097  		if v_2.Op != OpMIPSMOVWconst {
  3098  			break
  3099  		}
  3100  		if v_2.AuxInt != 0 {
  3101  			break
  3102  		}
  3103  		v.reset(OpCopy)
  3104  		v.Type = b.Type
  3105  		v.AddArg(b)
  3106  		return true
  3107  	}
  3108  	// match: (CMOVZ a _ (MOVWconst [c]))
  3109  	// cond: c!=0
  3110  	// result: a
  3111  	for {
  3112  		a := v.Args[0]
  3113  		v_2 := v.Args[2]
  3114  		if v_2.Op != OpMIPSMOVWconst {
  3115  			break
  3116  		}
  3117  		c := v_2.AuxInt
  3118  		if !(c != 0) {
  3119  			break
  3120  		}
  3121  		v.reset(OpCopy)
  3122  		v.Type = a.Type
  3123  		v.AddArg(a)
  3124  		return true
  3125  	}
  3126  	// match: (CMOVZ a (MOVWconst [0]) c)
  3127  	// cond:
  3128  	// result: (CMOVZzero a c)
  3129  	for {
  3130  		a := v.Args[0]
  3131  		v_1 := v.Args[1]
  3132  		if v_1.Op != OpMIPSMOVWconst {
  3133  			break
  3134  		}
  3135  		if v_1.AuxInt != 0 {
  3136  			break
  3137  		}
  3138  		c := v.Args[2]
  3139  		v.reset(OpMIPSCMOVZzero)
  3140  		v.AddArg(a)
  3141  		v.AddArg(c)
  3142  		return true
  3143  	}
  3144  	return false
  3145  }
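// Editorial sketch, not generated from gen/MIPS.rules: CMOVZ a b c yields b
// when the condition c is zero and a otherwise, so the rules above can fold
// it away whenever c is a known constant, and switch to CMOVZzero when the
// second argument is itself a constant 0. Hypothetical model of the base
// semantics:
func cmovzModel(a, b, c uint32) uint32 {
	if c == 0 {
		return b // the (MOVWconst [0]) case: a zero condition selects the second arg
	}
	return a // any nonzero condition selects the first arg
}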
  3146  func rewriteValueMIPS_OpMIPSCMOVZzero_0(v *Value) bool {
  3147  	// match: (CMOVZzero _ (MOVWconst [0]))
  3148  	// cond:
  3149  	// result: (MOVWconst [0])
  3150  	for {
  3151  		v_1 := v.Args[1]
  3152  		if v_1.Op != OpMIPSMOVWconst {
  3153  			break
  3154  		}
  3155  		if v_1.AuxInt != 0 {
  3156  			break
  3157  		}
  3158  		v.reset(OpMIPSMOVWconst)
  3159  		v.AuxInt = 0
  3160  		return true
  3161  	}
  3162  	// match: (CMOVZzero a (MOVWconst [c]))
  3163  	// cond: c!=0
  3164  	// result: a
  3165  	for {
  3166  		a := v.Args[0]
  3167  		v_1 := v.Args[1]
  3168  		if v_1.Op != OpMIPSMOVWconst {
  3169  			break
  3170  		}
  3171  		c := v_1.AuxInt
  3172  		if !(c != 0) {
  3173  			break
  3174  		}
  3175  		v.reset(OpCopy)
  3176  		v.Type = a.Type
  3177  		v.AddArg(a)
  3178  		return true
  3179  	}
  3180  	return false
  3181  }
  3182  func rewriteValueMIPS_OpMIPSLoweredAtomicAdd_0(v *Value) bool {
  3183  	// match: (LoweredAtomicAdd ptr (MOVWconst [c]) mem)
  3184  	// cond: is16Bit(c)
  3185  	// result: (LoweredAtomicAddconst [c] ptr mem)
  3186  	for {
  3187  		ptr := v.Args[0]
  3188  		v_1 := v.Args[1]
  3189  		if v_1.Op != OpMIPSMOVWconst {
  3190  			break
  3191  		}
  3192  		c := v_1.AuxInt
  3193  		mem := v.Args[2]
  3194  		if !(is16Bit(c)) {
  3195  			break
  3196  		}
  3197  		v.reset(OpMIPSLoweredAtomicAddconst)
  3198  		v.AuxInt = c
  3199  		v.AddArg(ptr)
  3200  		v.AddArg(mem)
  3201  		return true
  3202  	}
  3203  	return false
  3204  }
  3205  func rewriteValueMIPS_OpMIPSLoweredAtomicStore_0(v *Value) bool {
  3206  	// match: (LoweredAtomicStore ptr (MOVWconst [0]) mem)
  3207  	// cond:
  3208  	// result: (LoweredAtomicStorezero ptr mem)
  3209  	for {
  3210  		ptr := v.Args[0]
  3211  		v_1 := v.Args[1]
  3212  		if v_1.Op != OpMIPSMOVWconst {
  3213  			break
  3214  		}
  3215  		if v_1.AuxInt != 0 {
  3216  			break
  3217  		}
  3218  		mem := v.Args[2]
  3219  		v.reset(OpMIPSLoweredAtomicStorezero)
  3220  		v.AddArg(ptr)
  3221  		v.AddArg(mem)
  3222  		return true
  3223  	}
  3224  	return false
  3225  }
  3226  func rewriteValueMIPS_OpMIPSMOVBUload_0(v *Value) bool {
  3227  	// match: (MOVBUload [off1] {sym} x:(ADDconst [off2] ptr) mem)
  3228  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  3229  	// result: (MOVBUload [off1+off2] {sym} ptr mem)
  3230  	for {
  3231  		off1 := v.AuxInt
  3232  		sym := v.Aux
  3233  		x := v.Args[0]
  3234  		if x.Op != OpMIPSADDconst {
  3235  			break
  3236  		}
  3237  		off2 := x.AuxInt
  3238  		ptr := x.Args[0]
  3239  		mem := v.Args[1]
  3240  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  3241  			break
  3242  		}
  3243  		v.reset(OpMIPSMOVBUload)
  3244  		v.AuxInt = off1 + off2
  3245  		v.Aux = sym
  3246  		v.AddArg(ptr)
  3247  		v.AddArg(mem)
  3248  		return true
  3249  	}
  3250  	// match: (MOVBUload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  3251  	// cond: canMergeSym(sym1,sym2)
  3252  	// result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  3253  	for {
  3254  		off1 := v.AuxInt
  3255  		sym1 := v.Aux
  3256  		v_0 := v.Args[0]
  3257  		if v_0.Op != OpMIPSMOVWaddr {
  3258  			break
  3259  		}
  3260  		off2 := v_0.AuxInt
  3261  		sym2 := v_0.Aux
  3262  		ptr := v_0.Args[0]
  3263  		mem := v.Args[1]
  3264  		if !(canMergeSym(sym1, sym2)) {
  3265  			break
  3266  		}
  3267  		v.reset(OpMIPSMOVBUload)
  3268  		v.AuxInt = off1 + off2
  3269  		v.Aux = mergeSym(sym1, sym2)
  3270  		v.AddArg(ptr)
  3271  		v.AddArg(mem)
  3272  		return true
  3273  	}
  3274  	// match: (MOVBUload [off] {sym} ptr (MOVBstore [off2] {sym2} ptr2 x _))
  3275  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) && !isSigned(x.Type)
  3276  	// result: x
  3277  	for {
  3278  		off := v.AuxInt
  3279  		sym := v.Aux
  3280  		ptr := v.Args[0]
  3281  		v_1 := v.Args[1]
  3282  		if v_1.Op != OpMIPSMOVBstore {
  3283  			break
  3284  		}
  3285  		off2 := v_1.AuxInt
  3286  		sym2 := v_1.Aux
  3287  		ptr2 := v_1.Args[0]
  3288  		x := v_1.Args[1]
  3289  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) && !isSigned(x.Type)) {
  3290  			break
  3291  		}
  3292  		v.reset(OpCopy)
  3293  		v.Type = x.Type
  3294  		v.AddArg(x)
  3295  		return true
  3296  	}
  3297  	return false
  3298  }
  3299  func rewriteValueMIPS_OpMIPSMOVBUreg_0(v *Value) bool {
  3300  	b := v.Block
  3301  	_ = b
  3302  	// match: (MOVBUreg x:(MOVBUload _ _))
  3303  	// cond:
  3304  	// result: (MOVWreg x)
  3305  	for {
  3306  		x := v.Args[0]
  3307  		if x.Op != OpMIPSMOVBUload {
  3308  			break
  3309  		}
  3310  		v.reset(OpMIPSMOVWreg)
  3311  		v.AddArg(x)
  3312  		return true
  3313  	}
  3314  	// match: (MOVBUreg x:(MOVBUreg _))
  3315  	// cond:
  3316  	// result: (MOVWreg x)
  3317  	for {
  3318  		x := v.Args[0]
  3319  		if x.Op != OpMIPSMOVBUreg {
  3320  			break
  3321  		}
  3322  		v.reset(OpMIPSMOVWreg)
  3323  		v.AddArg(x)
  3324  		return true
  3325  	}
  3326  	// match: (MOVBUreg <t> x:(MOVBload [off] {sym} ptr mem))
  3327  	// cond: x.Uses == 1 && clobber(x)
  3328  	// result: @x.Block (MOVBUload <t> [off] {sym} ptr mem)
  3329  	for {
  3330  		t := v.Type
  3331  		x := v.Args[0]
  3332  		if x.Op != OpMIPSMOVBload {
  3333  			break
  3334  		}
  3335  		off := x.AuxInt
  3336  		sym := x.Aux
  3337  		ptr := x.Args[0]
  3338  		mem := x.Args[1]
  3339  		if !(x.Uses == 1 && clobber(x)) {
  3340  			break
  3341  		}
  3342  		b = x.Block
  3343  		v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, t)
  3344  		v.reset(OpCopy)
  3345  		v.AddArg(v0)
  3346  		v0.AuxInt = off
  3347  		v0.Aux = sym
  3348  		v0.AddArg(ptr)
  3349  		v0.AddArg(mem)
  3350  		return true
  3351  	}
  3352  	// match: (MOVBUreg (ANDconst [c] x))
  3353  	// cond:
  3354  	// result: (ANDconst [c&0xff] x)
  3355  	for {
  3356  		v_0 := v.Args[0]
  3357  		if v_0.Op != OpMIPSANDconst {
  3358  			break
  3359  		}
  3360  		c := v_0.AuxInt
  3361  		x := v_0.Args[0]
  3362  		v.reset(OpMIPSANDconst)
  3363  		v.AuxInt = c & 0xff
  3364  		v.AddArg(x)
  3365  		return true
  3366  	}
  3367  	// match: (MOVBUreg (MOVWconst [c]))
  3368  	// cond:
  3369  	// result: (MOVWconst [int64(uint8(c))])
  3370  	for {
  3371  		v_0 := v.Args[0]
  3372  		if v_0.Op != OpMIPSMOVWconst {
  3373  			break
  3374  		}
  3375  		c := v_0.AuxInt
  3376  		v.reset(OpMIPSMOVWconst)
  3377  		v.AuxInt = int64(uint8(c))
  3378  		return true
  3379  	}
  3380  	return false
  3381  }
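// Editorial sketch, not generated from gen/MIPS.rules: MOVBUreg is a zero
// extension of the low byte, so the rules above fold it into an existing
// ANDconst by tightening the mask to 0xff, and fold it over a constant by
// keeping only the constant's low byte. Hypothetical models of those folds:
func movburegOfAndconstModel(c, x uint32) uint32 { return (c & 0xff) & x }
func movburegOfConstModel(c int64) int64         { return int64(uint8(c)) }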
  3382  func rewriteValueMIPS_OpMIPSMOVBload_0(v *Value) bool {
  3383  	// match: (MOVBload [off1] {sym} x:(ADDconst [off2] ptr) mem)
  3384  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  3385  	// result: (MOVBload  [off1+off2] {sym} ptr mem)
  3386  	for {
  3387  		off1 := v.AuxInt
  3388  		sym := v.Aux
  3389  		x := v.Args[0]
  3390  		if x.Op != OpMIPSADDconst {
  3391  			break
  3392  		}
  3393  		off2 := x.AuxInt
  3394  		ptr := x.Args[0]
  3395  		mem := v.Args[1]
  3396  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  3397  			break
  3398  		}
  3399  		v.reset(OpMIPSMOVBload)
  3400  		v.AuxInt = off1 + off2
  3401  		v.Aux = sym
  3402  		v.AddArg(ptr)
  3403  		v.AddArg(mem)
  3404  		return true
  3405  	}
  3406  	// match: (MOVBload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  3407  	// cond: canMergeSym(sym1,sym2)
  3408  	// result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  3409  	for {
  3410  		off1 := v.AuxInt
  3411  		sym1 := v.Aux
  3412  		v_0 := v.Args[0]
  3413  		if v_0.Op != OpMIPSMOVWaddr {
  3414  			break
  3415  		}
  3416  		off2 := v_0.AuxInt
  3417  		sym2 := v_0.Aux
  3418  		ptr := v_0.Args[0]
  3419  		mem := v.Args[1]
  3420  		if !(canMergeSym(sym1, sym2)) {
  3421  			break
  3422  		}
  3423  		v.reset(OpMIPSMOVBload)
  3424  		v.AuxInt = off1 + off2
  3425  		v.Aux = mergeSym(sym1, sym2)
  3426  		v.AddArg(ptr)
  3427  		v.AddArg(mem)
  3428  		return true
  3429  	}
  3430  	// match: (MOVBload [off] {sym} ptr (MOVBstore [off2] {sym2} ptr2 x _))
  3431  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) && isSigned(x.Type)
  3432  	// result: x
  3433  	for {
  3434  		off := v.AuxInt
  3435  		sym := v.Aux
  3436  		ptr := v.Args[0]
  3437  		v_1 := v.Args[1]
  3438  		if v_1.Op != OpMIPSMOVBstore {
  3439  			break
  3440  		}
  3441  		off2 := v_1.AuxInt
  3442  		sym2 := v_1.Aux
  3443  		ptr2 := v_1.Args[0]
  3444  		x := v_1.Args[1]
  3445  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) && isSigned(x.Type)) {
  3446  			break
  3447  		}
  3448  		v.reset(OpCopy)
  3449  		v.Type = x.Type
  3450  		v.AddArg(x)
  3451  		return true
  3452  	}
  3453  	return false
  3454  }
  3455  func rewriteValueMIPS_OpMIPSMOVBreg_0(v *Value) bool {
  3456  	b := v.Block
  3457  	_ = b
  3458  	// match: (MOVBreg x:(MOVBload _ _))
  3459  	// cond:
  3460  	// result: (MOVWreg x)
  3461  	for {
  3462  		x := v.Args[0]
  3463  		if x.Op != OpMIPSMOVBload {
  3464  			break
  3465  		}
  3466  		v.reset(OpMIPSMOVWreg)
  3467  		v.AddArg(x)
  3468  		return true
  3469  	}
  3470  	// match: (MOVBreg x:(MOVBreg _))
  3471  	// cond:
  3472  	// result: (MOVWreg x)
  3473  	for {
  3474  		x := v.Args[0]
  3475  		if x.Op != OpMIPSMOVBreg {
  3476  			break
  3477  		}
  3478  		v.reset(OpMIPSMOVWreg)
  3479  		v.AddArg(x)
  3480  		return true
  3481  	}
  3482  	// match: (MOVBreg <t> x:(MOVBUload [off] {sym} ptr mem))
  3483  	// cond: x.Uses == 1 && clobber(x)
  3484  	// result: @x.Block (MOVBload <t> [off] {sym} ptr mem)
  3485  	for {
  3486  		t := v.Type
  3487  		x := v.Args[0]
  3488  		if x.Op != OpMIPSMOVBUload {
  3489  			break
  3490  		}
  3491  		off := x.AuxInt
  3492  		sym := x.Aux
  3493  		ptr := x.Args[0]
  3494  		mem := x.Args[1]
  3495  		if !(x.Uses == 1 && clobber(x)) {
  3496  			break
  3497  		}
  3498  		b = x.Block
  3499  		v0 := b.NewValue0(v.Pos, OpMIPSMOVBload, t)
  3500  		v.reset(OpCopy)
  3501  		v.AddArg(v0)
  3502  		v0.AuxInt = off
  3503  		v0.Aux = sym
  3504  		v0.AddArg(ptr)
  3505  		v0.AddArg(mem)
  3506  		return true
  3507  	}
  3508  	// match: (MOVBreg (ANDconst [c] x))
  3509  	// cond: c & 0x80 == 0
  3510  	// result: (ANDconst [c&0x7f] x)
  3511  	for {
  3512  		v_0 := v.Args[0]
  3513  		if v_0.Op != OpMIPSANDconst {
  3514  			break
  3515  		}
  3516  		c := v_0.AuxInt
  3517  		x := v_0.Args[0]
  3518  		if !(c&0x80 == 0) {
  3519  			break
  3520  		}
  3521  		v.reset(OpMIPSANDconst)
  3522  		v.AuxInt = c & 0x7f
  3523  		v.AddArg(x)
  3524  		return true
  3525  	}
  3526  	// match: (MOVBreg (MOVWconst [c]))
  3527  	// cond:
  3528  	// result: (MOVWconst [int64(int8(c))])
  3529  	for {
  3530  		v_0 := v.Args[0]
  3531  		if v_0.Op != OpMIPSMOVWconst {
  3532  			break
  3533  		}
  3534  		c := v_0.AuxInt
  3535  		v.reset(OpMIPSMOVWconst)
  3536  		v.AuxInt = int64(int8(c))
  3537  		return true
  3538  	}
  3539  	return false
  3540  }
  3541  func rewriteValueMIPS_OpMIPSMOVBstore_0(v *Value) bool {
  3542  	// match: (MOVBstore [off1] {sym} x:(ADDconst [off2] ptr) val mem)
  3543  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  3544  	// result: (MOVBstore [off1+off2] {sym} ptr val mem)
  3545  	for {
  3546  		off1 := v.AuxInt
  3547  		sym := v.Aux
  3548  		x := v.Args[0]
  3549  		if x.Op != OpMIPSADDconst {
  3550  			break
  3551  		}
  3552  		off2 := x.AuxInt
  3553  		ptr := x.Args[0]
  3554  		val := v.Args[1]
  3555  		mem := v.Args[2]
  3556  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  3557  			break
  3558  		}
  3559  		v.reset(OpMIPSMOVBstore)
  3560  		v.AuxInt = off1 + off2
  3561  		v.Aux = sym
  3562  		v.AddArg(ptr)
  3563  		v.AddArg(val)
  3564  		v.AddArg(mem)
  3565  		return true
  3566  	}
  3567  	// match: (MOVBstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem)
  3568  	// cond: canMergeSym(sym1,sym2)
  3569  	// result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
  3570  	for {
  3571  		off1 := v.AuxInt
  3572  		sym1 := v.Aux
  3573  		v_0 := v.Args[0]
  3574  		if v_0.Op != OpMIPSMOVWaddr {
  3575  			break
  3576  		}
  3577  		off2 := v_0.AuxInt
  3578  		sym2 := v_0.Aux
  3579  		ptr := v_0.Args[0]
  3580  		val := v.Args[1]
  3581  		mem := v.Args[2]
  3582  		if !(canMergeSym(sym1, sym2)) {
  3583  			break
  3584  		}
  3585  		v.reset(OpMIPSMOVBstore)
  3586  		v.AuxInt = off1 + off2
  3587  		v.Aux = mergeSym(sym1, sym2)
  3588  		v.AddArg(ptr)
  3589  		v.AddArg(val)
  3590  		v.AddArg(mem)
  3591  		return true
  3592  	}
  3593  	// match: (MOVBstore [off] {sym} ptr (MOVWconst [0]) mem)
  3594  	// cond:
  3595  	// result: (MOVBstorezero [off] {sym} ptr mem)
  3596  	for {
  3597  		off := v.AuxInt
  3598  		sym := v.Aux
  3599  		ptr := v.Args[0]
  3600  		v_1 := v.Args[1]
  3601  		if v_1.Op != OpMIPSMOVWconst {
  3602  			break
  3603  		}
  3604  		if v_1.AuxInt != 0 {
  3605  			break
  3606  		}
  3607  		mem := v.Args[2]
  3608  		v.reset(OpMIPSMOVBstorezero)
  3609  		v.AuxInt = off
  3610  		v.Aux = sym
  3611  		v.AddArg(ptr)
  3612  		v.AddArg(mem)
  3613  		return true
  3614  	}
  3615  	// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
  3616  	// cond:
  3617  	// result: (MOVBstore [off] {sym} ptr x mem)
  3618  	for {
  3619  		off := v.AuxInt
  3620  		sym := v.Aux
  3621  		ptr := v.Args[0]
  3622  		v_1 := v.Args[1]
  3623  		if v_1.Op != OpMIPSMOVBreg {
  3624  			break
  3625  		}
  3626  		x := v_1.Args[0]
  3627  		mem := v.Args[2]
  3628  		v.reset(OpMIPSMOVBstore)
  3629  		v.AuxInt = off
  3630  		v.Aux = sym
  3631  		v.AddArg(ptr)
  3632  		v.AddArg(x)
  3633  		v.AddArg(mem)
  3634  		return true
  3635  	}
  3636  	// match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
  3637  	// cond:
  3638  	// result: (MOVBstore [off] {sym} ptr x mem)
  3639  	for {
  3640  		off := v.AuxInt
  3641  		sym := v.Aux
  3642  		ptr := v.Args[0]
  3643  		v_1 := v.Args[1]
  3644  		if v_1.Op != OpMIPSMOVBUreg {
  3645  			break
  3646  		}
  3647  		x := v_1.Args[0]
  3648  		mem := v.Args[2]
  3649  		v.reset(OpMIPSMOVBstore)
  3650  		v.AuxInt = off
  3651  		v.Aux = sym
  3652  		v.AddArg(ptr)
  3653  		v.AddArg(x)
  3654  		v.AddArg(mem)
  3655  		return true
  3656  	}
  3657  	// match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
  3658  	// cond:
  3659  	// result: (MOVBstore [off] {sym} ptr x mem)
  3660  	for {
  3661  		off := v.AuxInt
  3662  		sym := v.Aux
  3663  		ptr := v.Args[0]
  3664  		v_1 := v.Args[1]
  3665  		if v_1.Op != OpMIPSMOVHreg {
  3666  			break
  3667  		}
  3668  		x := v_1.Args[0]
  3669  		mem := v.Args[2]
  3670  		v.reset(OpMIPSMOVBstore)
  3671  		v.AuxInt = off
  3672  		v.Aux = sym
  3673  		v.AddArg(ptr)
  3674  		v.AddArg(x)
  3675  		v.AddArg(mem)
  3676  		return true
  3677  	}
  3678  	// match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
  3679  	// cond:
  3680  	// result: (MOVBstore [off] {sym} ptr x mem)
  3681  	for {
  3682  		off := v.AuxInt
  3683  		sym := v.Aux
  3684  		ptr := v.Args[0]
  3685  		v_1 := v.Args[1]
  3686  		if v_1.Op != OpMIPSMOVHUreg {
  3687  			break
  3688  		}
  3689  		x := v_1.Args[0]
  3690  		mem := v.Args[2]
  3691  		v.reset(OpMIPSMOVBstore)
  3692  		v.AuxInt = off
  3693  		v.Aux = sym
  3694  		v.AddArg(ptr)
  3695  		v.AddArg(x)
  3696  		v.AddArg(mem)
  3697  		return true
  3698  	}
  3699  	// match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
  3700  	// cond:
  3701  	// result: (MOVBstore [off] {sym} ptr x mem)
  3702  	for {
  3703  		off := v.AuxInt
  3704  		sym := v.Aux
  3705  		ptr := v.Args[0]
  3706  		v_1 := v.Args[1]
  3707  		if v_1.Op != OpMIPSMOVWreg {
  3708  			break
  3709  		}
  3710  		x := v_1.Args[0]
  3711  		mem := v.Args[2]
  3712  		v.reset(OpMIPSMOVBstore)
  3713  		v.AuxInt = off
  3714  		v.Aux = sym
  3715  		v.AddArg(ptr)
  3716  		v.AddArg(x)
  3717  		v.AddArg(mem)
  3718  		return true
  3719  	}
  3720  	return false
  3721  }
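// Editorial sketch, not generated from gen/MIPS.rules: a byte store writes
// only the low 8 bits of its source register, so the sign and zero extension
// wrappers stripped by the rules above (MOVBreg, MOVBUreg, MOVHreg, MOVHUreg,
// MOVWreg) cannot change what reaches memory, and a stored constant 0 can use
// the dedicated MOVBstorezero form. Hypothetical illustration:
func byteStoreModel(val int32) uint8 {
	return uint8(val) // what MOVBstore writes, with or without a prior extension of val
}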
  3722  func rewriteValueMIPS_OpMIPSMOVBstorezero_0(v *Value) bool {
  3723  	// match: (MOVBstorezero [off1] {sym} x:(ADDconst [off2] ptr) mem)
  3724  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  3725  	// result: (MOVBstorezero [off1+off2] {sym} ptr mem)
  3726  	for {
  3727  		off1 := v.AuxInt
  3728  		sym := v.Aux
  3729  		x := v.Args[0]
  3730  		if x.Op != OpMIPSADDconst {
  3731  			break
  3732  		}
  3733  		off2 := x.AuxInt
  3734  		ptr := x.Args[0]
  3735  		mem := v.Args[1]
  3736  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  3737  			break
  3738  		}
  3739  		v.reset(OpMIPSMOVBstorezero)
  3740  		v.AuxInt = off1 + off2
  3741  		v.Aux = sym
  3742  		v.AddArg(ptr)
  3743  		v.AddArg(mem)
  3744  		return true
  3745  	}
  3746  	// match: (MOVBstorezero [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  3747  	// cond: canMergeSym(sym1,sym2)
  3748  	// result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  3749  	for {
  3750  		off1 := v.AuxInt
  3751  		sym1 := v.Aux
  3752  		v_0 := v.Args[0]
  3753  		if v_0.Op != OpMIPSMOVWaddr {
  3754  			break
  3755  		}
  3756  		off2 := v_0.AuxInt
  3757  		sym2 := v_0.Aux
  3758  		ptr := v_0.Args[0]
  3759  		mem := v.Args[1]
  3760  		if !(canMergeSym(sym1, sym2)) {
  3761  			break
  3762  		}
  3763  		v.reset(OpMIPSMOVBstorezero)
  3764  		v.AuxInt = off1 + off2
  3765  		v.Aux = mergeSym(sym1, sym2)
  3766  		v.AddArg(ptr)
  3767  		v.AddArg(mem)
  3768  		return true
  3769  	}
  3770  	return false
  3771  }
  3772  func rewriteValueMIPS_OpMIPSMOVDload_0(v *Value) bool {
  3773  	// match: (MOVDload [off1] {sym} x:(ADDconst [off2] ptr) mem)
  3774  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  3775  	// result: (MOVDload  [off1+off2] {sym} ptr mem)
  3776  	for {
  3777  		off1 := v.AuxInt
  3778  		sym := v.Aux
  3779  		x := v.Args[0]
  3780  		if x.Op != OpMIPSADDconst {
  3781  			break
  3782  		}
  3783  		off2 := x.AuxInt
  3784  		ptr := x.Args[0]
  3785  		mem := v.Args[1]
  3786  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  3787  			break
  3788  		}
  3789  		v.reset(OpMIPSMOVDload)
  3790  		v.AuxInt = off1 + off2
  3791  		v.Aux = sym
  3792  		v.AddArg(ptr)
  3793  		v.AddArg(mem)
  3794  		return true
  3795  	}
  3796  	// match: (MOVDload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  3797  	// cond: canMergeSym(sym1,sym2)
  3798  	// result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  3799  	for {
  3800  		off1 := v.AuxInt
  3801  		sym1 := v.Aux
  3802  		v_0 := v.Args[0]
  3803  		if v_0.Op != OpMIPSMOVWaddr {
  3804  			break
  3805  		}
  3806  		off2 := v_0.AuxInt
  3807  		sym2 := v_0.Aux
  3808  		ptr := v_0.Args[0]
  3809  		mem := v.Args[1]
  3810  		if !(canMergeSym(sym1, sym2)) {
  3811  			break
  3812  		}
  3813  		v.reset(OpMIPSMOVDload)
  3814  		v.AuxInt = off1 + off2
  3815  		v.Aux = mergeSym(sym1, sym2)
  3816  		v.AddArg(ptr)
  3817  		v.AddArg(mem)
  3818  		return true
  3819  	}
  3820  	// match: (MOVDload [off] {sym} ptr (MOVDstore [off2] {sym2} ptr2 x _))
  3821  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  3822  	// result: x
  3823  	for {
  3824  		off := v.AuxInt
  3825  		sym := v.Aux
  3826  		ptr := v.Args[0]
  3827  		v_1 := v.Args[1]
  3828  		if v_1.Op != OpMIPSMOVDstore {
  3829  			break
  3830  		}
  3831  		off2 := v_1.AuxInt
  3832  		sym2 := v_1.Aux
  3833  		ptr2 := v_1.Args[0]
  3834  		x := v_1.Args[1]
  3835  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  3836  			break
  3837  		}
  3838  		v.reset(OpCopy)
  3839  		v.Type = x.Type
  3840  		v.AddArg(x)
  3841  		return true
  3842  	}
  3843  	return false
  3844  }
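// Editorial illustration (not generated code): the last MOVDload rule above forwards a
// just-stored value to a load of the same location (same sym, same offset, isSamePtr),
// removing the round trip through memory. A rough Go analogy of the invariant being
// exploited (a slice and index stand in for the pointer/offset pair; names are made up):
func exampleStoreToLoadForwarding(mem []float64, i int, x float64) float64 {
	mem[i] = x    // corresponds to MOVDstore [off] {sym} ptr x mem
	return mem[i] // a MOVDload of the same slot can simply reuse x
}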
  3845  func rewriteValueMIPS_OpMIPSMOVDstore_0(v *Value) bool {
  3846  	// match: (MOVDstore [off1] {sym} x:(ADDconst [off2] ptr) val mem)
  3847  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  3848  	// result: (MOVDstore [off1+off2] {sym} ptr val mem)
  3849  	for {
  3850  		off1 := v.AuxInt
  3851  		sym := v.Aux
  3852  		x := v.Args[0]
  3853  		if x.Op != OpMIPSADDconst {
  3854  			break
  3855  		}
  3856  		off2 := x.AuxInt
  3857  		ptr := x.Args[0]
  3858  		val := v.Args[1]
  3859  		mem := v.Args[2]
  3860  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  3861  			break
  3862  		}
  3863  		v.reset(OpMIPSMOVDstore)
  3864  		v.AuxInt = off1 + off2
  3865  		v.Aux = sym
  3866  		v.AddArg(ptr)
  3867  		v.AddArg(val)
  3868  		v.AddArg(mem)
  3869  		return true
  3870  	}
  3871  	// match: (MOVDstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem)
  3872  	// cond: canMergeSym(sym1,sym2)
  3873  	// result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
  3874  	for {
  3875  		off1 := v.AuxInt
  3876  		sym1 := v.Aux
  3877  		v_0 := v.Args[0]
  3878  		if v_0.Op != OpMIPSMOVWaddr {
  3879  			break
  3880  		}
  3881  		off2 := v_0.AuxInt
  3882  		sym2 := v_0.Aux
  3883  		ptr := v_0.Args[0]
  3884  		val := v.Args[1]
  3885  		mem := v.Args[2]
  3886  		if !(canMergeSym(sym1, sym2)) {
  3887  			break
  3888  		}
  3889  		v.reset(OpMIPSMOVDstore)
  3890  		v.AuxInt = off1 + off2
  3891  		v.Aux = mergeSym(sym1, sym2)
  3892  		v.AddArg(ptr)
  3893  		v.AddArg(val)
  3894  		v.AddArg(mem)
  3895  		return true
  3896  	}
  3897  	return false
  3898  }
  3899  func rewriteValueMIPS_OpMIPSMOVFload_0(v *Value) bool {
  3900  	// match: (MOVFload [off1] {sym} x:(ADDconst [off2] ptr) mem)
  3901  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  3902  	// result: (MOVFload  [off1+off2] {sym} ptr mem)
  3903  	for {
  3904  		off1 := v.AuxInt
  3905  		sym := v.Aux
  3906  		x := v.Args[0]
  3907  		if x.Op != OpMIPSADDconst {
  3908  			break
  3909  		}
  3910  		off2 := x.AuxInt
  3911  		ptr := x.Args[0]
  3912  		mem := v.Args[1]
  3913  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  3914  			break
  3915  		}
  3916  		v.reset(OpMIPSMOVFload)
  3917  		v.AuxInt = off1 + off2
  3918  		v.Aux = sym
  3919  		v.AddArg(ptr)
  3920  		v.AddArg(mem)
  3921  		return true
  3922  	}
  3923  	// match: (MOVFload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  3924  	// cond: canMergeSym(sym1,sym2)
  3925  	// result: (MOVFload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  3926  	for {
  3927  		off1 := v.AuxInt
  3928  		sym1 := v.Aux
  3929  		v_0 := v.Args[0]
  3930  		if v_0.Op != OpMIPSMOVWaddr {
  3931  			break
  3932  		}
  3933  		off2 := v_0.AuxInt
  3934  		sym2 := v_0.Aux
  3935  		ptr := v_0.Args[0]
  3936  		mem := v.Args[1]
  3937  		if !(canMergeSym(sym1, sym2)) {
  3938  			break
  3939  		}
  3940  		v.reset(OpMIPSMOVFload)
  3941  		v.AuxInt = off1 + off2
  3942  		v.Aux = mergeSym(sym1, sym2)
  3943  		v.AddArg(ptr)
  3944  		v.AddArg(mem)
  3945  		return true
  3946  	}
  3947  	// match: (MOVFload [off] {sym} ptr (MOVFstore [off2] {sym2} ptr2 x _))
  3948  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  3949  	// result: x
  3950  	for {
  3951  		off := v.AuxInt
  3952  		sym := v.Aux
  3953  		ptr := v.Args[0]
  3954  		v_1 := v.Args[1]
  3955  		if v_1.Op != OpMIPSMOVFstore {
  3956  			break
  3957  		}
  3958  		off2 := v_1.AuxInt
  3959  		sym2 := v_1.Aux
  3960  		ptr2 := v_1.Args[0]
  3961  		x := v_1.Args[1]
  3962  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  3963  			break
  3964  		}
  3965  		v.reset(OpCopy)
  3966  		v.Type = x.Type
  3967  		v.AddArg(x)
  3968  		return true
  3969  	}
  3970  	return false
  3971  }
  3972  func rewriteValueMIPS_OpMIPSMOVFstore_0(v *Value) bool {
  3973  	// match: (MOVFstore [off1] {sym} x:(ADDconst [off2] ptr) val mem)
  3974  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  3975  	// result: (MOVFstore [off1+off2] {sym} ptr val mem)
  3976  	for {
  3977  		off1 := v.AuxInt
  3978  		sym := v.Aux
  3979  		x := v.Args[0]
  3980  		if x.Op != OpMIPSADDconst {
  3981  			break
  3982  		}
  3983  		off2 := x.AuxInt
  3984  		ptr := x.Args[0]
  3985  		val := v.Args[1]
  3986  		mem := v.Args[2]
  3987  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  3988  			break
  3989  		}
  3990  		v.reset(OpMIPSMOVFstore)
  3991  		v.AuxInt = off1 + off2
  3992  		v.Aux = sym
  3993  		v.AddArg(ptr)
  3994  		v.AddArg(val)
  3995  		v.AddArg(mem)
  3996  		return true
  3997  	}
  3998  	// match: (MOVFstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem)
  3999  	// cond: canMergeSym(sym1,sym2)
  4000  	// result: (MOVFstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
  4001  	for {
  4002  		off1 := v.AuxInt
  4003  		sym1 := v.Aux
  4004  		v_0 := v.Args[0]
  4005  		if v_0.Op != OpMIPSMOVWaddr {
  4006  			break
  4007  		}
  4008  		off2 := v_0.AuxInt
  4009  		sym2 := v_0.Aux
  4010  		ptr := v_0.Args[0]
  4011  		val := v.Args[1]
  4012  		mem := v.Args[2]
  4013  		if !(canMergeSym(sym1, sym2)) {
  4014  			break
  4015  		}
  4016  		v.reset(OpMIPSMOVFstore)
  4017  		v.AuxInt = off1 + off2
  4018  		v.Aux = mergeSym(sym1, sym2)
  4019  		v.AddArg(ptr)
  4020  		v.AddArg(val)
  4021  		v.AddArg(mem)
  4022  		return true
  4023  	}
  4024  	return false
  4025  }
  4026  func rewriteValueMIPS_OpMIPSMOVHUload_0(v *Value) bool {
  4027  	// match: (MOVHUload [off1] {sym} x:(ADDconst [off2] ptr) mem)
  4028  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4029  	// result: (MOVHUload [off1+off2] {sym} ptr mem)
  4030  	for {
  4031  		off1 := v.AuxInt
  4032  		sym := v.Aux
  4033  		x := v.Args[0]
  4034  		if x.Op != OpMIPSADDconst {
  4035  			break
  4036  		}
  4037  		off2 := x.AuxInt
  4038  		ptr := x.Args[0]
  4039  		mem := v.Args[1]
  4040  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4041  			break
  4042  		}
  4043  		v.reset(OpMIPSMOVHUload)
  4044  		v.AuxInt = off1 + off2
  4045  		v.Aux = sym
  4046  		v.AddArg(ptr)
  4047  		v.AddArg(mem)
  4048  		return true
  4049  	}
  4050  	// match: (MOVHUload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  4051  	// cond: canMergeSym(sym1,sym2)
  4052  	// result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4053  	for {
  4054  		off1 := v.AuxInt
  4055  		sym1 := v.Aux
  4056  		v_0 := v.Args[0]
  4057  		if v_0.Op != OpMIPSMOVWaddr {
  4058  			break
  4059  		}
  4060  		off2 := v_0.AuxInt
  4061  		sym2 := v_0.Aux
  4062  		ptr := v_0.Args[0]
  4063  		mem := v.Args[1]
  4064  		if !(canMergeSym(sym1, sym2)) {
  4065  			break
  4066  		}
  4067  		v.reset(OpMIPSMOVHUload)
  4068  		v.AuxInt = off1 + off2
  4069  		v.Aux = mergeSym(sym1, sym2)
  4070  		v.AddArg(ptr)
  4071  		v.AddArg(mem)
  4072  		return true
  4073  	}
  4074  	// match: (MOVHUload [off] {sym} ptr (MOVHstore [off2] {sym2} ptr2 x _))
  4075  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) && !isSigned(x.Type)
  4076  	// result: x
  4077  	for {
  4078  		off := v.AuxInt
  4079  		sym := v.Aux
  4080  		ptr := v.Args[0]
  4081  		v_1 := v.Args[1]
  4082  		if v_1.Op != OpMIPSMOVHstore {
  4083  			break
  4084  		}
  4085  		off2 := v_1.AuxInt
  4086  		sym2 := v_1.Aux
  4087  		ptr2 := v_1.Args[0]
  4088  		x := v_1.Args[1]
  4089  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) && !isSigned(x.Type)) {
  4090  			break
  4091  		}
  4092  		v.reset(OpCopy)
  4093  		v.Type = x.Type
  4094  		v.AddArg(x)
  4095  		return true
  4096  	}
  4097  	return false
  4098  }
  4099  func rewriteValueMIPS_OpMIPSMOVHUreg_0(v *Value) bool {
  4100  	b := v.Block
  4101  	_ = b
  4102  	// match: (MOVHUreg x:(MOVBUload _ _))
  4103  	// cond:
  4104  	// result: (MOVWreg x)
  4105  	for {
  4106  		x := v.Args[0]
  4107  		if x.Op != OpMIPSMOVBUload {
  4108  			break
  4109  		}
  4110  		v.reset(OpMIPSMOVWreg)
  4111  		v.AddArg(x)
  4112  		return true
  4113  	}
  4114  	// match: (MOVHUreg x:(MOVHUload _ _))
  4115  	// cond:
  4116  	// result: (MOVWreg x)
  4117  	for {
  4118  		x := v.Args[0]
  4119  		if x.Op != OpMIPSMOVHUload {
  4120  			break
  4121  		}
  4122  		v.reset(OpMIPSMOVWreg)
  4123  		v.AddArg(x)
  4124  		return true
  4125  	}
  4126  	// match: (MOVHUreg x:(MOVBUreg _))
  4127  	// cond:
  4128  	// result: (MOVWreg x)
  4129  	for {
  4130  		x := v.Args[0]
  4131  		if x.Op != OpMIPSMOVBUreg {
  4132  			break
  4133  		}
  4134  		v.reset(OpMIPSMOVWreg)
  4135  		v.AddArg(x)
  4136  		return true
  4137  	}
  4138  	// match: (MOVHUreg x:(MOVHUreg _))
  4139  	// cond:
  4140  	// result: (MOVWreg x)
  4141  	for {
  4142  		x := v.Args[0]
  4143  		if x.Op != OpMIPSMOVHUreg {
  4144  			break
  4145  		}
  4146  		v.reset(OpMIPSMOVWreg)
  4147  		v.AddArg(x)
  4148  		return true
  4149  	}
  4150  	// match: (MOVHUreg <t> x:(MOVHload [off] {sym} ptr mem))
  4151  	// cond: x.Uses == 1 && clobber(x)
  4152  	// result: @x.Block (MOVHUload <t> [off] {sym} ptr mem)
  4153  	for {
  4154  		t := v.Type
  4155  		x := v.Args[0]
  4156  		if x.Op != OpMIPSMOVHload {
  4157  			break
  4158  		}
  4159  		off := x.AuxInt
  4160  		sym := x.Aux
  4161  		ptr := x.Args[0]
  4162  		mem := x.Args[1]
  4163  		if !(x.Uses == 1 && clobber(x)) {
  4164  			break
  4165  		}
  4166  		b = x.Block
  4167  		v0 := b.NewValue0(v.Pos, OpMIPSMOVHUload, t)
  4168  		v.reset(OpCopy)
  4169  		v.AddArg(v0)
  4170  		v0.AuxInt = off
  4171  		v0.Aux = sym
  4172  		v0.AddArg(ptr)
  4173  		v0.AddArg(mem)
  4174  		return true
  4175  	}
  4176  	// match: (MOVHUreg (ANDconst [c] x))
  4177  	// cond:
  4178  	// result: (ANDconst [c&0xffff] x)
  4179  	for {
  4180  		v_0 := v.Args[0]
  4181  		if v_0.Op != OpMIPSANDconst {
  4182  			break
  4183  		}
  4184  		c := v_0.AuxInt
  4185  		x := v_0.Args[0]
  4186  		v.reset(OpMIPSANDconst)
  4187  		v.AuxInt = c & 0xffff
  4188  		v.AddArg(x)
  4189  		return true
  4190  	}
  4191  	// match: (MOVHUreg (MOVWconst [c]))
  4192  	// cond:
  4193  	// result: (MOVWconst [int64(uint16(c))])
  4194  	for {
  4195  		v_0 := v.Args[0]
  4196  		if v_0.Op != OpMIPSMOVWconst {
  4197  			break
  4198  		}
  4199  		c := v_0.AuxInt
  4200  		v.reset(OpMIPSMOVWconst)
  4201  		v.AuxInt = int64(uint16(c))
  4202  		return true
  4203  	}
  4204  	return false
  4205  }
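// Editorial illustration (not generated code): the last two MOVHUreg rules above fold the
// zero extension into an existing mask or constant. A sketch of the same arithmetic on the
// 64-bit AuxInt representation (function name made up):
func exampleFoldZeroExtend16(c int64) (maskedConst, narrowedMask int64) {
	maskedConst = int64(uint16(c)) // MOVHUreg (MOVWconst [c])  -> MOVWconst [int64(uint16(c))]
	narrowedMask = c & 0xffff      // MOVHUreg (ANDconst [c] x) -> ANDconst [c&0xffff] x
	return
}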
  4206  func rewriteValueMIPS_OpMIPSMOVHload_0(v *Value) bool {
  4207  	// match: (MOVHload [off1] {sym} x:(ADDconst [off2] ptr) mem)
  4208  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4209  	// result: (MOVHload  [off1+off2] {sym} ptr mem)
  4210  	for {
  4211  		off1 := v.AuxInt
  4212  		sym := v.Aux
  4213  		x := v.Args[0]
  4214  		if x.Op != OpMIPSADDconst {
  4215  			break
  4216  		}
  4217  		off2 := x.AuxInt
  4218  		ptr := x.Args[0]
  4219  		mem := v.Args[1]
  4220  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4221  			break
  4222  		}
  4223  		v.reset(OpMIPSMOVHload)
  4224  		v.AuxInt = off1 + off2
  4225  		v.Aux = sym
  4226  		v.AddArg(ptr)
  4227  		v.AddArg(mem)
  4228  		return true
  4229  	}
  4230  	// match: (MOVHload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  4231  	// cond: canMergeSym(sym1,sym2)
  4232  	// result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4233  	for {
  4234  		off1 := v.AuxInt
  4235  		sym1 := v.Aux
  4236  		v_0 := v.Args[0]
  4237  		if v_0.Op != OpMIPSMOVWaddr {
  4238  			break
  4239  		}
  4240  		off2 := v_0.AuxInt
  4241  		sym2 := v_0.Aux
  4242  		ptr := v_0.Args[0]
  4243  		mem := v.Args[1]
  4244  		if !(canMergeSym(sym1, sym2)) {
  4245  			break
  4246  		}
  4247  		v.reset(OpMIPSMOVHload)
  4248  		v.AuxInt = off1 + off2
  4249  		v.Aux = mergeSym(sym1, sym2)
  4250  		v.AddArg(ptr)
  4251  		v.AddArg(mem)
  4252  		return true
  4253  	}
  4254  	// match: (MOVHload [off] {sym} ptr (MOVHstore [off2] {sym2} ptr2 x _))
  4255  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) && isSigned(x.Type)
  4256  	// result: x
  4257  	for {
  4258  		off := v.AuxInt
  4259  		sym := v.Aux
  4260  		ptr := v.Args[0]
  4261  		v_1 := v.Args[1]
  4262  		if v_1.Op != OpMIPSMOVHstore {
  4263  			break
  4264  		}
  4265  		off2 := v_1.AuxInt
  4266  		sym2 := v_1.Aux
  4267  		ptr2 := v_1.Args[0]
  4268  		x := v_1.Args[1]
  4269  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) && isSigned(x.Type)) {
  4270  			break
  4271  		}
  4272  		v.reset(OpCopy)
  4273  		v.Type = x.Type
  4274  		v.AddArg(x)
  4275  		return true
  4276  	}
  4277  	return false
  4278  }
  4279  func rewriteValueMIPS_OpMIPSMOVHreg_0(v *Value) bool {
  4280  	b := v.Block
  4281  	_ = b
  4282  	// match: (MOVHreg x:(MOVBload _ _))
  4283  	// cond:
  4284  	// result: (MOVWreg x)
  4285  	for {
  4286  		x := v.Args[0]
  4287  		if x.Op != OpMIPSMOVBload {
  4288  			break
  4289  		}
  4290  		v.reset(OpMIPSMOVWreg)
  4291  		v.AddArg(x)
  4292  		return true
  4293  	}
  4294  	// match: (MOVHreg x:(MOVBUload _ _))
  4295  	// cond:
  4296  	// result: (MOVWreg x)
  4297  	for {
  4298  		x := v.Args[0]
  4299  		if x.Op != OpMIPSMOVBUload {
  4300  			break
  4301  		}
  4302  		v.reset(OpMIPSMOVWreg)
  4303  		v.AddArg(x)
  4304  		return true
  4305  	}
  4306  	// match: (MOVHreg x:(MOVHload _ _))
  4307  	// cond:
  4308  	// result: (MOVWreg x)
  4309  	for {
  4310  		x := v.Args[0]
  4311  		if x.Op != OpMIPSMOVHload {
  4312  			break
  4313  		}
  4314  		v.reset(OpMIPSMOVWreg)
  4315  		v.AddArg(x)
  4316  		return true
  4317  	}
  4318  	// match: (MOVHreg x:(MOVBreg _))
  4319  	// cond:
  4320  	// result: (MOVWreg x)
  4321  	for {
  4322  		x := v.Args[0]
  4323  		if x.Op != OpMIPSMOVBreg {
  4324  			break
  4325  		}
  4326  		v.reset(OpMIPSMOVWreg)
  4327  		v.AddArg(x)
  4328  		return true
  4329  	}
  4330  	// match: (MOVHreg x:(MOVBUreg _))
  4331  	// cond:
  4332  	// result: (MOVWreg x)
  4333  	for {
  4334  		x := v.Args[0]
  4335  		if x.Op != OpMIPSMOVBUreg {
  4336  			break
  4337  		}
  4338  		v.reset(OpMIPSMOVWreg)
  4339  		v.AddArg(x)
  4340  		return true
  4341  	}
  4342  	// match: (MOVHreg x:(MOVHreg _))
  4343  	// cond:
  4344  	// result: (MOVWreg x)
  4345  	for {
  4346  		x := v.Args[0]
  4347  		if x.Op != OpMIPSMOVHreg {
  4348  			break
  4349  		}
  4350  		v.reset(OpMIPSMOVWreg)
  4351  		v.AddArg(x)
  4352  		return true
  4353  	}
  4354  	// match: (MOVHreg <t> x:(MOVHUload [off] {sym} ptr mem))
  4355  	// cond: x.Uses == 1 && clobber(x)
  4356  	// result: @x.Block (MOVHload <t> [off] {sym} ptr mem)
  4357  	for {
  4358  		t := v.Type
  4359  		x := v.Args[0]
  4360  		if x.Op != OpMIPSMOVHUload {
  4361  			break
  4362  		}
  4363  		off := x.AuxInt
  4364  		sym := x.Aux
  4365  		ptr := x.Args[0]
  4366  		mem := x.Args[1]
  4367  		if !(x.Uses == 1 && clobber(x)) {
  4368  			break
  4369  		}
  4370  		b = x.Block
  4371  		v0 := b.NewValue0(v.Pos, OpMIPSMOVHload, t)
  4372  		v.reset(OpCopy)
  4373  		v.AddArg(v0)
  4374  		v0.AuxInt = off
  4375  		v0.Aux = sym
  4376  		v0.AddArg(ptr)
  4377  		v0.AddArg(mem)
  4378  		return true
  4379  	}
  4380  	// match: (MOVHreg (ANDconst [c] x))
  4381  	// cond: c & 0x8000 == 0
  4382  	// result: (ANDconst [c&0x7fff] x)
  4383  	for {
  4384  		v_0 := v.Args[0]
  4385  		if v_0.Op != OpMIPSANDconst {
  4386  			break
  4387  		}
  4388  		c := v_0.AuxInt
  4389  		x := v_0.Args[0]
  4390  		if !(c&0x8000 == 0) {
  4391  			break
  4392  		}
  4393  		v.reset(OpMIPSANDconst)
  4394  		v.AuxInt = c & 0x7fff
  4395  		v.AddArg(x)
  4396  		return true
  4397  	}
  4398  	// match: (MOVHreg (MOVWconst [c]))
  4399  	// cond:
  4400  	// result: (MOVWconst [int64(int16(c))])
  4401  	for {
  4402  		v_0 := v.Args[0]
  4403  		if v_0.Op != OpMIPSMOVWconst {
  4404  			break
  4405  		}
  4406  		c := v_0.AuxInt
  4407  		v.reset(OpMIPSMOVWconst)
  4408  		v.AuxInt = int64(int16(c))
  4409  		return true
  4410  	}
  4411  	return false
  4412  }
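// Editorial illustration (not generated code): the MOVHreg (ANDconst [c] x) rule above only
// fires when bit 15 of the mask is clear; the masked value then has its 16-bit sign bit
// clear, so sign extension is equivalent to masking with c&0x7fff. A self-contained check
// of that identity (function name made up):
func exampleSignExtendAfterMask(x, c int32) bool {
	if c&0x8000 != 0 {
		return true // the rule's condition is not met; nothing to compare
	}
	lhs := int32(int16(x & c)) // MOVHreg (ANDconst [c] x): sign-extend the low 16 bits
	rhs := x & (c & 0x7fff)    // ANDconst [c&0x7fff] x: the rewritten form
	return lhs == rhs
}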
  4413  func rewriteValueMIPS_OpMIPSMOVHstore_0(v *Value) bool {
  4414  	// match: (MOVHstore [off1] {sym} x:(ADDconst [off2] ptr) val mem)
  4415  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4416  	// result: (MOVHstore [off1+off2] {sym} ptr val mem)
  4417  	for {
  4418  		off1 := v.AuxInt
  4419  		sym := v.Aux
  4420  		x := v.Args[0]
  4421  		if x.Op != OpMIPSADDconst {
  4422  			break
  4423  		}
  4424  		off2 := x.AuxInt
  4425  		ptr := x.Args[0]
  4426  		val := v.Args[1]
  4427  		mem := v.Args[2]
  4428  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4429  			break
  4430  		}
  4431  		v.reset(OpMIPSMOVHstore)
  4432  		v.AuxInt = off1 + off2
  4433  		v.Aux = sym
  4434  		v.AddArg(ptr)
  4435  		v.AddArg(val)
  4436  		v.AddArg(mem)
  4437  		return true
  4438  	}
  4439  	// match: (MOVHstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem)
  4440  	// cond: canMergeSym(sym1,sym2)
  4441  	// result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
  4442  	for {
  4443  		off1 := v.AuxInt
  4444  		sym1 := v.Aux
  4445  		v_0 := v.Args[0]
  4446  		if v_0.Op != OpMIPSMOVWaddr {
  4447  			break
  4448  		}
  4449  		off2 := v_0.AuxInt
  4450  		sym2 := v_0.Aux
  4451  		ptr := v_0.Args[0]
  4452  		val := v.Args[1]
  4453  		mem := v.Args[2]
  4454  		if !(canMergeSym(sym1, sym2)) {
  4455  			break
  4456  		}
  4457  		v.reset(OpMIPSMOVHstore)
  4458  		v.AuxInt = off1 + off2
  4459  		v.Aux = mergeSym(sym1, sym2)
  4460  		v.AddArg(ptr)
  4461  		v.AddArg(val)
  4462  		v.AddArg(mem)
  4463  		return true
  4464  	}
  4465  	// match: (MOVHstore [off] {sym} ptr (MOVWconst [0]) mem)
  4466  	// cond:
  4467  	// result: (MOVHstorezero [off] {sym} ptr mem)
  4468  	for {
  4469  		off := v.AuxInt
  4470  		sym := v.Aux
  4471  		ptr := v.Args[0]
  4472  		v_1 := v.Args[1]
  4473  		if v_1.Op != OpMIPSMOVWconst {
  4474  			break
  4475  		}
  4476  		if v_1.AuxInt != 0 {
  4477  			break
  4478  		}
  4479  		mem := v.Args[2]
  4480  		v.reset(OpMIPSMOVHstorezero)
  4481  		v.AuxInt = off
  4482  		v.Aux = sym
  4483  		v.AddArg(ptr)
  4484  		v.AddArg(mem)
  4485  		return true
  4486  	}
  4487  	// match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
  4488  	// cond:
  4489  	// result: (MOVHstore [off] {sym} ptr x mem)
  4490  	for {
  4491  		off := v.AuxInt
  4492  		sym := v.Aux
  4493  		ptr := v.Args[0]
  4494  		v_1 := v.Args[1]
  4495  		if v_1.Op != OpMIPSMOVHreg {
  4496  			break
  4497  		}
  4498  		x := v_1.Args[0]
  4499  		mem := v.Args[2]
  4500  		v.reset(OpMIPSMOVHstore)
  4501  		v.AuxInt = off
  4502  		v.Aux = sym
  4503  		v.AddArg(ptr)
  4504  		v.AddArg(x)
  4505  		v.AddArg(mem)
  4506  		return true
  4507  	}
  4508  	// match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
  4509  	// cond:
  4510  	// result: (MOVHstore [off] {sym} ptr x mem)
  4511  	for {
  4512  		off := v.AuxInt
  4513  		sym := v.Aux
  4514  		ptr := v.Args[0]
  4515  		v_1 := v.Args[1]
  4516  		if v_1.Op != OpMIPSMOVHUreg {
  4517  			break
  4518  		}
  4519  		x := v_1.Args[0]
  4520  		mem := v.Args[2]
  4521  		v.reset(OpMIPSMOVHstore)
  4522  		v.AuxInt = off
  4523  		v.Aux = sym
  4524  		v.AddArg(ptr)
  4525  		v.AddArg(x)
  4526  		v.AddArg(mem)
  4527  		return true
  4528  	}
  4529  	// match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
  4530  	// cond:
  4531  	// result: (MOVHstore [off] {sym} ptr x mem)
  4532  	for {
  4533  		off := v.AuxInt
  4534  		sym := v.Aux
  4535  		ptr := v.Args[0]
  4536  		v_1 := v.Args[1]
  4537  		if v_1.Op != OpMIPSMOVWreg {
  4538  			break
  4539  		}
  4540  		x := v_1.Args[0]
  4541  		mem := v.Args[2]
  4542  		v.reset(OpMIPSMOVHstore)
  4543  		v.AuxInt = off
  4544  		v.Aux = sym
  4545  		v.AddArg(ptr)
  4546  		v.AddArg(x)
  4547  		v.AddArg(mem)
  4548  		return true
  4549  	}
  4550  	return false
  4551  }
  4552  func rewriteValueMIPS_OpMIPSMOVHstorezero_0(v *Value) bool {
  4553  	// match: (MOVHstorezero [off1] {sym} x:(ADDconst [off2] ptr) mem)
  4554  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4555  	// result: (MOVHstorezero [off1+off2] {sym} ptr mem)
  4556  	for {
  4557  		off1 := v.AuxInt
  4558  		sym := v.Aux
  4559  		x := v.Args[0]
  4560  		if x.Op != OpMIPSADDconst {
  4561  			break
  4562  		}
  4563  		off2 := x.AuxInt
  4564  		ptr := x.Args[0]
  4565  		mem := v.Args[1]
  4566  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4567  			break
  4568  		}
  4569  		v.reset(OpMIPSMOVHstorezero)
  4570  		v.AuxInt = off1 + off2
  4571  		v.Aux = sym
  4572  		v.AddArg(ptr)
  4573  		v.AddArg(mem)
  4574  		return true
  4575  	}
  4576  	// match: (MOVHstorezero [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  4577  	// cond: canMergeSym(sym1,sym2)
  4578  	// result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4579  	for {
  4580  		off1 := v.AuxInt
  4581  		sym1 := v.Aux
  4582  		v_0 := v.Args[0]
  4583  		if v_0.Op != OpMIPSMOVWaddr {
  4584  			break
  4585  		}
  4586  		off2 := v_0.AuxInt
  4587  		sym2 := v_0.Aux
  4588  		ptr := v_0.Args[0]
  4589  		mem := v.Args[1]
  4590  		if !(canMergeSym(sym1, sym2)) {
  4591  			break
  4592  		}
  4593  		v.reset(OpMIPSMOVHstorezero)
  4594  		v.AuxInt = off1 + off2
  4595  		v.Aux = mergeSym(sym1, sym2)
  4596  		v.AddArg(ptr)
  4597  		v.AddArg(mem)
  4598  		return true
  4599  	}
  4600  	return false
  4601  }
  4602  func rewriteValueMIPS_OpMIPSMOVWload_0(v *Value) bool {
  4603  	// match: (MOVWload [off1] {sym} x:(ADDconst [off2] ptr) mem)
  4604  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4605  	// result: (MOVWload  [off1+off2] {sym} ptr mem)
  4606  	for {
  4607  		off1 := v.AuxInt
  4608  		sym := v.Aux
  4609  		x := v.Args[0]
  4610  		if x.Op != OpMIPSADDconst {
  4611  			break
  4612  		}
  4613  		off2 := x.AuxInt
  4614  		ptr := x.Args[0]
  4615  		mem := v.Args[1]
  4616  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4617  			break
  4618  		}
  4619  		v.reset(OpMIPSMOVWload)
  4620  		v.AuxInt = off1 + off2
  4621  		v.Aux = sym
  4622  		v.AddArg(ptr)
  4623  		v.AddArg(mem)
  4624  		return true
  4625  	}
  4626  	// match: (MOVWload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  4627  	// cond: canMergeSym(sym1,sym2)
  4628  	// result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4629  	for {
  4630  		off1 := v.AuxInt
  4631  		sym1 := v.Aux
  4632  		v_0 := v.Args[0]
  4633  		if v_0.Op != OpMIPSMOVWaddr {
  4634  			break
  4635  		}
  4636  		off2 := v_0.AuxInt
  4637  		sym2 := v_0.Aux
  4638  		ptr := v_0.Args[0]
  4639  		mem := v.Args[1]
  4640  		if !(canMergeSym(sym1, sym2)) {
  4641  			break
  4642  		}
  4643  		v.reset(OpMIPSMOVWload)
  4644  		v.AuxInt = off1 + off2
  4645  		v.Aux = mergeSym(sym1, sym2)
  4646  		v.AddArg(ptr)
  4647  		v.AddArg(mem)
  4648  		return true
  4649  	}
  4650  	// match: (MOVWload [off] {sym} ptr (MOVWstore [off2] {sym2} ptr2 x _))
  4651  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  4652  	// result: x
  4653  	for {
  4654  		off := v.AuxInt
  4655  		sym := v.Aux
  4656  		ptr := v.Args[0]
  4657  		v_1 := v.Args[1]
  4658  		if v_1.Op != OpMIPSMOVWstore {
  4659  			break
  4660  		}
  4661  		off2 := v_1.AuxInt
  4662  		sym2 := v_1.Aux
  4663  		ptr2 := v_1.Args[0]
  4664  		x := v_1.Args[1]
  4665  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  4666  			break
  4667  		}
  4668  		v.reset(OpCopy)
  4669  		v.Type = x.Type
  4670  		v.AddArg(x)
  4671  		return true
  4672  	}
  4673  	return false
  4674  }
  4675  func rewriteValueMIPS_OpMIPSMOVWreg_0(v *Value) bool {
  4676  	// match: (MOVWreg x)
  4677  	// cond: x.Uses == 1
  4678  	// result: (MOVWnop x)
  4679  	for {
  4680  		x := v.Args[0]
  4681  		if !(x.Uses == 1) {
  4682  			break
  4683  		}
  4684  		v.reset(OpMIPSMOVWnop)
  4685  		v.AddArg(x)
  4686  		return true
  4687  	}
  4688  	// match: (MOVWreg (MOVWconst [c]))
  4689  	// cond:
  4690  	// result: (MOVWconst [c])
  4691  	for {
  4692  		v_0 := v.Args[0]
  4693  		if v_0.Op != OpMIPSMOVWconst {
  4694  			break
  4695  		}
  4696  		c := v_0.AuxInt
  4697  		v.reset(OpMIPSMOVWconst)
  4698  		v.AuxInt = c
  4699  		return true
  4700  	}
  4701  	return false
  4702  }
  4703  func rewriteValueMIPS_OpMIPSMOVWstore_0(v *Value) bool {
  4704  	// match: (MOVWstore [off1] {sym} x:(ADDconst [off2] ptr) val mem)
  4705  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4706  	// result: (MOVWstore [off1+off2] {sym} ptr val mem)
  4707  	for {
  4708  		off1 := v.AuxInt
  4709  		sym := v.Aux
  4710  		x := v.Args[0]
  4711  		if x.Op != OpMIPSADDconst {
  4712  			break
  4713  		}
  4714  		off2 := x.AuxInt
  4715  		ptr := x.Args[0]
  4716  		val := v.Args[1]
  4717  		mem := v.Args[2]
  4718  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4719  			break
  4720  		}
  4721  		v.reset(OpMIPSMOVWstore)
  4722  		v.AuxInt = off1 + off2
  4723  		v.Aux = sym
  4724  		v.AddArg(ptr)
  4725  		v.AddArg(val)
  4726  		v.AddArg(mem)
  4727  		return true
  4728  	}
  4729  	// match: (MOVWstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem)
  4730  	// cond: canMergeSym(sym1,sym2)
  4731  	// result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
  4732  	for {
  4733  		off1 := v.AuxInt
  4734  		sym1 := v.Aux
  4735  		v_0 := v.Args[0]
  4736  		if v_0.Op != OpMIPSMOVWaddr {
  4737  			break
  4738  		}
  4739  		off2 := v_0.AuxInt
  4740  		sym2 := v_0.Aux
  4741  		ptr := v_0.Args[0]
  4742  		val := v.Args[1]
  4743  		mem := v.Args[2]
  4744  		if !(canMergeSym(sym1, sym2)) {
  4745  			break
  4746  		}
  4747  		v.reset(OpMIPSMOVWstore)
  4748  		v.AuxInt = off1 + off2
  4749  		v.Aux = mergeSym(sym1, sym2)
  4750  		v.AddArg(ptr)
  4751  		v.AddArg(val)
  4752  		v.AddArg(mem)
  4753  		return true
  4754  	}
  4755  	// match: (MOVWstore [off] {sym} ptr (MOVWconst [0]) mem)
  4756  	// cond:
  4757  	// result: (MOVWstorezero [off] {sym} ptr mem)
  4758  	for {
  4759  		off := v.AuxInt
  4760  		sym := v.Aux
  4761  		ptr := v.Args[0]
  4762  		v_1 := v.Args[1]
  4763  		if v_1.Op != OpMIPSMOVWconst {
  4764  			break
  4765  		}
  4766  		if v_1.AuxInt != 0 {
  4767  			break
  4768  		}
  4769  		mem := v.Args[2]
  4770  		v.reset(OpMIPSMOVWstorezero)
  4771  		v.AuxInt = off
  4772  		v.Aux = sym
  4773  		v.AddArg(ptr)
  4774  		v.AddArg(mem)
  4775  		return true
  4776  	}
  4777  	// match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
  4778  	// cond:
  4779  	// result: (MOVWstore [off] {sym} ptr x mem)
  4780  	for {
  4781  		off := v.AuxInt
  4782  		sym := v.Aux
  4783  		ptr := v.Args[0]
  4784  		v_1 := v.Args[1]
  4785  		if v_1.Op != OpMIPSMOVWreg {
  4786  			break
  4787  		}
  4788  		x := v_1.Args[0]
  4789  		mem := v.Args[2]
  4790  		v.reset(OpMIPSMOVWstore)
  4791  		v.AuxInt = off
  4792  		v.Aux = sym
  4793  		v.AddArg(ptr)
  4794  		v.AddArg(x)
  4795  		v.AddArg(mem)
  4796  		return true
  4797  	}
  4798  	return false
  4799  }
  4800  func rewriteValueMIPS_OpMIPSMOVWstorezero_0(v *Value) bool {
  4801  	// match: (MOVWstorezero [off1] {sym} x:(ADDconst [off2] ptr) mem)
  4802  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4803  	// result: (MOVWstorezero [off1+off2] {sym} ptr mem)
  4804  	for {
  4805  		off1 := v.AuxInt
  4806  		sym := v.Aux
  4807  		x := v.Args[0]
  4808  		if x.Op != OpMIPSADDconst {
  4809  			break
  4810  		}
  4811  		off2 := x.AuxInt
  4812  		ptr := x.Args[0]
  4813  		mem := v.Args[1]
  4814  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4815  			break
  4816  		}
  4817  		v.reset(OpMIPSMOVWstorezero)
  4818  		v.AuxInt = off1 + off2
  4819  		v.Aux = sym
  4820  		v.AddArg(ptr)
  4821  		v.AddArg(mem)
  4822  		return true
  4823  	}
  4824  	// match: (MOVWstorezero [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  4825  	// cond: canMergeSym(sym1,sym2)
  4826  	// result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4827  	for {
  4828  		off1 := v.AuxInt
  4829  		sym1 := v.Aux
  4830  		v_0 := v.Args[0]
  4831  		if v_0.Op != OpMIPSMOVWaddr {
  4832  			break
  4833  		}
  4834  		off2 := v_0.AuxInt
  4835  		sym2 := v_0.Aux
  4836  		ptr := v_0.Args[0]
  4837  		mem := v.Args[1]
  4838  		if !(canMergeSym(sym1, sym2)) {
  4839  			break
  4840  		}
  4841  		v.reset(OpMIPSMOVWstorezero)
  4842  		v.AuxInt = off1 + off2
  4843  		v.Aux = mergeSym(sym1, sym2)
  4844  		v.AddArg(ptr)
  4845  		v.AddArg(mem)
  4846  		return true
  4847  	}
  4848  	return false
  4849  }
  4850  func rewriteValueMIPS_OpMIPSMUL_0(v *Value) bool {
  4851  	// match: (MUL (MOVWconst [0]) _)
  4852  	// cond:
  4853  	// result: (MOVWconst [0])
  4854  	for {
  4855  		v_0 := v.Args[0]
  4856  		if v_0.Op != OpMIPSMOVWconst {
  4857  			break
  4858  		}
  4859  		if v_0.AuxInt != 0 {
  4860  			break
  4861  		}
  4862  		v.reset(OpMIPSMOVWconst)
  4863  		v.AuxInt = 0
  4864  		return true
  4865  	}
  4866  	// match: (MUL _ (MOVWconst [0]))
  4867  	// cond:
  4868  	// result: (MOVWconst [0])
  4869  	for {
  4870  		v_1 := v.Args[1]
  4871  		if v_1.Op != OpMIPSMOVWconst {
  4872  			break
  4873  		}
  4874  		if v_1.AuxInt != 0 {
  4875  			break
  4876  		}
  4877  		v.reset(OpMIPSMOVWconst)
  4878  		v.AuxInt = 0
  4879  		return true
  4880  	}
  4881  	// match: (MUL (MOVWconst [1]) x)
  4882  	// cond:
  4883  	// result: x
  4884  	for {
  4885  		v_0 := v.Args[0]
  4886  		if v_0.Op != OpMIPSMOVWconst {
  4887  			break
  4888  		}
  4889  		if v_0.AuxInt != 1 {
  4890  			break
  4891  		}
  4892  		x := v.Args[1]
  4893  		v.reset(OpCopy)
  4894  		v.Type = x.Type
  4895  		v.AddArg(x)
  4896  		return true
  4897  	}
  4898  	// match: (MUL x (MOVWconst [1]))
  4899  	// cond:
  4900  	// result: x
  4901  	for {
  4902  		x := v.Args[0]
  4903  		v_1 := v.Args[1]
  4904  		if v_1.Op != OpMIPSMOVWconst {
  4905  			break
  4906  		}
  4907  		if v_1.AuxInt != 1 {
  4908  			break
  4909  		}
  4910  		v.reset(OpCopy)
  4911  		v.Type = x.Type
  4912  		v.AddArg(x)
  4913  		return true
  4914  	}
  4915  	// match: (MUL (MOVWconst [-1]) x)
  4916  	// cond:
  4917  	// result: (NEG x)
  4918  	for {
  4919  		v_0 := v.Args[0]
  4920  		if v_0.Op != OpMIPSMOVWconst {
  4921  			break
  4922  		}
  4923  		if v_0.AuxInt != -1 {
  4924  			break
  4925  		}
  4926  		x := v.Args[1]
  4927  		v.reset(OpMIPSNEG)
  4928  		v.AddArg(x)
  4929  		return true
  4930  	}
  4931  	// match: (MUL x (MOVWconst [-1]))
  4932  	// cond:
  4933  	// result: (NEG x)
  4934  	for {
  4935  		x := v.Args[0]
  4936  		v_1 := v.Args[1]
  4937  		if v_1.Op != OpMIPSMOVWconst {
  4938  			break
  4939  		}
  4940  		if v_1.AuxInt != -1 {
  4941  			break
  4942  		}
  4943  		v.reset(OpMIPSNEG)
  4944  		v.AddArg(x)
  4945  		return true
  4946  	}
  4947  	// match: (MUL (MOVWconst [c]) x)
  4948  	// cond: isPowerOfTwo(int64(uint32(c)))
  4949  	// result: (SLLconst [log2(int64(uint32(c)))] x)
  4950  	for {
  4951  		v_0 := v.Args[0]
  4952  		if v_0.Op != OpMIPSMOVWconst {
  4953  			break
  4954  		}
  4955  		c := v_0.AuxInt
  4956  		x := v.Args[1]
  4957  		if !(isPowerOfTwo(int64(uint32(c)))) {
  4958  			break
  4959  		}
  4960  		v.reset(OpMIPSSLLconst)
  4961  		v.AuxInt = log2(int64(uint32(c)))
  4962  		v.AddArg(x)
  4963  		return true
  4964  	}
  4965  	// match: (MUL x (MOVWconst [c]))
  4966  	// cond: isPowerOfTwo(int64(uint32(c)))
  4967  	// result: (SLLconst [log2(int64(uint32(c)))] x)
  4968  	for {
  4969  		x := v.Args[0]
  4970  		v_1 := v.Args[1]
  4971  		if v_1.Op != OpMIPSMOVWconst {
  4972  			break
  4973  		}
  4974  		c := v_1.AuxInt
  4975  		if !(isPowerOfTwo(int64(uint32(c)))) {
  4976  			break
  4977  		}
  4978  		v.reset(OpMIPSSLLconst)
  4979  		v.AuxInt = log2(int64(uint32(c)))
  4980  		v.AddArg(x)
  4981  		return true
  4982  	}
  4983  	// match: (MUL (MOVWconst [c]) (MOVWconst [d]))
  4984  	// cond:
  4985  	// result: (MOVWconst [int64(int32(c)*int32(d))])
  4986  	for {
  4987  		v_0 := v.Args[0]
  4988  		if v_0.Op != OpMIPSMOVWconst {
  4989  			break
  4990  		}
  4991  		c := v_0.AuxInt
  4992  		v_1 := v.Args[1]
  4993  		if v_1.Op != OpMIPSMOVWconst {
  4994  			break
  4995  		}
  4996  		d := v_1.AuxInt
  4997  		v.reset(OpMIPSMOVWconst)
  4998  		v.AuxInt = int64(int32(c) * int32(d))
  4999  		return true
  5000  	}
  5001  	// match: (MUL (MOVWconst [d]) (MOVWconst [c]))
  5002  	// cond:
  5003  	// result: (MOVWconst [int64(int32(c)*int32(d))])
  5004  	for {
  5005  		v_0 := v.Args[0]
  5006  		if v_0.Op != OpMIPSMOVWconst {
  5007  			break
  5008  		}
  5009  		d := v_0.AuxInt
  5010  		v_1 := v.Args[1]
  5011  		if v_1.Op != OpMIPSMOVWconst {
  5012  			break
  5013  		}
  5014  		c := v_1.AuxInt
  5015  		v.reset(OpMIPSMOVWconst)
  5016  		v.AuxInt = int64(int32(c) * int32(d))
  5017  		return true
  5018  	}
  5019  	return false
  5020  }
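// Editorial illustration (not generated code): the MUL rules above strength-reduce a multiply
// by a power-of-two constant into SLLconst and fold a product of two constants with 32-bit
// wraparound. A sketch of the shift equivalence, with the power-of-two test and log2 done
// inline rather than via the package helpers (names made up):
func exampleMulByPowerOfTwo(x int32, c uint32) bool {
	if c == 0 || c&(c-1) != 0 {
		return true // not a power of two; the SLLconst rewrite does not apply
	}
	shift := uint(0)
	for uint32(1)<<shift != c {
		shift++
	}
	return x*int32(c) == x<<shift // identical results modulo 2^32
}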
  5021  func rewriteValueMIPS_OpMIPSNEG_0(v *Value) bool {
  5022  	// match: (NEG (MOVWconst [c]))
  5023  	// cond:
  5024  	// result: (MOVWconst [int64(int32(-c))])
  5025  	for {
  5026  		v_0 := v.Args[0]
  5027  		if v_0.Op != OpMIPSMOVWconst {
  5028  			break
  5029  		}
  5030  		c := v_0.AuxInt
  5031  		v.reset(OpMIPSMOVWconst)
  5032  		v.AuxInt = int64(int32(-c))
  5033  		return true
  5034  	}
  5035  	return false
  5036  }
  5037  func rewriteValueMIPS_OpMIPSNOR_0(v *Value) bool {
  5038  	// match: (NOR x (MOVWconst [c]))
  5039  	// cond:
  5040  	// result: (NORconst [c] x)
  5041  	for {
  5042  		x := v.Args[0]
  5043  		v_1 := v.Args[1]
  5044  		if v_1.Op != OpMIPSMOVWconst {
  5045  			break
  5046  		}
  5047  		c := v_1.AuxInt
  5048  		v.reset(OpMIPSNORconst)
  5049  		v.AuxInt = c
  5050  		v.AddArg(x)
  5051  		return true
  5052  	}
  5053  	// match: (NOR (MOVWconst [c]) x)
  5054  	// cond:
  5055  	// result: (NORconst [c] x)
  5056  	for {
  5057  		v_0 := v.Args[0]
  5058  		if v_0.Op != OpMIPSMOVWconst {
  5059  			break
  5060  		}
  5061  		c := v_0.AuxInt
  5062  		x := v.Args[1]
  5063  		v.reset(OpMIPSNORconst)
  5064  		v.AuxInt = c
  5065  		v.AddArg(x)
  5066  		return true
  5067  	}
  5068  	return false
  5069  }
  5070  func rewriteValueMIPS_OpMIPSNORconst_0(v *Value) bool {
  5071  	// match: (NORconst [c] (MOVWconst [d]))
  5072  	// cond:
  5073  	// result: (MOVWconst [^(c|d)])
  5074  	for {
  5075  		c := v.AuxInt
  5076  		v_0 := v.Args[0]
  5077  		if v_0.Op != OpMIPSMOVWconst {
  5078  			break
  5079  		}
  5080  		d := v_0.AuxInt
  5081  		v.reset(OpMIPSMOVWconst)
  5082  		v.AuxInt = ^(c | d)
  5083  		return true
  5084  	}
  5085  	return false
  5086  }
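// Editorial illustration (not generated code): together with the NOR rules above, NORconst of
// a constant folds the whole operation away. The folded arithmetic, sketched on int32 for
// clarity (the rewriter performs the same computation on the int64 AuxInt):
func exampleNorFold(c, d int32) int32 {
	return ^(c | d) // matches the MOVWconst [^(c|d)] result above
}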
  5087  func rewriteValueMIPS_OpMIPSOR_0(v *Value) bool {
  5088  	b := v.Block
  5089  	_ = b
  5090  	// match: (OR x (MOVWconst [c]))
  5091  	// cond:
  5092  	// result: (ORconst  [c] x)
  5093  	for {
  5094  		x := v.Args[0]
  5095  		v_1 := v.Args[1]
  5096  		if v_1.Op != OpMIPSMOVWconst {
  5097  			break
  5098  		}
  5099  		c := v_1.AuxInt
  5100  		v.reset(OpMIPSORconst)
  5101  		v.AuxInt = c
  5102  		v.AddArg(x)
  5103  		return true
  5104  	}
  5105  	// match: (OR (MOVWconst [c]) x)
  5106  	// cond:
  5107  	// result: (ORconst  [c] x)
  5108  	for {
  5109  		v_0 := v.Args[0]
  5110  		if v_0.Op != OpMIPSMOVWconst {
  5111  			break
  5112  		}
  5113  		c := v_0.AuxInt
  5114  		x := v.Args[1]
  5115  		v.reset(OpMIPSORconst)
  5116  		v.AuxInt = c
  5117  		v.AddArg(x)
  5118  		return true
  5119  	}
  5120  	// match: (OR x x)
  5121  	// cond:
  5122  	// result: x
  5123  	for {
  5124  		x := v.Args[0]
  5125  		if x != v.Args[1] {
  5126  			break
  5127  		}
  5128  		v.reset(OpCopy)
  5129  		v.Type = x.Type
  5130  		v.AddArg(x)
  5131  		return true
  5132  	}
  5133  	// match: (OR (SGTUzero x) (SGTUzero y))
  5134  	// cond:
  5135  	// result: (SGTUzero (OR <x.Type> x y))
  5136  	for {
  5137  		v_0 := v.Args[0]
  5138  		if v_0.Op != OpMIPSSGTUzero {
  5139  			break
  5140  		}
  5141  		x := v_0.Args[0]
  5142  		v_1 := v.Args[1]
  5143  		if v_1.Op != OpMIPSSGTUzero {
  5144  			break
  5145  		}
  5146  		y := v_1.Args[0]
  5147  		v.reset(OpMIPSSGTUzero)
  5148  		v0 := b.NewValue0(v.Pos, OpMIPSOR, x.Type)
  5149  		v0.AddArg(x)
  5150  		v0.AddArg(y)
  5151  		v.AddArg(v0)
  5152  		return true
  5153  	}
  5154  	// match: (OR (SGTUzero y) (SGTUzero x))
  5155  	// cond:
  5156  	// result: (SGTUzero (OR <x.Type> x y))
  5157  	for {
  5158  		v_0 := v.Args[0]
  5159  		if v_0.Op != OpMIPSSGTUzero {
  5160  			break
  5161  		}
  5162  		y := v_0.Args[0]
  5163  		v_1 := v.Args[1]
  5164  		if v_1.Op != OpMIPSSGTUzero {
  5165  			break
  5166  		}
  5167  		x := v_1.Args[0]
  5168  		v.reset(OpMIPSSGTUzero)
  5169  		v0 := b.NewValue0(v.Pos, OpMIPSOR, x.Type)
  5170  		v0.AddArg(x)
  5171  		v0.AddArg(y)
  5172  		v.AddArg(v0)
  5173  		return true
  5174  	}
  5175  	return false
  5176  }
  5177  func rewriteValueMIPS_OpMIPSORconst_0(v *Value) bool {
  5178  	// match: (ORconst [0] x)
  5179  	// cond:
  5180  	// result: x
  5181  	for {
  5182  		if v.AuxInt != 0 {
  5183  			break
  5184  		}
  5185  		x := v.Args[0]
  5186  		v.reset(OpCopy)
  5187  		v.Type = x.Type
  5188  		v.AddArg(x)
  5189  		return true
  5190  	}
  5191  	// match: (ORconst [-1] _)
  5192  	// cond:
  5193  	// result: (MOVWconst [-1])
  5194  	for {
  5195  		if v.AuxInt != -1 {
  5196  			break
  5197  		}
  5198  		v.reset(OpMIPSMOVWconst)
  5199  		v.AuxInt = -1
  5200  		return true
  5201  	}
  5202  	// match: (ORconst [c] (MOVWconst [d]))
  5203  	// cond:
  5204  	// result: (MOVWconst [c|d])
  5205  	for {
  5206  		c := v.AuxInt
  5207  		v_0 := v.Args[0]
  5208  		if v_0.Op != OpMIPSMOVWconst {
  5209  			break
  5210  		}
  5211  		d := v_0.AuxInt
  5212  		v.reset(OpMIPSMOVWconst)
  5213  		v.AuxInt = c | d
  5214  		return true
  5215  	}
  5216  	// match: (ORconst [c] (ORconst [d] x))
  5217  	// cond:
  5218  	// result: (ORconst [c|d] x)
  5219  	for {
  5220  		c := v.AuxInt
  5221  		v_0 := v.Args[0]
  5222  		if v_0.Op != OpMIPSORconst {
  5223  			break
  5224  		}
  5225  		d := v_0.AuxInt
  5226  		x := v_0.Args[0]
  5227  		v.reset(OpMIPSORconst)
  5228  		v.AuxInt = c | d
  5229  		v.AddArg(x)
  5230  		return true
  5231  	}
  5232  	return false
  5233  }
  5234  func rewriteValueMIPS_OpMIPSSGT_0(v *Value) bool {
  5235  	// match: (SGT (MOVWconst [c]) x)
  5236  	// cond:
  5237  	// result: (SGTconst  [c] x)
  5238  	for {
  5239  		v_0 := v.Args[0]
  5240  		if v_0.Op != OpMIPSMOVWconst {
  5241  			break
  5242  		}
  5243  		c := v_0.AuxInt
  5244  		x := v.Args[1]
  5245  		v.reset(OpMIPSSGTconst)
  5246  		v.AuxInt = c
  5247  		v.AddArg(x)
  5248  		return true
  5249  	}
  5250  	// match: (SGT x (MOVWconst [0]))
  5251  	// cond:
  5252  	// result: (SGTzero x)
  5253  	for {
  5254  		x := v.Args[0]
  5255  		v_1 := v.Args[1]
  5256  		if v_1.Op != OpMIPSMOVWconst {
  5257  			break
  5258  		}
  5259  		if v_1.AuxInt != 0 {
  5260  			break
  5261  		}
  5262  		v.reset(OpMIPSSGTzero)
  5263  		v.AddArg(x)
  5264  		return true
  5265  	}
  5266  	return false
  5267  }
  5268  func rewriteValueMIPS_OpMIPSSGTU_0(v *Value) bool {
  5269  	// match: (SGTU (MOVWconst [c]) x)
  5270  	// cond:
  5271  	// result: (SGTUconst [c] x)
  5272  	for {
  5273  		v_0 := v.Args[0]
  5274  		if v_0.Op != OpMIPSMOVWconst {
  5275  			break
  5276  		}
  5277  		c := v_0.AuxInt
  5278  		x := v.Args[1]
  5279  		v.reset(OpMIPSSGTUconst)
  5280  		v.AuxInt = c
  5281  		v.AddArg(x)
  5282  		return true
  5283  	}
  5284  	// match: (SGTU x (MOVWconst [0]))
  5285  	// cond:
  5286  	// result: (SGTUzero x)
  5287  	for {
  5288  		x := v.Args[0]
  5289  		v_1 := v.Args[1]
  5290  		if v_1.Op != OpMIPSMOVWconst {
  5291  			break
  5292  		}
  5293  		if v_1.AuxInt != 0 {
  5294  			break
  5295  		}
  5296  		v.reset(OpMIPSSGTUzero)
  5297  		v.AddArg(x)
  5298  		return true
  5299  	}
  5300  	return false
  5301  }
  5302  func rewriteValueMIPS_OpMIPSSGTUconst_0(v *Value) bool {
  5303  	// match: (SGTUconst [c] (MOVWconst [d]))
  5304  	// cond: uint32(c)>uint32(d)
  5305  	// result: (MOVWconst [1])
  5306  	for {
  5307  		c := v.AuxInt
  5308  		v_0 := v.Args[0]
  5309  		if v_0.Op != OpMIPSMOVWconst {
  5310  			break
  5311  		}
  5312  		d := v_0.AuxInt
  5313  		if !(uint32(c) > uint32(d)) {
  5314  			break
  5315  		}
  5316  		v.reset(OpMIPSMOVWconst)
  5317  		v.AuxInt = 1
  5318  		return true
  5319  	}
  5320  	// match: (SGTUconst [c] (MOVWconst [d]))
  5321  	// cond: uint32(c)<=uint32(d)
  5322  	// result: (MOVWconst [0])
  5323  	for {
  5324  		c := v.AuxInt
  5325  		v_0 := v.Args[0]
  5326  		if v_0.Op != OpMIPSMOVWconst {
  5327  			break
  5328  		}
  5329  		d := v_0.AuxInt
  5330  		if !(uint32(c) <= uint32(d)) {
  5331  			break
  5332  		}
  5333  		v.reset(OpMIPSMOVWconst)
  5334  		v.AuxInt = 0
  5335  		return true
  5336  	}
  5337  	// match: (SGTUconst [c] (MOVBUreg _))
  5338  	// cond: 0xff < uint32(c)
  5339  	// result: (MOVWconst [1])
  5340  	for {
  5341  		c := v.AuxInt
  5342  		v_0 := v.Args[0]
  5343  		if v_0.Op != OpMIPSMOVBUreg {
  5344  			break
  5345  		}
  5346  		if !(0xff < uint32(c)) {
  5347  			break
  5348  		}
  5349  		v.reset(OpMIPSMOVWconst)
  5350  		v.AuxInt = 1
  5351  		return true
  5352  	}
  5353  	// match: (SGTUconst [c] (MOVHUreg _))
  5354  	// cond: 0xffff < uint32(c)
  5355  	// result: (MOVWconst [1])
  5356  	for {
  5357  		c := v.AuxInt
  5358  		v_0 := v.Args[0]
  5359  		if v_0.Op != OpMIPSMOVHUreg {
  5360  			break
  5361  		}
  5362  		if !(0xffff < uint32(c)) {
  5363  			break
  5364  		}
  5365  		v.reset(OpMIPSMOVWconst)
  5366  		v.AuxInt = 1
  5367  		return true
  5368  	}
  5369  	// match: (SGTUconst [c] (ANDconst [m] _))
  5370  	// cond: uint32(m) < uint32(c)
  5371  	// result: (MOVWconst [1])
  5372  	for {
  5373  		c := v.AuxInt
  5374  		v_0 := v.Args[0]
  5375  		if v_0.Op != OpMIPSANDconst {
  5376  			break
  5377  		}
  5378  		m := v_0.AuxInt
  5379  		if !(uint32(m) < uint32(c)) {
  5380  			break
  5381  		}
  5382  		v.reset(OpMIPSMOVWconst)
  5383  		v.AuxInt = 1
  5384  		return true
  5385  	}
  5386  	// match: (SGTUconst [c] (SRLconst _ [d]))
  5387  	// cond: uint32(d) <= 31 && 1<<(32-uint32(d)) <= uint32(c)
  5388  	// result: (MOVWconst [1])
  5389  	for {
  5390  		c := v.AuxInt
  5391  		v_0 := v.Args[0]
  5392  		if v_0.Op != OpMIPSSRLconst {
  5393  			break
  5394  		}
  5395  		d := v_0.AuxInt
  5396  		if !(uint32(d) <= 31 && 1<<(32-uint32(d)) <= uint32(c)) {
  5397  			break
  5398  		}
  5399  		v.reset(OpMIPSMOVWconst)
  5400  		v.AuxInt = 1
  5401  		return true
  5402  	}
  5403  	return false
  5404  }
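// Editorial illustration (not generated code): several SGTUconst rules above settle the
// comparison from a bound on the operand alone: a zero-extended byte is at most 0xff, a
// zero-extended half-word at most 0xffff, and an ANDconst [m] result at most m. A sketch
// of the byte case (names made up):
func exampleUnsignedCompareByBound(c uint32, b uint8) bool {
	if c <= 0xff {
		return true // the bound is not decisive; the rule does not fire
	}
	return c > uint32(b) // always true once c exceeds the operand's maximum possible value
}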
  5405  func rewriteValueMIPS_OpMIPSSGTUzero_0(v *Value) bool {
  5406  	// match: (SGTUzero (MOVWconst [d]))
  5407  	// cond: uint32(d) != 0
  5408  	// result: (MOVWconst [1])
  5409  	for {
  5410  		v_0 := v.Args[0]
  5411  		if v_0.Op != OpMIPSMOVWconst {
  5412  			break
  5413  		}
  5414  		d := v_0.AuxInt
  5415  		if !(uint32(d) != 0) {
  5416  			break
  5417  		}
  5418  		v.reset(OpMIPSMOVWconst)
  5419  		v.AuxInt = 1
  5420  		return true
  5421  	}
  5422  	// match: (SGTUzero (MOVWconst [d]))
  5423  	// cond: uint32(d) == 0
  5424  	// result: (MOVWconst [0])
  5425  	for {
  5426  		v_0 := v.Args[0]
  5427  		if v_0.Op != OpMIPSMOVWconst {
  5428  			break
  5429  		}
  5430  		d := v_0.AuxInt
  5431  		if !(uint32(d) == 0) {
  5432  			break
  5433  		}
  5434  		v.reset(OpMIPSMOVWconst)
  5435  		v.AuxInt = 0
  5436  		return true
  5437  	}
  5438  	return false
  5439  }
  5440  func rewriteValueMIPS_OpMIPSSGTconst_0(v *Value) bool {
  5441  	// match: (SGTconst [c] (MOVWconst [d]))
  5442  	// cond: int32(c) > int32(d)
  5443  	// result: (MOVWconst [1])
  5444  	for {
  5445  		c := v.AuxInt
  5446  		v_0 := v.Args[0]
  5447  		if v_0.Op != OpMIPSMOVWconst {
  5448  			break
  5449  		}
  5450  		d := v_0.AuxInt
  5451  		if !(int32(c) > int32(d)) {
  5452  			break
  5453  		}
  5454  		v.reset(OpMIPSMOVWconst)
  5455  		v.AuxInt = 1
  5456  		return true
  5457  	}
  5458  	// match: (SGTconst [c] (MOVWconst [d]))
  5459  	// cond: int32(c) <= int32(d)
  5460  	// result: (MOVWconst [0])
  5461  	for {
  5462  		c := v.AuxInt
  5463  		v_0 := v.Args[0]
  5464  		if v_0.Op != OpMIPSMOVWconst {
  5465  			break
  5466  		}
  5467  		d := v_0.AuxInt
  5468  		if !(int32(c) <= int32(d)) {
  5469  			break
  5470  		}
  5471  		v.reset(OpMIPSMOVWconst)
  5472  		v.AuxInt = 0
  5473  		return true
  5474  	}
  5475  	// match: (SGTconst [c] (MOVBreg _))
  5476  	// cond: 0x7f < int32(c)
  5477  	// result: (MOVWconst [1])
  5478  	for {
  5479  		c := v.AuxInt
  5480  		v_0 := v.Args[0]
  5481  		if v_0.Op != OpMIPSMOVBreg {
  5482  			break
  5483  		}
  5484  		if !(0x7f < int32(c)) {
  5485  			break
  5486  		}
  5487  		v.reset(OpMIPSMOVWconst)
  5488  		v.AuxInt = 1
  5489  		return true
  5490  	}
  5491  	// match: (SGTconst [c] (MOVBreg _))
  5492  	// cond: int32(c) <= -0x80
  5493  	// result: (MOVWconst [0])
  5494  	for {
  5495  		c := v.AuxInt
  5496  		v_0 := v.Args[0]
  5497  		if v_0.Op != OpMIPSMOVBreg {
  5498  			break
  5499  		}
  5500  		if !(int32(c) <= -0x80) {
  5501  			break
  5502  		}
  5503  		v.reset(OpMIPSMOVWconst)
  5504  		v.AuxInt = 0
  5505  		return true
  5506  	}
  5507  	// match: (SGTconst [c] (MOVBUreg _))
  5508  	// cond: 0xff < int32(c)
  5509  	// result: (MOVWconst [1])
  5510  	for {
  5511  		c := v.AuxInt
  5512  		v_0 := v.Args[0]
  5513  		if v_0.Op != OpMIPSMOVBUreg {
  5514  			break
  5515  		}
  5516  		if !(0xff < int32(c)) {
  5517  			break
  5518  		}
  5519  		v.reset(OpMIPSMOVWconst)
  5520  		v.AuxInt = 1
  5521  		return true
  5522  	}
  5523  	// match: (SGTconst [c] (MOVBUreg _))
  5524  	// cond: int32(c) < 0
  5525  	// result: (MOVWconst [0])
  5526  	for {
  5527  		c := v.AuxInt
  5528  		v_0 := v.Args[0]
  5529  		if v_0.Op != OpMIPSMOVBUreg {
  5530  			break
  5531  		}
  5532  		if !(int32(c) < 0) {
  5533  			break
  5534  		}
  5535  		v.reset(OpMIPSMOVWconst)
  5536  		v.AuxInt = 0
  5537  		return true
  5538  	}
  5539  	// match: (SGTconst [c] (MOVHreg _))
  5540  	// cond: 0x7fff < int32(c)
  5541  	// result: (MOVWconst [1])
  5542  	for {
  5543  		c := v.AuxInt
  5544  		v_0 := v.Args[0]
  5545  		if v_0.Op != OpMIPSMOVHreg {
  5546  			break
  5547  		}
  5548  		if !(0x7fff < int32(c)) {
  5549  			break
  5550  		}
  5551  		v.reset(OpMIPSMOVWconst)
  5552  		v.AuxInt = 1
  5553  		return true
  5554  	}
  5555  	// match: (SGTconst [c] (MOVHreg _))
  5556  	// cond: int32(c) <= -0x8000
  5557  	// result: (MOVWconst [0])
  5558  	for {
  5559  		c := v.AuxInt
  5560  		v_0 := v.Args[0]
  5561  		if v_0.Op != OpMIPSMOVHreg {
  5562  			break
  5563  		}
  5564  		if !(int32(c) <= -0x8000) {
  5565  			break
  5566  		}
  5567  		v.reset(OpMIPSMOVWconst)
  5568  		v.AuxInt = 0
  5569  		return true
  5570  	}
  5571  	// match: (SGTconst [c] (MOVHUreg _))
  5572  	// cond: 0xffff < int32(c)
  5573  	// result: (MOVWconst [1])
  5574  	for {
  5575  		c := v.AuxInt
  5576  		v_0 := v.Args[0]
  5577  		if v_0.Op != OpMIPSMOVHUreg {
  5578  			break
  5579  		}
  5580  		if !(0xffff < int32(c)) {
  5581  			break
  5582  		}
  5583  		v.reset(OpMIPSMOVWconst)
  5584  		v.AuxInt = 1
  5585  		return true
  5586  	}
  5587  	// match: (SGTconst [c] (MOVHUreg _))
  5588  	// cond: int32(c) < 0
  5589  	// result: (MOVWconst [0])
  5590  	for {
  5591  		c := v.AuxInt
  5592  		v_0 := v.Args[0]
  5593  		if v_0.Op != OpMIPSMOVHUreg {
  5594  			break
  5595  		}
  5596  		if !(int32(c) < 0) {
  5597  			break
  5598  		}
  5599  		v.reset(OpMIPSMOVWconst)
  5600  		v.AuxInt = 0
  5601  		return true
  5602  	}
  5603  	return false
  5604  }
  5605  func rewriteValueMIPS_OpMIPSSGTconst_10(v *Value) bool {
  5606  	// match: (SGTconst [c] (ANDconst [m] _))
  5607  	// cond: 0 <= int32(m) && int32(m) < int32(c)
  5608  	// result: (MOVWconst [1])
  5609  	for {
  5610  		c := v.AuxInt
  5611  		v_0 := v.Args[0]
  5612  		if v_0.Op != OpMIPSANDconst {
  5613  			break
  5614  		}
  5615  		m := v_0.AuxInt
  5616  		if !(0 <= int32(m) && int32(m) < int32(c)) {
  5617  			break
  5618  		}
  5619  		v.reset(OpMIPSMOVWconst)
  5620  		v.AuxInt = 1
  5621  		return true
  5622  	}
  5623  	// match: (SGTconst [c] (SRLconst _ [d]))
  5624  	// cond: 0 <= int32(c) && uint32(d) <= 31 && 1<<(32-uint32(d)) <= int32(c)
  5625  	// result: (MOVWconst [1])
  5626  	for {
  5627  		c := v.AuxInt
  5628  		v_0 := v.Args[0]
  5629  		if v_0.Op != OpMIPSSRLconst {
  5630  			break
  5631  		}
  5632  		d := v_0.AuxInt
  5633  		if !(0 <= int32(c) && uint32(d) <= 31 && 1<<(32-uint32(d)) <= int32(c)) {
  5634  			break
  5635  		}
  5636  		v.reset(OpMIPSMOVWconst)
  5637  		v.AuxInt = 1
  5638  		return true
  5639  	}
  5640  	return false
  5641  }
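// Editorial illustration (not generated code): the signed ANDconst rule above uses the same
// idea as the unsigned one: when the mask m is non-negative, x&m lies in [0, m], so any
// constant c > m compares greater regardless of x. A self-contained check of that bound
// (function name made up):
func exampleSignedCompareAfterMask(c, m, x int32) bool {
	if m < 0 || m >= c {
		return true // the rule's condition is not met; nothing to compare
	}
	return c > x&m // x&m is in [0, m] whenever m >= 0
}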
  5642  func rewriteValueMIPS_OpMIPSSGTzero_0(v *Value) bool {
  5643  	// match: (SGTzero (MOVWconst [d]))
  5644  	// cond: int32(d) > 0
  5645  	// result: (MOVWconst [1])
  5646  	for {
  5647  		v_0 := v.Args[0]
  5648  		if v_0.Op != OpMIPSMOVWconst {
  5649  			break
  5650  		}
  5651  		d := v_0.AuxInt
  5652  		if !(int32(d) > 0) {
  5653  			break
  5654  		}
  5655  		v.reset(OpMIPSMOVWconst)
  5656  		v.AuxInt = 1
  5657  		return true
  5658  	}
  5659  	// match: (SGTzero (MOVWconst [d]))
  5660  	// cond: int32(d) <= 0
  5661  	// result: (MOVWconst [0])
  5662  	for {
  5663  		v_0 := v.Args[0]
  5664  		if v_0.Op != OpMIPSMOVWconst {
  5665  			break
  5666  		}
  5667  		d := v_0.AuxInt
  5668  		if !(int32(d) <= 0) {
  5669  			break
  5670  		}
  5671  		v.reset(OpMIPSMOVWconst)
  5672  		v.AuxInt = 0
  5673  		return true
  5674  	}
  5675  	return false
  5676  }
  5677  func rewriteValueMIPS_OpMIPSSLL_0(v *Value) bool {
  5678  	// match: (SLL _ (MOVWconst [c]))
  5679  	// cond: uint32(c)>=32
  5680  	// result: (MOVWconst [0])
  5681  	for {
  5682  		v_1 := v.Args[1]
  5683  		if v_1.Op != OpMIPSMOVWconst {
  5684  			break
  5685  		}
  5686  		c := v_1.AuxInt
  5687  		if !(uint32(c) >= 32) {
  5688  			break
  5689  		}
  5690  		v.reset(OpMIPSMOVWconst)
  5691  		v.AuxInt = 0
  5692  		return true
  5693  	}
  5694  	// match: (SLL x (MOVWconst [c]))
  5695  	// cond:
  5696  	// result: (SLLconst x [c])
  5697  	for {
  5698  		x := v.Args[0]
  5699  		v_1 := v.Args[1]
  5700  		if v_1.Op != OpMIPSMOVWconst {
  5701  			break
  5702  		}
  5703  		c := v_1.AuxInt
  5704  		v.reset(OpMIPSSLLconst)
  5705  		v.AuxInt = c
  5706  		v.AddArg(x)
  5707  		return true
  5708  	}
  5709  	return false
  5710  }
  5711  func rewriteValueMIPS_OpMIPSSLLconst_0(v *Value) bool {
  5712  	// match: (SLLconst [c] (MOVWconst [d]))
  5713  	// cond:
  5714  	// result: (MOVWconst [int64(int32(uint32(d)<<uint32(c)))])
  5715  	for {
  5716  		c := v.AuxInt
  5717  		v_0 := v.Args[0]
  5718  		if v_0.Op != OpMIPSMOVWconst {
  5719  			break
  5720  		}
  5721  		d := v_0.AuxInt
  5722  		v.reset(OpMIPSMOVWconst)
  5723  		v.AuxInt = int64(int32(uint32(d) << uint32(c)))
  5724  		return true
  5725  	}
  5726  	return false
  5727  }
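// Editorial illustration (not generated code): SLLconst of a constant folds at compile time;
// the shift is performed on the low 32 bits and the result is sign-extended back into the
// 64-bit AuxInt, matching the MOVWconst result above (function name made up):
func exampleFoldShiftLeft(c, d int64) int64 {
	return int64(int32(uint32(d) << uint32(c)))
}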
  5728  func rewriteValueMIPS_OpMIPSSRA_0(v *Value) bool {
  5729  	// match: (SRA x (MOVWconst [c]))
  5730  	// cond: uint32(c)>=32
  5731  	// result: (SRAconst x [31])
  5732  	for {
  5733  		x := v.Args[0]
  5734  		v_1 := v.Args[1]
  5735  		if v_1.Op != OpMIPSMOVWconst {
  5736  			break
  5737  		}
  5738  		c := v_1.AuxInt
  5739  		if !(uint32(c) >= 32) {
  5740  			break
  5741  		}
  5742  		v.reset(OpMIPSSRAconst)
  5743  		v.AuxInt = 31
  5744  		v.AddArg(x)
  5745  		return true
  5746  	}
  5747  	// match: (SRA x (MOVWconst [c]))
  5748  	// cond:
  5749  	// result: (SRAconst x [c])
  5750  	for {
  5751  		x := v.Args[0]
  5752  		v_1 := v.Args[1]
  5753  		if v_1.Op != OpMIPSMOVWconst {
  5754  			break
  5755  		}
  5756  		c := v_1.AuxInt
  5757  		v.reset(OpMIPSSRAconst)
  5758  		v.AuxInt = c
  5759  		v.AddArg(x)
  5760  		return true
  5761  	}
  5762  	return false
  5763  }
  5764  func rewriteValueMIPS_OpMIPSSRAconst_0(v *Value) bool {
  5765  	// match: (SRAconst [c] (MOVWconst [d]))
  5766  	// cond:
  5767  	// result: (MOVWconst [int64(int32(d)>>uint32(c))])
  5768  	for {
  5769  		c := v.AuxInt
  5770  		v_0 := v.Args[0]
  5771  		if v_0.Op != OpMIPSMOVWconst {
  5772  			break
  5773  		}
  5774  		d := v_0.AuxInt
  5775  		v.reset(OpMIPSMOVWconst)
  5776  		v.AuxInt = int64(int32(d) >> uint32(c))
  5777  		return true
  5778  	}
  5779  	return false
  5780  }
  5781  func rewriteValueMIPS_OpMIPSSRL_0(v *Value) bool {
  5782  	// match: (SRL _ (MOVWconst [c]))
  5783  	// cond: uint32(c)>=32
  5784  	// result: (MOVWconst [0])
  5785  	for {
  5786  		v_1 := v.Args[1]
  5787  		if v_1.Op != OpMIPSMOVWconst {
  5788  			break
  5789  		}
  5790  		c := v_1.AuxInt
  5791  		if !(uint32(c) >= 32) {
  5792  			break
  5793  		}
  5794  		v.reset(OpMIPSMOVWconst)
  5795  		v.AuxInt = 0
  5796  		return true
  5797  	}
  5798  	// match: (SRL x (MOVWconst [c]))
  5799  	// cond:
  5800  	// result: (SRLconst x [c])
  5801  	for {
  5802  		x := v.Args[0]
  5803  		v_1 := v.Args[1]
  5804  		if v_1.Op != OpMIPSMOVWconst {
  5805  			break
  5806  		}
  5807  		c := v_1.AuxInt
  5808  		v.reset(OpMIPSSRLconst)
  5809  		v.AuxInt = c
  5810  		v.AddArg(x)
  5811  		return true
  5812  	}
  5813  	return false
  5814  }
  5815  func rewriteValueMIPS_OpMIPSSRLconst_0(v *Value) bool {
  5816  	// match: (SRLconst [c] (MOVWconst [d]))
  5817  	// cond:
  5818  	// result: (MOVWconst [int64(uint32(d)>>uint32(c))])
  5819  	for {
  5820  		c := v.AuxInt
  5821  		v_0 := v.Args[0]
  5822  		if v_0.Op != OpMIPSMOVWconst {
  5823  			break
  5824  		}
  5825  		d := v_0.AuxInt
  5826  		v.reset(OpMIPSMOVWconst)
  5827  		v.AuxInt = int64(uint32(d) >> uint32(c))
  5828  		return true
  5829  	}
  5830  	return false
  5831  }
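// Editorial illustration (not generated code): the SRAconst and SRLconst folds above differ
// only in whether the low 32 bits are treated as signed (arithmetic shift) or unsigned
// (logical shift) before being widened back into the AuxInt (function name made up):
func exampleFoldShiftRight(c, d int64) (arithmetic, logical int64) {
	arithmetic = int64(int32(d) >> uint32(c)) // SRAconst: the sign bit is copied in
	logical = int64(uint32(d) >> uint32(c))   // SRLconst: zeros are shifted in
	return
}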
  5832  func rewriteValueMIPS_OpMIPSSUB_0(v *Value) bool {
  5833  	// match: (SUB x (MOVWconst [c]))
  5834  	// cond:
  5835  	// result: (SUBconst [c] x)
  5836  	for {
  5837  		x := v.Args[0]
  5838  		v_1 := v.Args[1]
  5839  		if v_1.Op != OpMIPSMOVWconst {
  5840  			break
  5841  		}
  5842  		c := v_1.AuxInt
  5843  		v.reset(OpMIPSSUBconst)
  5844  		v.AuxInt = c
  5845  		v.AddArg(x)
  5846  		return true
  5847  	}
  5848  	// match: (SUB x x)
  5849  	// cond:
  5850  	// result: (MOVWconst [0])
  5851  	for {
  5852  		x := v.Args[0]
  5853  		if x != v.Args[1] {
  5854  			break
  5855  		}
  5856  		v.reset(OpMIPSMOVWconst)
  5857  		v.AuxInt = 0
  5858  		return true
  5859  	}
  5860  	// match: (SUB (MOVWconst [0]) x)
  5861  	// cond:
  5862  	// result: (NEG x)
  5863  	for {
  5864  		v_0 := v.Args[0]
  5865  		if v_0.Op != OpMIPSMOVWconst {
  5866  			break
  5867  		}
  5868  		if v_0.AuxInt != 0 {
  5869  			break
  5870  		}
  5871  		x := v.Args[1]
  5872  		v.reset(OpMIPSNEG)
  5873  		v.AddArg(x)
  5874  		return true
  5875  	}
  5876  	return false
  5877  }
  5878  func rewriteValueMIPS_OpMIPSSUBconst_0(v *Value) bool {
  5879  	// match: (SUBconst [0] x)
  5880  	// cond:
  5881  	// result: x
  5882  	for {
  5883  		if v.AuxInt != 0 {
  5884  			break
  5885  		}
  5886  		x := v.Args[0]
  5887  		v.reset(OpCopy)
  5888  		v.Type = x.Type
  5889  		v.AddArg(x)
  5890  		return true
  5891  	}
  5892  	// match: (SUBconst [c] (MOVWconst [d]))
  5893  	// cond:
  5894  	// result: (MOVWconst [int64(int32(d-c))])
  5895  	for {
  5896  		c := v.AuxInt
  5897  		v_0 := v.Args[0]
  5898  		if v_0.Op != OpMIPSMOVWconst {
  5899  			break
  5900  		}
  5901  		d := v_0.AuxInt
  5902  		v.reset(OpMIPSMOVWconst)
  5903  		v.AuxInt = int64(int32(d - c))
  5904  		return true
  5905  	}
  5906  	// match: (SUBconst [c] (SUBconst [d] x))
  5907  	// cond:
  5908  	// result: (ADDconst [int64(int32(-c-d))] x)
  5909  	for {
  5910  		c := v.AuxInt
  5911  		v_0 := v.Args[0]
  5912  		if v_0.Op != OpMIPSSUBconst {
  5913  			break
  5914  		}
  5915  		d := v_0.AuxInt
  5916  		x := v_0.Args[0]
  5917  		v.reset(OpMIPSADDconst)
  5918  		v.AuxInt = int64(int32(-c - d))
  5919  		v.AddArg(x)
  5920  		return true
  5921  	}
  5922  	// match: (SUBconst [c] (ADDconst [d] x))
  5923  	// cond:
  5924  	// result: (ADDconst [int64(int32(-c+d))] x)
  5925  	for {
  5926  		c := v.AuxInt
  5927  		v_0 := v.Args[0]
  5928  		if v_0.Op != OpMIPSADDconst {
  5929  			break
  5930  		}
  5931  		d := v_0.AuxInt
  5932  		x := v_0.Args[0]
  5933  		v.reset(OpMIPSADDconst)
  5934  		v.AuxInt = int64(int32(-c + d))
  5935  		v.AddArg(x)
  5936  		return true
  5937  	}
  5938  	return false
  5939  }
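        // The last two SUBconst rules rely on the identities (x-d)-c == x+(-c-d)
        // and (x+d)-c == x+(d-c); the int32 conversions keep the folded immediate
        // within the 32-bit range that MOVWconst values represent.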
  5940  func rewriteValueMIPS_OpMIPSXOR_0(v *Value) bool {
  5941  	// match: (XOR x (MOVWconst [c]))
  5942  	// cond:
  5943  	// result: (XORconst [c] x)
  5944  	for {
  5945  		x := v.Args[0]
  5946  		v_1 := v.Args[1]
  5947  		if v_1.Op != OpMIPSMOVWconst {
  5948  			break
  5949  		}
  5950  		c := v_1.AuxInt
  5951  		v.reset(OpMIPSXORconst)
  5952  		v.AuxInt = c
  5953  		v.AddArg(x)
  5954  		return true
  5955  	}
  5956  	// match: (XOR (MOVWconst [c]) x)
  5957  	// cond:
  5958  	// result: (XORconst [c] x)
  5959  	for {
  5960  		v_0 := v.Args[0]
  5961  		if v_0.Op != OpMIPSMOVWconst {
  5962  			break
  5963  		}
  5964  		c := v_0.AuxInt
  5965  		x := v.Args[1]
  5966  		v.reset(OpMIPSXORconst)
  5967  		v.AuxInt = c
  5968  		v.AddArg(x)
  5969  		return true
  5970  	}
  5971  	// match: (XOR x x)
  5972  	// cond:
  5973  	// result: (MOVWconst [0])
  5974  	for {
  5975  		x := v.Args[0]
  5976  		if x != v.Args[1] {
  5977  			break
  5978  		}
  5979  		v.reset(OpMIPSMOVWconst)
  5980  		v.AuxInt = 0
  5981  		return true
  5982  	}
  5983  	return false
  5984  }
  5985  func rewriteValueMIPS_OpMIPSXORconst_0(v *Value) bool {
  5986  	// match: (XORconst [0] x)
  5987  	// cond:
  5988  	// result: x
  5989  	for {
  5990  		if v.AuxInt != 0 {
  5991  			break
  5992  		}
  5993  		x := v.Args[0]
  5994  		v.reset(OpCopy)
  5995  		v.Type = x.Type
  5996  		v.AddArg(x)
  5997  		return true
  5998  	}
  5999  	// match: (XORconst [-1] x)
  6000  	// cond:
  6001  	// result: (NORconst [0] x)
  6002  	for {
  6003  		if v.AuxInt != -1 {
  6004  			break
  6005  		}
  6006  		x := v.Args[0]
  6007  		v.reset(OpMIPSNORconst)
  6008  		v.AuxInt = 0
  6009  		v.AddArg(x)
  6010  		return true
  6011  	}
  6012  	// match: (XORconst [c] (MOVWconst [d]))
  6013  	// cond:
  6014  	// result: (MOVWconst [c^d])
  6015  	for {
  6016  		c := v.AuxInt
  6017  		v_0 := v.Args[0]
  6018  		if v_0.Op != OpMIPSMOVWconst {
  6019  			break
  6020  		}
  6021  		d := v_0.AuxInt
  6022  		v.reset(OpMIPSMOVWconst)
  6023  		v.AuxInt = c ^ d
  6024  		return true
  6025  	}
  6026  	// match: (XORconst [c] (XORconst [d] x))
  6027  	// cond:
  6028  	// result: (XORconst [c^d] x)
  6029  	for {
  6030  		c := v.AuxInt
  6031  		v_0 := v.Args[0]
  6032  		if v_0.Op != OpMIPSXORconst {
  6033  			break
  6034  		}
  6035  		d := v_0.AuxInt
  6036  		x := v_0.Args[0]
  6037  		v.reset(OpMIPSXORconst)
  6038  		v.AuxInt = c ^ d
  6039  		v.AddArg(x)
  6040  		return true
  6041  	}
  6042  	return false
  6043  }
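        // Note on the [-1] rule: x XOR -1 is the bitwise complement, and NOR with
        // the constant 0 computes exactly ^(x|0) == ^x, which is how MIPS expresses
        // a NOT without a dedicated instruction.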
  6044  func rewriteValueMIPS_OpMod16_0(v *Value) bool {
  6045  	b := v.Block
  6046  	_ = b
  6047  	types := &b.Func.Config.Types
  6048  	_ = types
  6049  	// match: (Mod16 x y)
  6050  	// cond:
  6051  	// result: (Select0 (DIV (SignExt16to32 x) (SignExt16to32 y)))
  6052  	for {
  6053  		x := v.Args[0]
  6054  		y := v.Args[1]
  6055  		v.reset(OpSelect0)
  6056  		v0 := b.NewValue0(v.Pos, OpMIPSDIV, MakeTuple(types.Int32, types.Int32))
  6057  		v1 := b.NewValue0(v.Pos, OpSignExt16to32, types.Int32)
  6058  		v1.AddArg(x)
  6059  		v0.AddArg(v1)
  6060  		v2 := b.NewValue0(v.Pos, OpSignExt16to32, types.Int32)
  6061  		v2.AddArg(y)
  6062  		v0.AddArg(v2)
  6063  		v.AddArg(v0)
  6064  		return true
  6065  	}
  6066  }
  6067  func rewriteValueMIPS_OpMod16u_0(v *Value) bool {
  6068  	b := v.Block
  6069  	_ = b
  6070  	types := &b.Func.Config.Types
  6071  	_ = types
  6072  	// match: (Mod16u x y)
  6073  	// cond:
  6074  	// result: (Select0 (DIVU (ZeroExt16to32 x) (ZeroExt16to32 y)))
  6075  	for {
  6076  		x := v.Args[0]
  6077  		y := v.Args[1]
  6078  		v.reset(OpSelect0)
  6079  		v0 := b.NewValue0(v.Pos, OpMIPSDIVU, MakeTuple(types.UInt32, types.UInt32))
  6080  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
  6081  		v1.AddArg(x)
  6082  		v0.AddArg(v1)
  6083  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
  6084  		v2.AddArg(y)
  6085  		v0.AddArg(v2)
  6086  		v.AddArg(v0)
  6087  		return true
  6088  	}
  6089  }
  6090  func rewriteValueMIPS_OpMod32_0(v *Value) bool {
  6091  	b := v.Block
  6092  	_ = b
  6093  	types := &b.Func.Config.Types
  6094  	_ = types
  6095  	// match: (Mod32 x y)
  6096  	// cond:
  6097  	// result: (Select0 (DIV x y))
  6098  	for {
  6099  		x := v.Args[0]
  6100  		y := v.Args[1]
  6101  		v.reset(OpSelect0)
  6102  		v0 := b.NewValue0(v.Pos, OpMIPSDIV, MakeTuple(types.Int32, types.Int32))
  6103  		v0.AddArg(x)
  6104  		v0.AddArg(y)
  6105  		v.AddArg(v0)
  6106  		return true
  6107  	}
  6108  }
  6109  func rewriteValueMIPS_OpMod32u_0(v *Value) bool {
  6110  	b := v.Block
  6111  	_ = b
  6112  	types := &b.Func.Config.Types
  6113  	_ = types
  6114  	// match: (Mod32u x y)
  6115  	// cond:
  6116  	// result: (Select0 (DIVU x y))
  6117  	for {
  6118  		x := v.Args[0]
  6119  		y := v.Args[1]
  6120  		v.reset(OpSelect0)
  6121  		v0 := b.NewValue0(v.Pos, OpMIPSDIVU, MakeTuple(types.UInt32, types.UInt32))
  6122  		v0.AddArg(x)
  6123  		v0.AddArg(y)
  6124  		v.AddArg(v0)
  6125  		return true
  6126  	}
  6127  }
  6128  func rewriteValueMIPS_OpMod8_0(v *Value) bool {
  6129  	b := v.Block
  6130  	_ = b
  6131  	types := &b.Func.Config.Types
  6132  	_ = types
  6133  	// match: (Mod8 x y)
  6134  	// cond:
  6135  	// result: (Select0 (DIV (SignExt8to32 x) (SignExt8to32 y)))
  6136  	for {
  6137  		x := v.Args[0]
  6138  		y := v.Args[1]
  6139  		v.reset(OpSelect0)
  6140  		v0 := b.NewValue0(v.Pos, OpMIPSDIV, MakeTuple(types.Int32, types.Int32))
  6141  		v1 := b.NewValue0(v.Pos, OpSignExt8to32, types.Int32)
  6142  		v1.AddArg(x)
  6143  		v0.AddArg(v1)
  6144  		v2 := b.NewValue0(v.Pos, OpSignExt8to32, types.Int32)
  6145  		v2.AddArg(y)
  6146  		v0.AddArg(v2)
  6147  		v.AddArg(v0)
  6148  		return true
  6149  	}
  6150  }
  6151  func rewriteValueMIPS_OpMod8u_0(v *Value) bool {
  6152  	b := v.Block
  6153  	_ = b
  6154  	types := &b.Func.Config.Types
  6155  	_ = types
  6156  	// match: (Mod8u x y)
  6157  	// cond:
  6158  	// result: (Select0 (DIVU (ZeroExt8to32 x) (ZeroExt8to32 y)))
  6159  	for {
  6160  		x := v.Args[0]
  6161  		y := v.Args[1]
  6162  		v.reset(OpSelect0)
  6163  		v0 := b.NewValue0(v.Pos, OpMIPSDIVU, MakeTuple(types.UInt32, types.UInt32))
  6164  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
  6165  		v1.AddArg(x)
  6166  		v0.AddArg(v1)
  6167  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
  6168  		v2.AddArg(y)
  6169  		v0.AddArg(v2)
  6170  		v.AddArg(v0)
  6171  		return true
  6172  	}
  6173  }
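        // In the Mod* lowerings above, DIV/DIVU produce a two-element tuple and
        // Select0 picks the remainder (the HI register on MIPS); the corresponding
        // Div* lowerings use Select1, the quotient left in LO.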
  6174  func rewriteValueMIPS_OpMove_0(v *Value) bool {
  6175  	b := v.Block
  6176  	_ = b
  6177  	types := &b.Func.Config.Types
  6178  	_ = types
  6179  	// match: (Move [0] _ _ mem)
  6180  	// cond:
  6181  	// result: mem
  6182  	for {
  6183  		if v.AuxInt != 0 {
  6184  			break
  6185  		}
  6186  		mem := v.Args[2]
  6187  		v.reset(OpCopy)
  6188  		v.Type = mem.Type
  6189  		v.AddArg(mem)
  6190  		return true
  6191  	}
  6192  	// match: (Move [1] dst src mem)
  6193  	// cond:
  6194  	// result: (MOVBstore dst (MOVBUload src mem) mem)
  6195  	for {
  6196  		if v.AuxInt != 1 {
  6197  			break
  6198  		}
  6199  		dst := v.Args[0]
  6200  		src := v.Args[1]
  6201  		mem := v.Args[2]
  6202  		v.reset(OpMIPSMOVBstore)
  6203  		v.AddArg(dst)
  6204  		v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, types.UInt8)
  6205  		v0.AddArg(src)
  6206  		v0.AddArg(mem)
  6207  		v.AddArg(v0)
  6208  		v.AddArg(mem)
  6209  		return true
  6210  	}
  6211  	// match: (Move [2] {t} dst src mem)
  6212  	// cond: t.(Type).Alignment()%2 == 0
  6213  	// result: (MOVHstore dst (MOVHUload src mem) mem)
  6214  	for {
  6215  		if v.AuxInt != 2 {
  6216  			break
  6217  		}
  6218  		t := v.Aux
  6219  		dst := v.Args[0]
  6220  		src := v.Args[1]
  6221  		mem := v.Args[2]
  6222  		if !(t.(Type).Alignment()%2 == 0) {
  6223  			break
  6224  		}
  6225  		v.reset(OpMIPSMOVHstore)
  6226  		v.AddArg(dst)
  6227  		v0 := b.NewValue0(v.Pos, OpMIPSMOVHUload, types.UInt16)
  6228  		v0.AddArg(src)
  6229  		v0.AddArg(mem)
  6230  		v.AddArg(v0)
  6231  		v.AddArg(mem)
  6232  		return true
  6233  	}
  6234  	// match: (Move [2] dst src mem)
  6235  	// cond:
  6236  	// result: (MOVBstore [1] dst (MOVBUload [1] src mem) (MOVBstore dst (MOVBUload src mem) mem))
  6237  	for {
  6238  		if v.AuxInt != 2 {
  6239  			break
  6240  		}
  6241  		dst := v.Args[0]
  6242  		src := v.Args[1]
  6243  		mem := v.Args[2]
  6244  		v.reset(OpMIPSMOVBstore)
  6245  		v.AuxInt = 1
  6246  		v.AddArg(dst)
  6247  		v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, types.UInt8)
  6248  		v0.AuxInt = 1
  6249  		v0.AddArg(src)
  6250  		v0.AddArg(mem)
  6251  		v.AddArg(v0)
  6252  		v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, TypeMem)
  6253  		v1.AddArg(dst)
  6254  		v2 := b.NewValue0(v.Pos, OpMIPSMOVBUload, types.UInt8)
  6255  		v2.AddArg(src)
  6256  		v2.AddArg(mem)
  6257  		v1.AddArg(v2)
  6258  		v1.AddArg(mem)
  6259  		v.AddArg(v1)
  6260  		return true
  6261  	}
  6262  	// match: (Move [4] {t} dst src mem)
  6263  	// cond: t.(Type).Alignment()%4 == 0
  6264  	// result: (MOVWstore dst (MOVWload src mem) mem)
  6265  	for {
  6266  		if v.AuxInt != 4 {
  6267  			break
  6268  		}
  6269  		t := v.Aux
  6270  		dst := v.Args[0]
  6271  		src := v.Args[1]
  6272  		mem := v.Args[2]
  6273  		if !(t.(Type).Alignment()%4 == 0) {
  6274  			break
  6275  		}
  6276  		v.reset(OpMIPSMOVWstore)
  6277  		v.AddArg(dst)
  6278  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWload, types.UInt32)
  6279  		v0.AddArg(src)
  6280  		v0.AddArg(mem)
  6281  		v.AddArg(v0)
  6282  		v.AddArg(mem)
  6283  		return true
  6284  	}
  6285  	// match: (Move [4] {t} dst src mem)
  6286  	// cond: t.(Type).Alignment()%2 == 0
  6287  	// result: (MOVHstore [2] dst (MOVHUload [2] src mem) (MOVHstore dst (MOVHUload src mem) mem))
  6288  	for {
  6289  		if v.AuxInt != 4 {
  6290  			break
  6291  		}
  6292  		t := v.Aux
  6293  		dst := v.Args[0]
  6294  		src := v.Args[1]
  6295  		mem := v.Args[2]
  6296  		if !(t.(Type).Alignment()%2 == 0) {
  6297  			break
  6298  		}
  6299  		v.reset(OpMIPSMOVHstore)
  6300  		v.AuxInt = 2
  6301  		v.AddArg(dst)
  6302  		v0 := b.NewValue0(v.Pos, OpMIPSMOVHUload, types.UInt16)
  6303  		v0.AuxInt = 2
  6304  		v0.AddArg(src)
  6305  		v0.AddArg(mem)
  6306  		v.AddArg(v0)
  6307  		v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, TypeMem)
  6308  		v1.AddArg(dst)
  6309  		v2 := b.NewValue0(v.Pos, OpMIPSMOVHUload, types.UInt16)
  6310  		v2.AddArg(src)
  6311  		v2.AddArg(mem)
  6312  		v1.AddArg(v2)
  6313  		v1.AddArg(mem)
  6314  		v.AddArg(v1)
  6315  		return true
  6316  	}
  6317  	// match: (Move [4] dst src mem)
  6318  	// cond:
  6319  	// result: (MOVBstore [3] dst (MOVBUload [3] src mem) (MOVBstore [2] dst (MOVBUload [2] src mem) (MOVBstore [1] dst (MOVBUload [1] src mem) (MOVBstore dst (MOVBUload src mem) mem))))
  6320  	for {
  6321  		if v.AuxInt != 4 {
  6322  			break
  6323  		}
  6324  		dst := v.Args[0]
  6325  		src := v.Args[1]
  6326  		mem := v.Args[2]
  6327  		v.reset(OpMIPSMOVBstore)
  6328  		v.AuxInt = 3
  6329  		v.AddArg(dst)
  6330  		v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, types.UInt8)
  6331  		v0.AuxInt = 3
  6332  		v0.AddArg(src)
  6333  		v0.AddArg(mem)
  6334  		v.AddArg(v0)
  6335  		v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, TypeMem)
  6336  		v1.AuxInt = 2
  6337  		v1.AddArg(dst)
  6338  		v2 := b.NewValue0(v.Pos, OpMIPSMOVBUload, types.UInt8)
  6339  		v2.AuxInt = 2
  6340  		v2.AddArg(src)
  6341  		v2.AddArg(mem)
  6342  		v1.AddArg(v2)
  6343  		v3 := b.NewValue0(v.Pos, OpMIPSMOVBstore, TypeMem)
  6344  		v3.AuxInt = 1
  6345  		v3.AddArg(dst)
  6346  		v4 := b.NewValue0(v.Pos, OpMIPSMOVBUload, types.UInt8)
  6347  		v4.AuxInt = 1
  6348  		v4.AddArg(src)
  6349  		v4.AddArg(mem)
  6350  		v3.AddArg(v4)
  6351  		v5 := b.NewValue0(v.Pos, OpMIPSMOVBstore, TypeMem)
  6352  		v5.AddArg(dst)
  6353  		v6 := b.NewValue0(v.Pos, OpMIPSMOVBUload, types.UInt8)
  6354  		v6.AddArg(src)
  6355  		v6.AddArg(mem)
  6356  		v5.AddArg(v6)
  6357  		v5.AddArg(mem)
  6358  		v3.AddArg(v5)
  6359  		v1.AddArg(v3)
  6360  		v.AddArg(v1)
  6361  		return true
  6362  	}
  6363  	// match: (Move [3] dst src mem)
  6364  	// cond:
  6365  	// result: (MOVBstore [2] dst (MOVBUload [2] src mem) (MOVBstore [1] dst (MOVBUload [1] src mem) (MOVBstore dst (MOVBUload src mem) mem)))
  6366  	for {
  6367  		if v.AuxInt != 3 {
  6368  			break
  6369  		}
  6370  		dst := v.Args[0]
  6371  		src := v.Args[1]
  6372  		mem := v.Args[2]
  6373  		v.reset(OpMIPSMOVBstore)
  6374  		v.AuxInt = 2
  6375  		v.AddArg(dst)
  6376  		v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, types.UInt8)
  6377  		v0.AuxInt = 2
  6378  		v0.AddArg(src)
  6379  		v0.AddArg(mem)
  6380  		v.AddArg(v0)
  6381  		v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, TypeMem)
  6382  		v1.AuxInt = 1
  6383  		v1.AddArg(dst)
  6384  		v2 := b.NewValue0(v.Pos, OpMIPSMOVBUload, types.UInt8)
  6385  		v2.AuxInt = 1
  6386  		v2.AddArg(src)
  6387  		v2.AddArg(mem)
  6388  		v1.AddArg(v2)
  6389  		v3 := b.NewValue0(v.Pos, OpMIPSMOVBstore, TypeMem)
  6390  		v3.AddArg(dst)
  6391  		v4 := b.NewValue0(v.Pos, OpMIPSMOVBUload, types.UInt8)
  6392  		v4.AddArg(src)
  6393  		v4.AddArg(mem)
  6394  		v3.AddArg(v4)
  6395  		v3.AddArg(mem)
  6396  		v1.AddArg(v3)
  6397  		v.AddArg(v1)
  6398  		return true
  6399  	}
  6400  	// match: (Move [8] {t} dst src mem)
  6401  	// cond: t.(Type).Alignment()%4 == 0
  6402  	// result: (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))
  6403  	for {
  6404  		if v.AuxInt != 8 {
  6405  			break
  6406  		}
  6407  		t := v.Aux
  6408  		dst := v.Args[0]
  6409  		src := v.Args[1]
  6410  		mem := v.Args[2]
  6411  		if !(t.(Type).Alignment()%4 == 0) {
  6412  			break
  6413  		}
  6414  		v.reset(OpMIPSMOVWstore)
  6415  		v.AuxInt = 4
  6416  		v.AddArg(dst)
  6417  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWload, types.UInt32)
  6418  		v0.AuxInt = 4
  6419  		v0.AddArg(src)
  6420  		v0.AddArg(mem)
  6421  		v.AddArg(v0)
  6422  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, TypeMem)
  6423  		v1.AddArg(dst)
  6424  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWload, types.UInt32)
  6425  		v2.AddArg(src)
  6426  		v2.AddArg(mem)
  6427  		v1.AddArg(v2)
  6428  		v1.AddArg(mem)
  6429  		v.AddArg(v1)
  6430  		return true
  6431  	}
  6432  	// match: (Move [8] {t} dst src mem)
  6433  	// cond: t.(Type).Alignment()%2 == 0
  6434  	// result: (MOVHstore [6] dst (MOVHload [6] src mem) (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))))
  6435  	for {
  6436  		if v.AuxInt != 8 {
  6437  			break
  6438  		}
  6439  		t := v.Aux
  6440  		dst := v.Args[0]
  6441  		src := v.Args[1]
  6442  		mem := v.Args[2]
  6443  		if !(t.(Type).Alignment()%2 == 0) {
  6444  			break
  6445  		}
  6446  		v.reset(OpMIPSMOVHstore)
  6447  		v.AuxInt = 6
  6448  		v.AddArg(dst)
  6449  		v0 := b.NewValue0(v.Pos, OpMIPSMOVHload, types.Int16)
  6450  		v0.AuxInt = 6
  6451  		v0.AddArg(src)
  6452  		v0.AddArg(mem)
  6453  		v.AddArg(v0)
  6454  		v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, TypeMem)
  6455  		v1.AuxInt = 4
  6456  		v1.AddArg(dst)
  6457  		v2 := b.NewValue0(v.Pos, OpMIPSMOVHload, types.Int16)
  6458  		v2.AuxInt = 4
  6459  		v2.AddArg(src)
  6460  		v2.AddArg(mem)
  6461  		v1.AddArg(v2)
  6462  		v3 := b.NewValue0(v.Pos, OpMIPSMOVHstore, TypeMem)
  6463  		v3.AuxInt = 2
  6464  		v3.AddArg(dst)
  6465  		v4 := b.NewValue0(v.Pos, OpMIPSMOVHload, types.Int16)
  6466  		v4.AuxInt = 2
  6467  		v4.AddArg(src)
  6468  		v4.AddArg(mem)
  6469  		v3.AddArg(v4)
  6470  		v5 := b.NewValue0(v.Pos, OpMIPSMOVHstore, TypeMem)
  6471  		v5.AddArg(dst)
  6472  		v6 := b.NewValue0(v.Pos, OpMIPSMOVHload, types.Int16)
  6473  		v6.AddArg(src)
  6474  		v6.AddArg(mem)
  6475  		v5.AddArg(v6)
  6476  		v5.AddArg(mem)
  6477  		v3.AddArg(v5)
  6478  		v1.AddArg(v3)
  6479  		v.AddArg(v1)
  6480  		return true
  6481  	}
  6482  	return false
  6483  }
  6484  func rewriteValueMIPS_OpMove_10(v *Value) bool {
  6485  	b := v.Block
  6486  	_ = b
  6487  	config := b.Func.Config
  6488  	_ = config
  6489  	types := &b.Func.Config.Types
  6490  	_ = types
  6491  	// match: (Move [6] {t} dst src mem)
  6492  	// cond: t.(Type).Alignment()%2 == 0
  6493  	// result: (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)))
  6494  	for {
  6495  		if v.AuxInt != 6 {
  6496  			break
  6497  		}
  6498  		t := v.Aux
  6499  		dst := v.Args[0]
  6500  		src := v.Args[1]
  6501  		mem := v.Args[2]
  6502  		if !(t.(Type).Alignment()%2 == 0) {
  6503  			break
  6504  		}
  6505  		v.reset(OpMIPSMOVHstore)
  6506  		v.AuxInt = 4
  6507  		v.AddArg(dst)
  6508  		v0 := b.NewValue0(v.Pos, OpMIPSMOVHload, types.Int16)
  6509  		v0.AuxInt = 4
  6510  		v0.AddArg(src)
  6511  		v0.AddArg(mem)
  6512  		v.AddArg(v0)
  6513  		v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, TypeMem)
  6514  		v1.AuxInt = 2
  6515  		v1.AddArg(dst)
  6516  		v2 := b.NewValue0(v.Pos, OpMIPSMOVHload, types.Int16)
  6517  		v2.AuxInt = 2
  6518  		v2.AddArg(src)
  6519  		v2.AddArg(mem)
  6520  		v1.AddArg(v2)
  6521  		v3 := b.NewValue0(v.Pos, OpMIPSMOVHstore, TypeMem)
  6522  		v3.AddArg(dst)
  6523  		v4 := b.NewValue0(v.Pos, OpMIPSMOVHload, types.Int16)
  6524  		v4.AddArg(src)
  6525  		v4.AddArg(mem)
  6526  		v3.AddArg(v4)
  6527  		v3.AddArg(mem)
  6528  		v1.AddArg(v3)
  6529  		v.AddArg(v1)
  6530  		return true
  6531  	}
  6532  	// match: (Move [12] {t} dst src mem)
  6533  	// cond: t.(Type).Alignment()%4 == 0
  6534  	// result: (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem)))
  6535  	for {
  6536  		if v.AuxInt != 12 {
  6537  			break
  6538  		}
  6539  		t := v.Aux
  6540  		dst := v.Args[0]
  6541  		src := v.Args[1]
  6542  		mem := v.Args[2]
  6543  		if !(t.(Type).Alignment()%4 == 0) {
  6544  			break
  6545  		}
  6546  		v.reset(OpMIPSMOVWstore)
  6547  		v.AuxInt = 8
  6548  		v.AddArg(dst)
  6549  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWload, types.UInt32)
  6550  		v0.AuxInt = 8
  6551  		v0.AddArg(src)
  6552  		v0.AddArg(mem)
  6553  		v.AddArg(v0)
  6554  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, TypeMem)
  6555  		v1.AuxInt = 4
  6556  		v1.AddArg(dst)
  6557  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWload, types.UInt32)
  6558  		v2.AuxInt = 4
  6559  		v2.AddArg(src)
  6560  		v2.AddArg(mem)
  6561  		v1.AddArg(v2)
  6562  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWstore, TypeMem)
  6563  		v3.AddArg(dst)
  6564  		v4 := b.NewValue0(v.Pos, OpMIPSMOVWload, types.UInt32)
  6565  		v4.AddArg(src)
  6566  		v4.AddArg(mem)
  6567  		v3.AddArg(v4)
  6568  		v3.AddArg(mem)
  6569  		v1.AddArg(v3)
  6570  		v.AddArg(v1)
  6571  		return true
  6572  	}
  6573  	// match: (Move [16] {t} dst src mem)
  6574  	// cond: t.(Type).Alignment()%4 == 0
  6575  	// result: (MOVWstore [12] dst (MOVWload [12] src mem) (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))))
  6576  	for {
  6577  		if v.AuxInt != 16 {
  6578  			break
  6579  		}
  6580  		t := v.Aux
  6581  		dst := v.Args[0]
  6582  		src := v.Args[1]
  6583  		mem := v.Args[2]
  6584  		if !(t.(Type).Alignment()%4 == 0) {
  6585  			break
  6586  		}
  6587  		v.reset(OpMIPSMOVWstore)
  6588  		v.AuxInt = 12
  6589  		v.AddArg(dst)
  6590  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWload, types.UInt32)
  6591  		v0.AuxInt = 12
  6592  		v0.AddArg(src)
  6593  		v0.AddArg(mem)
  6594  		v.AddArg(v0)
  6595  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, TypeMem)
  6596  		v1.AuxInt = 8
  6597  		v1.AddArg(dst)
  6598  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWload, types.UInt32)
  6599  		v2.AuxInt = 8
  6600  		v2.AddArg(src)
  6601  		v2.AddArg(mem)
  6602  		v1.AddArg(v2)
  6603  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWstore, TypeMem)
  6604  		v3.AuxInt = 4
  6605  		v3.AddArg(dst)
  6606  		v4 := b.NewValue0(v.Pos, OpMIPSMOVWload, types.UInt32)
  6607  		v4.AuxInt = 4
  6608  		v4.AddArg(src)
  6609  		v4.AddArg(mem)
  6610  		v3.AddArg(v4)
  6611  		v5 := b.NewValue0(v.Pos, OpMIPSMOVWstore, TypeMem)
  6612  		v5.AddArg(dst)
  6613  		v6 := b.NewValue0(v.Pos, OpMIPSMOVWload, types.UInt32)
  6614  		v6.AddArg(src)
  6615  		v6.AddArg(mem)
  6616  		v5.AddArg(v6)
  6617  		v5.AddArg(mem)
  6618  		v3.AddArg(v5)
  6619  		v1.AddArg(v3)
  6620  		v.AddArg(v1)
  6621  		return true
  6622  	}
  6623  	// match: (Move [s] {t} dst src mem)
  6624  	// cond: (s > 16 || t.(Type).Alignment()%4 != 0)
  6625  	// result: (LoweredMove [t.(Type).Alignment()] dst src (ADDconst <src.Type> src [s-moveSize(t.(Type).Alignment(), config)]) mem)
  6626  	for {
  6627  		s := v.AuxInt
  6628  		t := v.Aux
  6629  		dst := v.Args[0]
  6630  		src := v.Args[1]
  6631  		mem := v.Args[2]
  6632  		if !(s > 16 || t.(Type).Alignment()%4 != 0) {
  6633  			break
  6634  		}
  6635  		v.reset(OpMIPSLoweredMove)
  6636  		v.AuxInt = t.(Type).Alignment()
  6637  		v.AddArg(dst)
  6638  		v.AddArg(src)
  6639  		v0 := b.NewValue0(v.Pos, OpMIPSADDconst, src.Type)
  6640  		v0.AuxInt = s - moveSize(t.(Type).Alignment(), config)
  6641  		v0.AddArg(src)
  6642  		v.AddArg(v0)
  6643  		v.AddArg(mem)
  6644  		return true
  6645  	}
  6646  	return false
  6647  }
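        // Moves larger than 16 bytes, or without 4-byte alignment, fall back to the
        // LoweredMove pseudo-op; the ADDconst built here appears to compute src plus
        // the size minus one element (per moveSize), i.e. the address of the last
        // element, which the copy loop uses as its stopping bound.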
  6648  func rewriteValueMIPS_OpMul16_0(v *Value) bool {
  6649  	// match: (Mul16 x y)
  6650  	// cond:
  6651  	// result: (MUL x y)
  6652  	for {
  6653  		x := v.Args[0]
  6654  		y := v.Args[1]
  6655  		v.reset(OpMIPSMUL)
  6656  		v.AddArg(x)
  6657  		v.AddArg(y)
  6658  		return true
  6659  	}
  6660  }
  6661  func rewriteValueMIPS_OpMul32_0(v *Value) bool {
  6662  	// match: (Mul32 x y)
  6663  	// cond:
  6664  	// result: (MUL x y)
  6665  	for {
  6666  		x := v.Args[0]
  6667  		y := v.Args[1]
  6668  		v.reset(OpMIPSMUL)
  6669  		v.AddArg(x)
  6670  		v.AddArg(y)
  6671  		return true
  6672  	}
  6673  }
  6674  func rewriteValueMIPS_OpMul32F_0(v *Value) bool {
  6675  	// match: (Mul32F x y)
  6676  	// cond:
  6677  	// result: (MULF x y)
  6678  	for {
  6679  		x := v.Args[0]
  6680  		y := v.Args[1]
  6681  		v.reset(OpMIPSMULF)
  6682  		v.AddArg(x)
  6683  		v.AddArg(y)
  6684  		return true
  6685  	}
  6686  }
  6687  func rewriteValueMIPS_OpMul32uhilo_0(v *Value) bool {
  6688  	// match: (Mul32uhilo x y)
  6689  	// cond:
  6690  	// result: (MULTU x y)
  6691  	for {
  6692  		x := v.Args[0]
  6693  		y := v.Args[1]
  6694  		v.reset(OpMIPSMULTU)
  6695  		v.AddArg(x)
  6696  		v.AddArg(y)
  6697  		return true
  6698  	}
  6699  }
  6700  func rewriteValueMIPS_OpMul64F_0(v *Value) bool {
  6701  	// match: (Mul64F x y)
  6702  	// cond:
  6703  	// result: (MULD x y)
  6704  	for {
  6705  		x := v.Args[0]
  6706  		y := v.Args[1]
  6707  		v.reset(OpMIPSMULD)
  6708  		v.AddArg(x)
  6709  		v.AddArg(y)
  6710  		return true
  6711  	}
  6712  }
  6713  func rewriteValueMIPS_OpMul8_0(v *Value) bool {
  6714  	// match: (Mul8 x y)
  6715  	// cond:
  6716  	// result: (MUL x y)
  6717  	for {
  6718  		x := v.Args[0]
  6719  		y := v.Args[1]
  6720  		v.reset(OpMIPSMUL)
  6721  		v.AddArg(x)
  6722  		v.AddArg(y)
  6723  		return true
  6724  	}
  6725  }
  6726  func rewriteValueMIPS_OpNeg16_0(v *Value) bool {
  6727  	// match: (Neg16 x)
  6728  	// cond:
  6729  	// result: (NEG x)
  6730  	for {
  6731  		x := v.Args[0]
  6732  		v.reset(OpMIPSNEG)
  6733  		v.AddArg(x)
  6734  		return true
  6735  	}
  6736  }
  6737  func rewriteValueMIPS_OpNeg32_0(v *Value) bool {
  6738  	// match: (Neg32 x)
  6739  	// cond:
  6740  	// result: (NEG x)
  6741  	for {
  6742  		x := v.Args[0]
  6743  		v.reset(OpMIPSNEG)
  6744  		v.AddArg(x)
  6745  		return true
  6746  	}
  6747  }
  6748  func rewriteValueMIPS_OpNeg32F_0(v *Value) bool {
  6749  	// match: (Neg32F x)
  6750  	// cond:
  6751  	// result: (NEGF x)
  6752  	for {
  6753  		x := v.Args[0]
  6754  		v.reset(OpMIPSNEGF)
  6755  		v.AddArg(x)
  6756  		return true
  6757  	}
  6758  }
  6759  func rewriteValueMIPS_OpNeg64F_0(v *Value) bool {
  6760  	// match: (Neg64F x)
  6761  	// cond:
  6762  	// result: (NEGD x)
  6763  	for {
  6764  		x := v.Args[0]
  6765  		v.reset(OpMIPSNEGD)
  6766  		v.AddArg(x)
  6767  		return true
  6768  	}
  6769  }
  6770  func rewriteValueMIPS_OpNeg8_0(v *Value) bool {
  6771  	// match: (Neg8 x)
  6772  	// cond:
  6773  	// result: (NEG x)
  6774  	for {
  6775  		x := v.Args[0]
  6776  		v.reset(OpMIPSNEG)
  6777  		v.AddArg(x)
  6778  		return true
  6779  	}
  6780  }
  6781  func rewriteValueMIPS_OpNeq16_0(v *Value) bool {
  6782  	b := v.Block
  6783  	_ = b
  6784  	types := &b.Func.Config.Types
  6785  	_ = types
  6786  	// match: (Neq16 x y)
  6787  	// cond:
  6788  	// result: (SGTU (XOR (ZeroExt16to32 x) (ZeroExt16to32 y)) (MOVWconst [0]))
  6789  	for {
  6790  		x := v.Args[0]
  6791  		y := v.Args[1]
  6792  		v.reset(OpMIPSSGTU)
  6793  		v0 := b.NewValue0(v.Pos, OpMIPSXOR, types.UInt32)
  6794  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
  6795  		v1.AddArg(x)
  6796  		v0.AddArg(v1)
  6797  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
  6798  		v2.AddArg(y)
  6799  		v0.AddArg(v2)
  6800  		v.AddArg(v0)
  6801  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  6802  		v3.AuxInt = 0
  6803  		v.AddArg(v3)
  6804  		return true
  6805  	}
  6806  }
  6807  func rewriteValueMIPS_OpNeq32_0(v *Value) bool {
  6808  	b := v.Block
  6809  	_ = b
  6810  	types := &b.Func.Config.Types
  6811  	_ = types
  6812  	// match: (Neq32 x y)
  6813  	// cond:
  6814  	// result: (SGTU (XOR x y) (MOVWconst [0]))
  6815  	for {
  6816  		x := v.Args[0]
  6817  		y := v.Args[1]
  6818  		v.reset(OpMIPSSGTU)
  6819  		v0 := b.NewValue0(v.Pos, OpMIPSXOR, types.UInt32)
  6820  		v0.AddArg(x)
  6821  		v0.AddArg(y)
  6822  		v.AddArg(v0)
  6823  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  6824  		v1.AuxInt = 0
  6825  		v.AddArg(v1)
  6826  		return true
  6827  	}
  6828  }
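        // The inequality is built from XOR: x != y exactly when x^y has some bit
        // set, so SGTU (x^y) 0 yields the boolean result.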
  6829  func rewriteValueMIPS_OpNeq32F_0(v *Value) bool {
  6830  	b := v.Block
  6831  	_ = b
  6832  	// match: (Neq32F x y)
  6833  	// cond:
  6834  	// result: (FPFlagFalse (CMPEQF x y))
  6835  	for {
  6836  		x := v.Args[0]
  6837  		y := v.Args[1]
  6838  		v.reset(OpMIPSFPFlagFalse)
  6839  		v0 := b.NewValue0(v.Pos, OpMIPSCMPEQF, TypeFlags)
  6840  		v0.AddArg(x)
  6841  		v0.AddArg(y)
  6842  		v.AddArg(v0)
  6843  		return true
  6844  	}
  6845  }
  6846  func rewriteValueMIPS_OpNeq64F_0(v *Value) bool {
  6847  	b := v.Block
  6848  	_ = b
  6849  	// match: (Neq64F x y)
  6850  	// cond:
  6851  	// result: (FPFlagFalse (CMPEQD x y))
  6852  	for {
  6853  		x := v.Args[0]
  6854  		y := v.Args[1]
  6855  		v.reset(OpMIPSFPFlagFalse)
  6856  		v0 := b.NewValue0(v.Pos, OpMIPSCMPEQD, TypeFlags)
  6857  		v0.AddArg(x)
  6858  		v0.AddArg(y)
  6859  		v.AddArg(v0)
  6860  		return true
  6861  	}
  6862  }
  6863  func rewriteValueMIPS_OpNeq8_0(v *Value) bool {
  6864  	b := v.Block
  6865  	_ = b
  6866  	types := &b.Func.Config.Types
  6867  	_ = types
  6868  	// match: (Neq8 x y)
  6869  	// cond:
  6870  	// result: (SGTU (XOR (ZeroExt8to32 x) (ZeroExt8to32 y)) (MOVWconst [0]))
  6871  	for {
  6872  		x := v.Args[0]
  6873  		y := v.Args[1]
  6874  		v.reset(OpMIPSSGTU)
  6875  		v0 := b.NewValue0(v.Pos, OpMIPSXOR, types.UInt32)
  6876  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
  6877  		v1.AddArg(x)
  6878  		v0.AddArg(v1)
  6879  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
  6880  		v2.AddArg(y)
  6881  		v0.AddArg(v2)
  6882  		v.AddArg(v0)
  6883  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  6884  		v3.AuxInt = 0
  6885  		v.AddArg(v3)
  6886  		return true
  6887  	}
  6888  }
  6889  func rewriteValueMIPS_OpNeqB_0(v *Value) bool {
  6890  	// match: (NeqB x y)
  6891  	// cond:
  6892  	// result: (XOR x y)
  6893  	for {
  6894  		x := v.Args[0]
  6895  		y := v.Args[1]
  6896  		v.reset(OpMIPSXOR)
  6897  		v.AddArg(x)
  6898  		v.AddArg(y)
  6899  		return true
  6900  	}
  6901  }
  6902  func rewriteValueMIPS_OpNeqPtr_0(v *Value) bool {
  6903  	b := v.Block
  6904  	_ = b
  6905  	types := &b.Func.Config.Types
  6906  	_ = types
  6907  	// match: (NeqPtr x y)
  6908  	// cond:
  6909  	// result: (SGTU (XOR x y) (MOVWconst [0]))
  6910  	for {
  6911  		x := v.Args[0]
  6912  		y := v.Args[1]
  6913  		v.reset(OpMIPSSGTU)
  6914  		v0 := b.NewValue0(v.Pos, OpMIPSXOR, types.UInt32)
  6915  		v0.AddArg(x)
  6916  		v0.AddArg(y)
  6917  		v.AddArg(v0)
  6918  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  6919  		v1.AuxInt = 0
  6920  		v.AddArg(v1)
  6921  		return true
  6922  	}
  6923  }
  6924  func rewriteValueMIPS_OpNilCheck_0(v *Value) bool {
  6925  	// match: (NilCheck ptr mem)
  6926  	// cond:
  6927  	// result: (LoweredNilCheck ptr mem)
  6928  	for {
  6929  		ptr := v.Args[0]
  6930  		mem := v.Args[1]
  6931  		v.reset(OpMIPSLoweredNilCheck)
  6932  		v.AddArg(ptr)
  6933  		v.AddArg(mem)
  6934  		return true
  6935  	}
  6936  }
  6937  func rewriteValueMIPS_OpNot_0(v *Value) bool {
  6938  	// match: (Not x)
  6939  	// cond:
  6940  	// result: (XORconst [1] x)
  6941  	for {
  6942  		x := v.Args[0]
  6943  		v.reset(OpMIPSXORconst)
  6944  		v.AuxInt = 1
  6945  		v.AddArg(x)
  6946  		return true
  6947  	}
  6948  }
  6949  func rewriteValueMIPS_OpOffPtr_0(v *Value) bool {
  6950  	// match: (OffPtr [off] ptr:(SP))
  6951  	// cond:
  6952  	// result: (MOVWaddr [off] ptr)
  6953  	for {
  6954  		off := v.AuxInt
  6955  		ptr := v.Args[0]
  6956  		if ptr.Op != OpSP {
  6957  			break
  6958  		}
  6959  		v.reset(OpMIPSMOVWaddr)
  6960  		v.AuxInt = off
  6961  		v.AddArg(ptr)
  6962  		return true
  6963  	}
  6964  	// match: (OffPtr [off] ptr)
  6965  	// cond:
  6966  	// result: (ADDconst [off] ptr)
  6967  	for {
  6968  		off := v.AuxInt
  6969  		ptr := v.Args[0]
  6970  		v.reset(OpMIPSADDconst)
  6971  		v.AuxInt = off
  6972  		v.AddArg(ptr)
  6973  		return true
  6974  	}
  6975  }
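        // Stack-pointer offsets become MOVWaddr so the offset stays a symbolic
        // address that later passes can fold into the frame layout (presumably why
        // SP is special-cased here); any other pointer just gets a plain ADDconst.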
  6976  func rewriteValueMIPS_OpOr16_0(v *Value) bool {
  6977  	// match: (Or16 x y)
  6978  	// cond:
  6979  	// result: (OR x y)
  6980  	for {
  6981  		x := v.Args[0]
  6982  		y := v.Args[1]
  6983  		v.reset(OpMIPSOR)
  6984  		v.AddArg(x)
  6985  		v.AddArg(y)
  6986  		return true
  6987  	}
  6988  }
  6989  func rewriteValueMIPS_OpOr32_0(v *Value) bool {
  6990  	// match: (Or32 x y)
  6991  	// cond:
  6992  	// result: (OR x y)
  6993  	for {
  6994  		x := v.Args[0]
  6995  		y := v.Args[1]
  6996  		v.reset(OpMIPSOR)
  6997  		v.AddArg(x)
  6998  		v.AddArg(y)
  6999  		return true
  7000  	}
  7001  }
  7002  func rewriteValueMIPS_OpOr8_0(v *Value) bool {
  7003  	// match: (Or8 x y)
  7004  	// cond:
  7005  	// result: (OR x y)
  7006  	for {
  7007  		x := v.Args[0]
  7008  		y := v.Args[1]
  7009  		v.reset(OpMIPSOR)
  7010  		v.AddArg(x)
  7011  		v.AddArg(y)
  7012  		return true
  7013  	}
  7014  }
  7015  func rewriteValueMIPS_OpOrB_0(v *Value) bool {
  7016  	// match: (OrB x y)
  7017  	// cond:
  7018  	// result: (OR x y)
  7019  	for {
  7020  		x := v.Args[0]
  7021  		y := v.Args[1]
  7022  		v.reset(OpMIPSOR)
  7023  		v.AddArg(x)
  7024  		v.AddArg(y)
  7025  		return true
  7026  	}
  7027  }
  7028  func rewriteValueMIPS_OpRound32F_0(v *Value) bool {
  7029  	// match: (Round32F x)
  7030  	// cond:
  7031  	// result: x
  7032  	for {
  7033  		x := v.Args[0]
  7034  		v.reset(OpCopy)
  7035  		v.Type = x.Type
  7036  		v.AddArg(x)
  7037  		return true
  7038  	}
  7039  }
  7040  func rewriteValueMIPS_OpRound64F_0(v *Value) bool {
  7041  	// match: (Round64F x)
  7042  	// cond:
  7043  	// result: x
  7044  	for {
  7045  		x := v.Args[0]
  7046  		v.reset(OpCopy)
  7047  		v.Type = x.Type
  7048  		v.AddArg(x)
  7049  		return true
  7050  	}
  7051  }
  7052  func rewriteValueMIPS_OpRsh16Ux16_0(v *Value) bool {
  7053  	b := v.Block
  7054  	_ = b
  7055  	types := &b.Func.Config.Types
  7056  	_ = types
  7057  	// match: (Rsh16Ux16 <t> x y)
  7058  	// cond:
  7059  	// result: (CMOVZ (SRL <t> (ZeroExt16to32 x) (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
  7060  	for {
  7061  		t := v.Type
  7062  		x := v.Args[0]
  7063  		y := v.Args[1]
  7064  		v.reset(OpMIPSCMOVZ)
  7065  		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7066  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
  7067  		v1.AddArg(x)
  7068  		v0.AddArg(v1)
  7069  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
  7070  		v2.AddArg(y)
  7071  		v0.AddArg(v2)
  7072  		v.AddArg(v0)
  7073  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  7074  		v3.AuxInt = 0
  7075  		v.AddArg(v3)
  7076  		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, types.Bool)
  7077  		v4.AuxInt = 32
  7078  		v5 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
  7079  		v5.AddArg(y)
  7080  		v4.AddArg(v5)
  7081  		v.AddArg(v4)
  7082  		return true
  7083  	}
  7084  }
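        // Pattern note: the hardware shift uses only the low 5 bits of the shift
        // amount, so variable shifts are wrapped in CMOVZ. (SGTUconst [32] y) is 1
        // while the zero-extended count is below 32, keeping the SRL result; once
        // the count reaches 32 the condition is 0 and CMOVZ substitutes the
        // constant 0, matching the semantics of the generic unsigned shift.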
  7085  func rewriteValueMIPS_OpRsh16Ux32_0(v *Value) bool {
  7086  	b := v.Block
  7087  	_ = b
  7088  	types := &b.Func.Config.Types
  7089  	_ = types
  7090  	// match: (Rsh16Ux32 <t> x y)
  7091  	// cond:
  7092  	// result: (CMOVZ (SRL <t> (ZeroExt16to32 x) y) (MOVWconst [0]) (SGTUconst [32] y))
  7093  	for {
  7094  		t := v.Type
  7095  		x := v.Args[0]
  7096  		y := v.Args[1]
  7097  		v.reset(OpMIPSCMOVZ)
  7098  		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7099  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
  7100  		v1.AddArg(x)
  7101  		v0.AddArg(v1)
  7102  		v0.AddArg(y)
  7103  		v.AddArg(v0)
  7104  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  7105  		v2.AuxInt = 0
  7106  		v.AddArg(v2)
  7107  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, types.Bool)
  7108  		v3.AuxInt = 32
  7109  		v3.AddArg(y)
  7110  		v.AddArg(v3)
  7111  		return true
  7112  	}
  7113  }
  7114  func rewriteValueMIPS_OpRsh16Ux64_0(v *Value) bool {
  7115  	b := v.Block
  7116  	_ = b
  7117  	types := &b.Func.Config.Types
  7118  	_ = types
  7119  	// match: (Rsh16Ux64 x (Const64 [c]))
  7120  	// cond: uint32(c) < 16
  7121  	// result: (SRLconst (SLLconst <types.UInt32> x [16]) [c+16])
  7122  	for {
  7123  		x := v.Args[0]
  7124  		v_1 := v.Args[1]
  7125  		if v_1.Op != OpConst64 {
  7126  			break
  7127  		}
  7128  		c := v_1.AuxInt
  7129  		if !(uint32(c) < 16) {
  7130  			break
  7131  		}
  7132  		v.reset(OpMIPSSRLconst)
  7133  		v.AuxInt = c + 16
  7134  		v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, types.UInt32)
  7135  		v0.AuxInt = 16
  7136  		v0.AddArg(x)
  7137  		v.AddArg(v0)
  7138  		return true
  7139  	}
  7140  	// match: (Rsh16Ux64 _ (Const64 [c]))
  7141  	// cond: uint32(c) >= 16
  7142  	// result: (MOVWconst [0])
  7143  	for {
  7144  		v_1 := v.Args[1]
  7145  		if v_1.Op != OpConst64 {
  7146  			break
  7147  		}
  7148  		c := v_1.AuxInt
  7149  		if !(uint32(c) >= 16) {
  7150  			break
  7151  		}
  7152  		v.reset(OpMIPSMOVWconst)
  7153  		v.AuxInt = 0
  7154  		return true
  7155  	}
  7156  	return false
  7157  }
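        // For counts below 16 the value is first shifted left by 16: a 16-bit
        // operand's upper register bits are not guaranteed to be zero, so SLLconst
        // discards them, and the single SRLconst by c+16 then produces the
        // zero-extended result.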
  7158  func rewriteValueMIPS_OpRsh16Ux8_0(v *Value) bool {
  7159  	b := v.Block
  7160  	_ = b
  7161  	types := &b.Func.Config.Types
  7162  	_ = types
  7163  	// match: (Rsh16Ux8 <t> x y)
  7164  	// cond:
  7165  	// result: (CMOVZ (SRL <t> (ZeroExt16to32 x) (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
  7166  	for {
  7167  		t := v.Type
  7168  		x := v.Args[0]
  7169  		y := v.Args[1]
  7170  		v.reset(OpMIPSCMOVZ)
  7171  		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7172  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
  7173  		v1.AddArg(x)
  7174  		v0.AddArg(v1)
  7175  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
  7176  		v2.AddArg(y)
  7177  		v0.AddArg(v2)
  7178  		v.AddArg(v0)
  7179  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  7180  		v3.AuxInt = 0
  7181  		v.AddArg(v3)
  7182  		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, types.Bool)
  7183  		v4.AuxInt = 32
  7184  		v5 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
  7185  		v5.AddArg(y)
  7186  		v4.AddArg(v5)
  7187  		v.AddArg(v4)
  7188  		return true
  7189  	}
  7190  }
  7191  func rewriteValueMIPS_OpRsh16x16_0(v *Value) bool {
  7192  	b := v.Block
  7193  	_ = b
  7194  	types := &b.Func.Config.Types
  7195  	_ = types
  7196  	// match: (Rsh16x16 x y)
  7197  	// cond:
  7198  	// result: (SRA (SignExt16to32 x) ( CMOVZ <types.UInt32> (ZeroExt16to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt16to32 y))))
  7199  	for {
  7200  		x := v.Args[0]
  7201  		y := v.Args[1]
  7202  		v.reset(OpMIPSSRA)
  7203  		v0 := b.NewValue0(v.Pos, OpSignExt16to32, types.Int32)
  7204  		v0.AddArg(x)
  7205  		v.AddArg(v0)
  7206  		v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, types.UInt32)
  7207  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
  7208  		v2.AddArg(y)
  7209  		v1.AddArg(v2)
  7210  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  7211  		v3.AuxInt = -1
  7212  		v1.AddArg(v3)
  7213  		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, types.Bool)
  7214  		v4.AuxInt = 32
  7215  		v5 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
  7216  		v5.AddArg(y)
  7217  		v4.AddArg(v5)
  7218  		v1.AddArg(v4)
  7219  		v.AddArg(v1)
  7220  		return true
  7221  	}
  7222  }
  7223  func rewriteValueMIPS_OpRsh16x32_0(v *Value) bool {
  7224  	b := v.Block
  7225  	_ = b
  7226  	types := &b.Func.Config.Types
  7227  	_ = types
  7228  	// match: (Rsh16x32 x y)
  7229  	// cond:
  7230  	// result: (SRA (SignExt16to32 x) ( CMOVZ <types.UInt32> y (MOVWconst [-1]) (SGTUconst [32] y)))
  7231  	for {
  7232  		x := v.Args[0]
  7233  		y := v.Args[1]
  7234  		v.reset(OpMIPSSRA)
  7235  		v0 := b.NewValue0(v.Pos, OpSignExt16to32, types.Int32)
  7236  		v0.AddArg(x)
  7237  		v.AddArg(v0)
  7238  		v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, types.UInt32)
  7239  		v1.AddArg(y)
  7240  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  7241  		v2.AuxInt = -1
  7242  		v1.AddArg(v2)
  7243  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, types.Bool)
  7244  		v3.AuxInt = 32
  7245  		v3.AddArg(y)
  7246  		v1.AddArg(v3)
  7247  		v.AddArg(v1)
  7248  		return true
  7249  	}
  7250  }
  7251  func rewriteValueMIPS_OpRsh16x64_0(v *Value) bool {
  7252  	b := v.Block
  7253  	_ = b
  7254  	types := &b.Func.Config.Types
  7255  	_ = types
  7256  	// match: (Rsh16x64 x (Const64 [c]))
  7257  	// cond: uint32(c) < 16
  7258  	// result: (SRAconst (SLLconst <types.UInt32> x [16]) [c+16])
  7259  	for {
  7260  		x := v.Args[0]
  7261  		v_1 := v.Args[1]
  7262  		if v_1.Op != OpConst64 {
  7263  			break
  7264  		}
  7265  		c := v_1.AuxInt
  7266  		if !(uint32(c) < 16) {
  7267  			break
  7268  		}
  7269  		v.reset(OpMIPSSRAconst)
  7270  		v.AuxInt = c + 16
  7271  		v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, types.UInt32)
  7272  		v0.AuxInt = 16
  7273  		v0.AddArg(x)
  7274  		v.AddArg(v0)
  7275  		return true
  7276  	}
  7277  	// match: (Rsh16x64 x (Const64 [c]))
  7278  	// cond: uint32(c) >= 16
  7279  	// result: (SRAconst (SLLconst <types.UInt32> x [16]) [31])
  7280  	for {
  7281  		x := v.Args[0]
  7282  		v_1 := v.Args[1]
  7283  		if v_1.Op != OpConst64 {
  7284  			break
  7285  		}
  7286  		c := v_1.AuxInt
  7287  		if !(uint32(c) >= 16) {
  7288  			break
  7289  		}
  7290  		v.reset(OpMIPSSRAconst)
  7291  		v.AuxInt = 31
  7292  		v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, types.UInt32)
  7293  		v0.AuxInt = 16
  7294  		v0.AddArg(x)
  7295  		v.AddArg(v0)
  7296  		return true
  7297  	}
  7298  	return false
  7299  }
  7300  func rewriteValueMIPS_OpRsh16x8_0(v *Value) bool {
  7301  	b := v.Block
  7302  	_ = b
  7303  	types := &b.Func.Config.Types
  7304  	_ = types
  7305  	// match: (Rsh16x8 x y)
  7306  	// cond:
  7307  	// result: (SRA (SignExt16to32 x) ( CMOVZ <types.UInt32> (ZeroExt8to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt8to32 y))))
  7308  	for {
  7309  		x := v.Args[0]
  7310  		y := v.Args[1]
  7311  		v.reset(OpMIPSSRA)
  7312  		v0 := b.NewValue0(v.Pos, OpSignExt16to32, types.Int32)
  7313  		v0.AddArg(x)
  7314  		v.AddArg(v0)
  7315  		v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, types.UInt32)
  7316  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
  7317  		v2.AddArg(y)
  7318  		v1.AddArg(v2)
  7319  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  7320  		v3.AuxInt = -1
  7321  		v1.AddArg(v3)
  7322  		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, types.Bool)
  7323  		v4.AuxInt = 32
  7324  		v5 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
  7325  		v5.AddArg(y)
  7326  		v4.AddArg(v5)
  7327  		v1.AddArg(v4)
  7328  		v.AddArg(v1)
  7329  		return true
  7330  	}
  7331  }
  7332  func rewriteValueMIPS_OpRsh32Ux16_0(v *Value) bool {
  7333  	b := v.Block
  7334  	_ = b
  7335  	types := &b.Func.Config.Types
  7336  	_ = types
  7337  	// match: (Rsh32Ux16 <t> x y)
  7338  	// cond:
  7339  	// result: (CMOVZ (SRL <t> x (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
  7340  	for {
  7341  		t := v.Type
  7342  		x := v.Args[0]
  7343  		y := v.Args[1]
  7344  		v.reset(OpMIPSCMOVZ)
  7345  		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7346  		v0.AddArg(x)
  7347  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
  7348  		v1.AddArg(y)
  7349  		v0.AddArg(v1)
  7350  		v.AddArg(v0)
  7351  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  7352  		v2.AuxInt = 0
  7353  		v.AddArg(v2)
  7354  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, types.Bool)
  7355  		v3.AuxInt = 32
  7356  		v4 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
  7357  		v4.AddArg(y)
  7358  		v3.AddArg(v4)
  7359  		v.AddArg(v3)
  7360  		return true
  7361  	}
  7362  }
  7363  func rewriteValueMIPS_OpRsh32Ux32_0(v *Value) bool {
  7364  	b := v.Block
  7365  	_ = b
  7366  	types := &b.Func.Config.Types
  7367  	_ = types
  7368  	// match: (Rsh32Ux32 <t> x y)
  7369  	// cond:
  7370  	// result: (CMOVZ (SRL <t> x y) (MOVWconst [0]) (SGTUconst [32] y))
  7371  	for {
  7372  		t := v.Type
  7373  		x := v.Args[0]
  7374  		y := v.Args[1]
  7375  		v.reset(OpMIPSCMOVZ)
  7376  		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7377  		v0.AddArg(x)
  7378  		v0.AddArg(y)
  7379  		v.AddArg(v0)
  7380  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  7381  		v1.AuxInt = 0
  7382  		v.AddArg(v1)
  7383  		v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, types.Bool)
  7384  		v2.AuxInt = 32
  7385  		v2.AddArg(y)
  7386  		v.AddArg(v2)
  7387  		return true
  7388  	}
  7389  }
  7390  func rewriteValueMIPS_OpRsh32Ux64_0(v *Value) bool {
  7391  	// match: (Rsh32Ux64 x (Const64 [c]))
  7392  	// cond: uint32(c) < 32
  7393  	// result: (SRLconst x [c])
  7394  	for {
  7395  		x := v.Args[0]
  7396  		v_1 := v.Args[1]
  7397  		if v_1.Op != OpConst64 {
  7398  			break
  7399  		}
  7400  		c := v_1.AuxInt
  7401  		if !(uint32(c) < 32) {
  7402  			break
  7403  		}
  7404  		v.reset(OpMIPSSRLconst)
  7405  		v.AuxInt = c
  7406  		v.AddArg(x)
  7407  		return true
  7408  	}
  7409  	// match: (Rsh32Ux64 _ (Const64 [c]))
  7410  	// cond: uint32(c) >= 32
  7411  	// result: (MOVWconst [0])
  7412  	for {
  7413  		v_1 := v.Args[1]
  7414  		if v_1.Op != OpConst64 {
  7415  			break
  7416  		}
  7417  		c := v_1.AuxInt
  7418  		if !(uint32(c) >= 32) {
  7419  			break
  7420  		}
  7421  		v.reset(OpMIPSMOVWconst)
  7422  		v.AuxInt = 0
  7423  		return true
  7424  	}
  7425  	return false
  7426  }
  7427  func rewriteValueMIPS_OpRsh32Ux8_0(v *Value) bool {
  7428  	b := v.Block
  7429  	_ = b
  7430  	types := &b.Func.Config.Types
  7431  	_ = types
  7432  	// match: (Rsh32Ux8 <t> x y)
  7433  	// cond:
  7434  	// result: (CMOVZ (SRL <t> x (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
  7435  	for {
  7436  		t := v.Type
  7437  		x := v.Args[0]
  7438  		y := v.Args[1]
  7439  		v.reset(OpMIPSCMOVZ)
  7440  		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7441  		v0.AddArg(x)
  7442  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
  7443  		v1.AddArg(y)
  7444  		v0.AddArg(v1)
  7445  		v.AddArg(v0)
  7446  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  7447  		v2.AuxInt = 0
  7448  		v.AddArg(v2)
  7449  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, types.Bool)
  7450  		v3.AuxInt = 32
  7451  		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
  7452  		v4.AddArg(y)
  7453  		v3.AddArg(v4)
  7454  		v.AddArg(v3)
  7455  		return true
  7456  	}
  7457  }
  7458  func rewriteValueMIPS_OpRsh32x16_0(v *Value) bool {
  7459  	b := v.Block
  7460  	_ = b
  7461  	types := &b.Func.Config.Types
  7462  	_ = types
  7463  	// match: (Rsh32x16 x y)
  7464  	// cond:
  7465  	// result: (SRA x ( CMOVZ <types.UInt32> (ZeroExt16to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt16to32 y))))
  7466  	for {
  7467  		x := v.Args[0]
  7468  		y := v.Args[1]
  7469  		v.reset(OpMIPSSRA)
  7470  		v.AddArg(x)
  7471  		v0 := b.NewValue0(v.Pos, OpMIPSCMOVZ, types.UInt32)
  7472  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
  7473  		v1.AddArg(y)
  7474  		v0.AddArg(v1)
  7475  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  7476  		v2.AuxInt = -1
  7477  		v0.AddArg(v2)
  7478  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, types.Bool)
  7479  		v3.AuxInt = 32
  7480  		v4 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
  7481  		v4.AddArg(y)
  7482  		v3.AddArg(v4)
  7483  		v0.AddArg(v3)
  7484  		v.AddArg(v0)
  7485  		return true
  7486  	}
  7487  }
  7488  func rewriteValueMIPS_OpRsh32x32_0(v *Value) bool {
  7489  	b := v.Block
  7490  	_ = b
  7491  	types := &b.Func.Config.Types
  7492  	_ = types
  7493  	// match: (Rsh32x32 x y)
  7494  	// cond:
  7495  	// result: (SRA x ( CMOVZ <types.UInt32> y (MOVWconst [-1]) (SGTUconst [32] y)))
  7496  	for {
  7497  		x := v.Args[0]
  7498  		y := v.Args[1]
  7499  		v.reset(OpMIPSSRA)
  7500  		v.AddArg(x)
  7501  		v0 := b.NewValue0(v.Pos, OpMIPSCMOVZ, types.UInt32)
  7502  		v0.AddArg(y)
  7503  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  7504  		v1.AuxInt = -1
  7505  		v0.AddArg(v1)
  7506  		v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, types.Bool)
  7507  		v2.AuxInt = 32
  7508  		v2.AddArg(y)
  7509  		v0.AddArg(v2)
  7510  		v.AddArg(v0)
  7511  		return true
  7512  	}
  7513  }
  7514  func rewriteValueMIPS_OpRsh32x64_0(v *Value) bool {
  7515  	// match: (Rsh32x64 x (Const64 [c]))
  7516  	// cond: uint32(c) < 32
  7517  	// result: (SRAconst x [c])
  7518  	for {
  7519  		x := v.Args[0]
  7520  		v_1 := v.Args[1]
  7521  		if v_1.Op != OpConst64 {
  7522  			break
  7523  		}
  7524  		c := v_1.AuxInt
  7525  		if !(uint32(c) < 32) {
  7526  			break
  7527  		}
  7528  		v.reset(OpMIPSSRAconst)
  7529  		v.AuxInt = c
  7530  		v.AddArg(x)
  7531  		return true
  7532  	}
  7533  	// match: (Rsh32x64 x (Const64 [c]))
  7534  	// cond: uint32(c) >= 32
  7535  	// result: (SRAconst x [31])
  7536  	for {
  7537  		x := v.Args[0]
  7538  		v_1 := v.Args[1]
  7539  		if v_1.Op != OpConst64 {
  7540  			break
  7541  		}
  7542  		c := v_1.AuxInt
  7543  		if !(uint32(c) >= 32) {
  7544  			break
  7545  		}
  7546  		v.reset(OpMIPSSRAconst)
  7547  		v.AuxInt = 31
  7548  		v.AddArg(x)
  7549  		return true
  7550  	}
  7551  	return false
  7552  }
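        // Shift counts of 32 or more are clamped to 31: an arithmetic shift by 31
        // leaves only copies of the sign bit, matching Go's definition of a signed
        // right shift by an oversized count (all sign bits).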
  7553  func rewriteValueMIPS_OpRsh32x8_0(v *Value) bool {
  7554  	b := v.Block
  7555  	_ = b
  7556  	types := &b.Func.Config.Types
  7557  	_ = types
  7558  	// match: (Rsh32x8 x y)
  7559  	// cond:
  7560  	// result: (SRA x ( CMOVZ <types.UInt32> (ZeroExt8to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt8to32 y))))
  7561  	for {
  7562  		x := v.Args[0]
  7563  		y := v.Args[1]
  7564  		v.reset(OpMIPSSRA)
  7565  		v.AddArg(x)
  7566  		v0 := b.NewValue0(v.Pos, OpMIPSCMOVZ, types.UInt32)
  7567  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
  7568  		v1.AddArg(y)
  7569  		v0.AddArg(v1)
  7570  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  7571  		v2.AuxInt = -1
  7572  		v0.AddArg(v2)
  7573  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, types.Bool)
  7574  		v3.AuxInt = 32
  7575  		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
  7576  		v4.AddArg(y)
  7577  		v3.AddArg(v4)
  7578  		v0.AddArg(v3)
  7579  		v.AddArg(v0)
  7580  		return true
  7581  	}
  7582  }
  7583  func rewriteValueMIPS_OpRsh8Ux16_0(v *Value) bool {
  7584  	b := v.Block
  7585  	_ = b
  7586  	types := &b.Func.Config.Types
  7587  	_ = types
  7588  	// match: (Rsh8Ux16 <t> x y)
  7589  	// cond:
  7590  	// result: (CMOVZ (SRL <t> (ZeroExt8to32 x) (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
  7591  	for {
  7592  		t := v.Type
  7593  		x := v.Args[0]
  7594  		y := v.Args[1]
  7595  		v.reset(OpMIPSCMOVZ)
  7596  		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7597  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
  7598  		v1.AddArg(x)
  7599  		v0.AddArg(v1)
  7600  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
  7601  		v2.AddArg(y)
  7602  		v0.AddArg(v2)
  7603  		v.AddArg(v0)
  7604  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  7605  		v3.AuxInt = 0
  7606  		v.AddArg(v3)
  7607  		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, types.Bool)
  7608  		v4.AuxInt = 32
  7609  		v5 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
  7610  		v5.AddArg(y)
  7611  		v4.AddArg(v5)
  7612  		v.AddArg(v4)
  7613  		return true
  7614  	}
  7615  }
  7616  func rewriteValueMIPS_OpRsh8Ux32_0(v *Value) bool {
  7617  	b := v.Block
  7618  	_ = b
  7619  	types := &b.Func.Config.Types
  7620  	_ = types
  7621  	// match: (Rsh8Ux32 <t> x y)
  7622  	// cond:
  7623  	// result: (CMOVZ (SRL <t> (ZeroExt8to32 x) y) (MOVWconst [0]) (SGTUconst [32] y))
  7624  	for {
  7625  		t := v.Type
  7626  		x := v.Args[0]
  7627  		y := v.Args[1]
  7628  		v.reset(OpMIPSCMOVZ)
  7629  		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7630  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
  7631  		v1.AddArg(x)
  7632  		v0.AddArg(v1)
  7633  		v0.AddArg(y)
  7634  		v.AddArg(v0)
  7635  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  7636  		v2.AuxInt = 0
  7637  		v.AddArg(v2)
  7638  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, types.Bool)
  7639  		v3.AuxInt = 32
  7640  		v3.AddArg(y)
  7641  		v.AddArg(v3)
  7642  		return true
  7643  	}
  7644  }
  7645  func rewriteValueMIPS_OpRsh8Ux64_0(v *Value) bool {
  7646  	b := v.Block
  7647  	_ = b
  7648  	types := &b.Func.Config.Types
  7649  	_ = types
  7650  	// match: (Rsh8Ux64 x (Const64 [c]))
  7651  	// cond: uint32(c) < 8
  7652  	// result: (SRLconst (SLLconst <types.UInt32> x [24]) [c+24])
  7653  	for {
  7654  		x := v.Args[0]
  7655  		v_1 := v.Args[1]
  7656  		if v_1.Op != OpConst64 {
  7657  			break
  7658  		}
  7659  		c := v_1.AuxInt
  7660  		if !(uint32(c) < 8) {
  7661  			break
  7662  		}
  7663  		v.reset(OpMIPSSRLconst)
  7664  		v.AuxInt = c + 24
  7665  		v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, types.UInt32)
  7666  		v0.AuxInt = 24
  7667  		v0.AddArg(x)
  7668  		v.AddArg(v0)
  7669  		return true
  7670  	}
  7671  	// match: (Rsh8Ux64 _ (Const64 [c]))
  7672  	// cond: uint32(c) >= 8
  7673  	// result: (MOVWconst [0])
  7674  	for {
  7675  		v_1 := v.Args[1]
  7676  		if v_1.Op != OpConst64 {
  7677  			break
  7678  		}
  7679  		c := v_1.AuxInt
  7680  		if !(uint32(c) >= 8) {
  7681  			break
  7682  		}
  7683  		v.reset(OpMIPSMOVWconst)
  7684  		v.AuxInt = 0
  7685  		return true
  7686  	}
  7687  	return false
  7688  }
  7689  func rewriteValueMIPS_OpRsh8Ux8_0(v *Value) bool {
  7690  	b := v.Block
  7691  	_ = b
  7692  	types := &b.Func.Config.Types
  7693  	_ = types
  7694  	// match: (Rsh8Ux8 <t> x y)
  7695  	// cond:
  7696  	// result: (CMOVZ (SRL <t> (ZeroExt8to32 x) (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
  7697  	for {
  7698  		t := v.Type
  7699  		x := v.Args[0]
  7700  		y := v.Args[1]
  7701  		v.reset(OpMIPSCMOVZ)
  7702  		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7703  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
  7704  		v1.AddArg(x)
  7705  		v0.AddArg(v1)
  7706  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
  7707  		v2.AddArg(y)
  7708  		v0.AddArg(v2)
  7709  		v.AddArg(v0)
  7710  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  7711  		v3.AuxInt = 0
  7712  		v.AddArg(v3)
  7713  		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, types.Bool)
  7714  		v4.AuxInt = 32
  7715  		v5 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
  7716  		v5.AddArg(y)
  7717  		v4.AddArg(v5)
  7718  		v.AddArg(v4)
  7719  		return true
  7720  	}
  7721  }
  7722  func rewriteValueMIPS_OpRsh8x16_0(v *Value) bool {
  7723  	b := v.Block
  7724  	_ = b
  7725  	types := &b.Func.Config.Types
  7726  	_ = types
  7727  	// match: (Rsh8x16 x y)
  7728  	// cond:
  7729  	// result: (SRA (SignExt16to32 x) ( CMOVZ <types.UInt32> (ZeroExt16to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt16to32 y))))
  7730  	for {
  7731  		x := v.Args[0]
  7732  		y := v.Args[1]
  7733  		v.reset(OpMIPSSRA)
  7734  		v0 := b.NewValue0(v.Pos, OpSignExt16to32, types.Int32)
  7735  		v0.AddArg(x)
  7736  		v.AddArg(v0)
  7737  		v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, types.UInt32)
  7738  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
  7739  		v2.AddArg(y)
  7740  		v1.AddArg(v2)
  7741  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  7742  		v3.AuxInt = -1
  7743  		v1.AddArg(v3)
  7744  		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, types.Bool)
  7745  		v4.AuxInt = 32
  7746  		v5 := b.NewValue0(v.Pos, OpZeroExt16to32, types.UInt32)
  7747  		v5.AddArg(y)
  7748  		v4.AddArg(v5)
  7749  		v1.AddArg(v4)
  7750  		v.AddArg(v1)
  7751  		return true
  7752  	}
  7753  }
  7754  func rewriteValueMIPS_OpRsh8x32_0(v *Value) bool {
  7755  	b := v.Block
  7756  	_ = b
  7757  	types := &b.Func.Config.Types
  7758  	_ = types
  7759  	// match: (Rsh8x32 x y)
  7760  	// cond:
  7761  	// result: (SRA (SignExt16to32 x) ( CMOVZ <types.UInt32> y (MOVWconst [-1]) (SGTUconst [32] y)))
  7762  	for {
  7763  		x := v.Args[0]
  7764  		y := v.Args[1]
  7765  		v.reset(OpMIPSSRA)
  7766  		v0 := b.NewValue0(v.Pos, OpSignExt16to32, types.Int32)
  7767  		v0.AddArg(x)
  7768  		v.AddArg(v0)
  7769  		v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, types.UInt32)
  7770  		v1.AddArg(y)
  7771  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  7772  		v2.AuxInt = -1
  7773  		v1.AddArg(v2)
  7774  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, types.Bool)
  7775  		v3.AuxInt = 32
  7776  		v3.AddArg(y)
  7777  		v1.AddArg(v3)
  7778  		v.AddArg(v1)
  7779  		return true
  7780  	}
  7781  }
  7782  func rewriteValueMIPS_OpRsh8x64_0(v *Value) bool {
  7783  	b := v.Block
  7784  	_ = b
  7785  	types := &b.Func.Config.Types
  7786  	_ = types
  7787  	// match: (Rsh8x64 x (Const64 [c]))
  7788  	// cond: uint32(c) < 8
  7789  	// result: (SRAconst (SLLconst <types.UInt32> x [24]) [c+24])
  7790  	for {
  7791  		x := v.Args[0]
  7792  		v_1 := v.Args[1]
  7793  		if v_1.Op != OpConst64 {
  7794  			break
  7795  		}
  7796  		c := v_1.AuxInt
  7797  		if !(uint32(c) < 8) {
  7798  			break
  7799  		}
  7800  		v.reset(OpMIPSSRAconst)
  7801  		v.AuxInt = c + 24
  7802  		v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, types.UInt32)
  7803  		v0.AuxInt = 24
  7804  		v0.AddArg(x)
  7805  		v.AddArg(v0)
  7806  		return true
  7807  	}
  7808  	// match: (Rsh8x64 x (Const64 [c]))
  7809  	// cond: uint32(c) >= 8
  7810  	// result: (SRAconst (SLLconst <types.UInt32> x [24]) [31])
  7811  	for {
  7812  		x := v.Args[0]
  7813  		v_1 := v.Args[1]
  7814  		if v_1.Op != OpConst64 {
  7815  			break
  7816  		}
  7817  		c := v_1.AuxInt
  7818  		if !(uint32(c) >= 8) {
  7819  			break
  7820  		}
  7821  		v.reset(OpMIPSSRAconst)
  7822  		v.AuxInt = 31
  7823  		v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, types.UInt32)
  7824  		v0.AuxInt = 24
  7825  		v0.AddArg(x)
  7826  		v.AddArg(v0)
  7827  		return true
  7828  	}
  7829  	return false
  7830  }
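// Editorial sketch (not part of the generated rules): for a constant shift
// count the Rsh8x64 rules above avoid an explicit sign extension. Shifting
// the byte into the top of the register (SLLconst [24]) and then doing one
// arithmetic shift by c+24 both discards whatever the upper 24 bits held and
// performs the requested shift; counts of 8 or more clamp to SRAconst [31].
// A minimal model, using the hypothetical name rsh8ConstSketch:
func rsh8ConstSketch(x int8, c uint) int32 {
	if c >= 8 {
		c = 7 // c+24 becomes 31, so every result bit is a copy of the sign bit
	}
	w := uint32(x) // stand-in for a register whose upper 24 bits are unspecified
	return int32(w<<24) >> (c + 24)
}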
  7831  func rewriteValueMIPS_OpRsh8x8_0(v *Value) bool {
  7832  	b := v.Block
  7833  	_ = b
  7834  	types := &b.Func.Config.Types
  7835  	_ = types
  7836  	// match: (Rsh8x8 x y)
  7837  	// cond:
  7838  	// result: (SRA (SignExt8to32 x) ( CMOVZ <types.UInt32> (ZeroExt8to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt8to32 y))))
  7839  	for {
  7840  		x := v.Args[0]
  7841  		y := v.Args[1]
  7842  		v.reset(OpMIPSSRA)
  7843  		v0 := b.NewValue0(v.Pos, OpSignExt8to32, types.Int32)
  7844  		v0.AddArg(x)
  7845  		v.AddArg(v0)
  7846  		v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, types.UInt32)
  7847  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
  7848  		v2.AddArg(y)
  7849  		v1.AddArg(v2)
  7850  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  7851  		v3.AuxInt = -1
  7852  		v1.AddArg(v3)
  7853  		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, types.Bool)
  7854  		v4.AuxInt = 32
  7855  		v5 := b.NewValue0(v.Pos, OpZeroExt8to32, types.UInt32)
  7856  		v5.AddArg(y)
  7857  		v4.AddArg(v5)
  7858  		v1.AddArg(v4)
  7859  		v.AddArg(v1)
  7860  		return true
  7861  	}
  7862  }
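// Editorial sketch (not part of the generated rules): the signed variable
// shifts above clamp the amount with a CMOVZ that substitutes -1 when the
// amount is 32 or more; MIPS SRAV reads only the low five bits, so an
// all-ones amount behaves like 31 and yields pure sign bits, matching Go's
// semantics for over-wide signed shifts. A minimal model, using the
// hypothetical name rsh8Sketch:
func rsh8Sketch(x int8, y uint8) int32 {
	sx := int32(x)  // SignExt8to32 of the value being shifted
	zy := uint32(y) // ZeroExt8to32 of the shift amount
	if zy >= 32 {   // CMOVZ guarded by (SGTUconst [32] zy)
		zy = 31 // stand-in for the MOVWconst [-1] amount
	}
	return sx >> zy // SRA
}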
  7863  func rewriteValueMIPS_OpSelect0_0(v *Value) bool {
  7864  	b := v.Block
  7865  	_ = b
  7866  	types := &b.Func.Config.Types
  7867  	_ = types
  7868  	// match: (Select0 (Add32carry <t> x y))
  7869  	// cond:
  7870  	// result: (ADD <t.FieldType(0)> x y)
  7871  	for {
  7872  		v_0 := v.Args[0]
  7873  		if v_0.Op != OpAdd32carry {
  7874  			break
  7875  		}
  7876  		t := v_0.Type
  7877  		x := v_0.Args[0]
  7878  		y := v_0.Args[1]
  7879  		v.reset(OpMIPSADD)
  7880  		v.Type = t.FieldType(0)
  7881  		v.AddArg(x)
  7882  		v.AddArg(y)
  7883  		return true
  7884  	}
  7885  	// match: (Select0 (Sub32carry <t> x y))
  7886  	// cond:
  7887  	// result: (SUB <t.FieldType(0)> x y)
  7888  	for {
  7889  		v_0 := v.Args[0]
  7890  		if v_0.Op != OpSub32carry {
  7891  			break
  7892  		}
  7893  		t := v_0.Type
  7894  		x := v_0.Args[0]
  7895  		y := v_0.Args[1]
  7896  		v.reset(OpMIPSSUB)
  7897  		v.Type = t.FieldType(0)
  7898  		v.AddArg(x)
  7899  		v.AddArg(y)
  7900  		return true
  7901  	}
  7902  	// match: (Select0 (MULTU (MOVWconst [0]) _))
  7903  	// cond:
  7904  	// result: (MOVWconst [0])
  7905  	for {
  7906  		v_0 := v.Args[0]
  7907  		if v_0.Op != OpMIPSMULTU {
  7908  			break
  7909  		}
  7910  		v_0_0 := v_0.Args[0]
  7911  		if v_0_0.Op != OpMIPSMOVWconst {
  7912  			break
  7913  		}
  7914  		if v_0_0.AuxInt != 0 {
  7915  			break
  7916  		}
  7917  		v.reset(OpMIPSMOVWconst)
  7918  		v.AuxInt = 0
  7919  		return true
  7920  	}
  7921  	// match: (Select0 (MULTU _ (MOVWconst [0])))
  7922  	// cond:
  7923  	// result: (MOVWconst [0])
  7924  	for {
  7925  		v_0 := v.Args[0]
  7926  		if v_0.Op != OpMIPSMULTU {
  7927  			break
  7928  		}
  7929  		v_0_1 := v_0.Args[1]
  7930  		if v_0_1.Op != OpMIPSMOVWconst {
  7931  			break
  7932  		}
  7933  		if v_0_1.AuxInt != 0 {
  7934  			break
  7935  		}
  7936  		v.reset(OpMIPSMOVWconst)
  7937  		v.AuxInt = 0
  7938  		return true
  7939  	}
  7940  	// match: (Select0 (MULTU (MOVWconst [1]) _))
  7941  	// cond:
  7942  	// result: (MOVWconst [0])
  7943  	for {
  7944  		v_0 := v.Args[0]
  7945  		if v_0.Op != OpMIPSMULTU {
  7946  			break
  7947  		}
  7948  		v_0_0 := v_0.Args[0]
  7949  		if v_0_0.Op != OpMIPSMOVWconst {
  7950  			break
  7951  		}
  7952  		if v_0_0.AuxInt != 1 {
  7953  			break
  7954  		}
  7955  		v.reset(OpMIPSMOVWconst)
  7956  		v.AuxInt = 0
  7957  		return true
  7958  	}
  7959  	// match: (Select0 (MULTU _ (MOVWconst [1])))
  7960  	// cond:
  7961  	// result: (MOVWconst [0])
  7962  	for {
  7963  		v_0 := v.Args[0]
  7964  		if v_0.Op != OpMIPSMULTU {
  7965  			break
  7966  		}
  7967  		v_0_1 := v_0.Args[1]
  7968  		if v_0_1.Op != OpMIPSMOVWconst {
  7969  			break
  7970  		}
  7971  		if v_0_1.AuxInt != 1 {
  7972  			break
  7973  		}
  7974  		v.reset(OpMIPSMOVWconst)
  7975  		v.AuxInt = 0
  7976  		return true
  7977  	}
  7978  	// match: (Select0 (MULTU (MOVWconst [-1]) x))
  7979  	// cond:
  7980  	// result: (CMOVZ (ADDconst <x.Type> [-1] x) (MOVWconst [0]) x)
  7981  	for {
  7982  		v_0 := v.Args[0]
  7983  		if v_0.Op != OpMIPSMULTU {
  7984  			break
  7985  		}
  7986  		v_0_0 := v_0.Args[0]
  7987  		if v_0_0.Op != OpMIPSMOVWconst {
  7988  			break
  7989  		}
  7990  		if v_0_0.AuxInt != -1 {
  7991  			break
  7992  		}
  7993  		x := v_0.Args[1]
  7994  		v.reset(OpMIPSCMOVZ)
  7995  		v0 := b.NewValue0(v.Pos, OpMIPSADDconst, x.Type)
  7996  		v0.AuxInt = -1
  7997  		v0.AddArg(x)
  7998  		v.AddArg(v0)
  7999  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  8000  		v1.AuxInt = 0
  8001  		v.AddArg(v1)
  8002  		v.AddArg(x)
  8003  		return true
  8004  	}
  8005  	// match: (Select0 (MULTU x (MOVWconst [-1])))
  8006  	// cond:
  8007  	// result: (CMOVZ (ADDconst <x.Type> [-1] x) (MOVWconst [0]) x)
  8008  	for {
  8009  		v_0 := v.Args[0]
  8010  		if v_0.Op != OpMIPSMULTU {
  8011  			break
  8012  		}
  8013  		x := v_0.Args[0]
  8014  		v_0_1 := v_0.Args[1]
  8015  		if v_0_1.Op != OpMIPSMOVWconst {
  8016  			break
  8017  		}
  8018  		if v_0_1.AuxInt != -1 {
  8019  			break
  8020  		}
  8021  		v.reset(OpMIPSCMOVZ)
  8022  		v0 := b.NewValue0(v.Pos, OpMIPSADDconst, x.Type)
  8023  		v0.AuxInt = -1
  8024  		v0.AddArg(x)
  8025  		v.AddArg(v0)
  8026  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  8027  		v1.AuxInt = 0
  8028  		v.AddArg(v1)
  8029  		v.AddArg(x)
  8030  		return true
  8031  	}
  8032  	// match: (Select0 (MULTU (MOVWconst [c]) x))
  8033  	// cond: isPowerOfTwo(int64(uint32(c)))
  8034  	// result: (SRLconst [32-log2(int64(uint32(c)))] x)
  8035  	for {
  8036  		v_0 := v.Args[0]
  8037  		if v_0.Op != OpMIPSMULTU {
  8038  			break
  8039  		}
  8040  		v_0_0 := v_0.Args[0]
  8041  		if v_0_0.Op != OpMIPSMOVWconst {
  8042  			break
  8043  		}
  8044  		c := v_0_0.AuxInt
  8045  		x := v_0.Args[1]
  8046  		if !(isPowerOfTwo(int64(uint32(c)))) {
  8047  			break
  8048  		}
  8049  		v.reset(OpMIPSSRLconst)
  8050  		v.AuxInt = 32 - log2(int64(uint32(c)))
  8051  		v.AddArg(x)
  8052  		return true
  8053  	}
  8054  	// match: (Select0 (MULTU x (MOVWconst [c])))
  8055  	// cond: isPowerOfTwo(int64(uint32(c)))
  8056  	// result: (SRLconst [32-log2(int64(uint32(c)))] x)
  8057  	for {
  8058  		v_0 := v.Args[0]
  8059  		if v_0.Op != OpMIPSMULTU {
  8060  			break
  8061  		}
  8062  		x := v_0.Args[0]
  8063  		v_0_1 := v_0.Args[1]
  8064  		if v_0_1.Op != OpMIPSMOVWconst {
  8065  			break
  8066  		}
  8067  		c := v_0_1.AuxInt
  8068  		if !(isPowerOfTwo(int64(uint32(c)))) {
  8069  			break
  8070  		}
  8071  		v.reset(OpMIPSSRLconst)
  8072  		v.AuxInt = 32 - log2(int64(uint32(c)))
  8073  		v.AddArg(x)
  8074  		return true
  8075  	}
  8076  	return false
  8077  }
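// Editorial sketch (not part of the generated rules): Select0 of MULTU is
// the upper 32 bits of the 64-bit unsigned product, so multiplying by a
// power of two 1<<k leaves only x>>(32-k) in the high word, which is what
// the SRLconst rewrites above exploit. A minimal check of that identity,
// using the hypothetical name multuHighPow2Sketch:
func multuHighPow2Sketch(x uint32, k uint) bool {
	c := uint32(1) << (k % 32)                  // a 32-bit power of two
	hi := uint32((uint64(x) * uint64(c)) >> 32) // Select0 (MULTU x c)
	return hi == x>>(32-k%32)                   // SRLconst [32-log2(c)] x
}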
  8078  func rewriteValueMIPS_OpSelect0_10(v *Value) bool {
  8079  	// match: (Select0 (MULTU (MOVWconst [c]) (MOVWconst [d])))
  8080  	// cond:
  8081  	// result: (MOVWconst [int64(int32((uint64(uint32(c))*uint64(uint32(d)))>>32))])
  8082  	for {
  8083  		v_0 := v.Args[0]
  8084  		if v_0.Op != OpMIPSMULTU {
  8085  			break
  8086  		}
  8087  		v_0_0 := v_0.Args[0]
  8088  		if v_0_0.Op != OpMIPSMOVWconst {
  8089  			break
  8090  		}
  8091  		c := v_0_0.AuxInt
  8092  		v_0_1 := v_0.Args[1]
  8093  		if v_0_1.Op != OpMIPSMOVWconst {
  8094  			break
  8095  		}
  8096  		d := v_0_1.AuxInt
  8097  		v.reset(OpMIPSMOVWconst)
  8098  		v.AuxInt = int64(int32((uint64(uint32(c)) * uint64(uint32(d))) >> 32))
  8099  		return true
  8100  	}
  8101  	// match: (Select0 (MULTU (MOVWconst [d]) (MOVWconst [c])))
  8102  	// cond:
  8103  	// result: (MOVWconst [int64(int32((uint64(uint32(c))*uint64(uint32(d)))>>32))])
  8104  	for {
  8105  		v_0 := v.Args[0]
  8106  		if v_0.Op != OpMIPSMULTU {
  8107  			break
  8108  		}
  8109  		v_0_0 := v_0.Args[0]
  8110  		if v_0_0.Op != OpMIPSMOVWconst {
  8111  			break
  8112  		}
  8113  		d := v_0_0.AuxInt
  8114  		v_0_1 := v_0.Args[1]
  8115  		if v_0_1.Op != OpMIPSMOVWconst {
  8116  			break
  8117  		}
  8118  		c := v_0_1.AuxInt
  8119  		v.reset(OpMIPSMOVWconst)
  8120  		v.AuxInt = int64(int32((uint64(uint32(c)) * uint64(uint32(d))) >> 32))
  8121  		return true
  8122  	}
  8123  	// match: (Select0 (DIV (MOVWconst [c]) (MOVWconst [d])))
  8124  	// cond:
  8125  	// result: (MOVWconst [int64(int32(c)%int32(d))])
  8126  	for {
  8127  		v_0 := v.Args[0]
  8128  		if v_0.Op != OpMIPSDIV {
  8129  			break
  8130  		}
  8131  		v_0_0 := v_0.Args[0]
  8132  		if v_0_0.Op != OpMIPSMOVWconst {
  8133  			break
  8134  		}
  8135  		c := v_0_0.AuxInt
  8136  		v_0_1 := v_0.Args[1]
  8137  		if v_0_1.Op != OpMIPSMOVWconst {
  8138  			break
  8139  		}
  8140  		d := v_0_1.AuxInt
  8141  		v.reset(OpMIPSMOVWconst)
  8142  		v.AuxInt = int64(int32(c) % int32(d))
  8143  		return true
  8144  	}
  8145  	// match: (Select0 (DIVU (MOVWconst [c]) (MOVWconst [d])))
  8146  	// cond:
  8147  	// result: (MOVWconst [int64(int32(uint32(c)%uint32(d)))])
  8148  	for {
  8149  		v_0 := v.Args[0]
  8150  		if v_0.Op != OpMIPSDIVU {
  8151  			break
  8152  		}
  8153  		v_0_0 := v_0.Args[0]
  8154  		if v_0_0.Op != OpMIPSMOVWconst {
  8155  			break
  8156  		}
  8157  		c := v_0_0.AuxInt
  8158  		v_0_1 := v_0.Args[1]
  8159  		if v_0_1.Op != OpMIPSMOVWconst {
  8160  			break
  8161  		}
  8162  		d := v_0_1.AuxInt
  8163  		v.reset(OpMIPSMOVWconst)
  8164  		v.AuxInt = int64(int32(uint32(c) % uint32(d)))
  8165  		return true
  8166  	}
  8167  	return false
  8168  }
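// Editorial sketch (not part of the generated rules): when both MULTU
// operands are constants, the aux values are 32-bit numbers stored
// sign-extended in an int64, so they must be narrowed to uint32 before
// widening to compute the high word of the product (otherwise a constant
// such as -1, i.e. 0xFFFFFFFF, folds incorrectly). A minimal model of the
// narrowing used above, with the hypothetical name multuHighConstSketch:
func multuHighConstSketch(c, d int64) int64 {
	hi := (uint64(uint32(c)) * uint64(uint32(d))) >> 32
	return int64(int32(hi)) // MOVWconst aux values are stored sign-extended
}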
  8169  func rewriteValueMIPS_OpSelect1_0(v *Value) bool {
  8170  	b := v.Block
  8171  	_ = b
  8172  	types := &b.Func.Config.Types
  8173  	_ = types
  8174  	// match: (Select1 (Add32carry <t> x y))
  8175  	// cond:
  8176  	// result: (SGTU <types.Bool> x (ADD <t.FieldType(0)> x y))
  8177  	for {
  8178  		v_0 := v.Args[0]
  8179  		if v_0.Op != OpAdd32carry {
  8180  			break
  8181  		}
  8182  		t := v_0.Type
  8183  		x := v_0.Args[0]
  8184  		y := v_0.Args[1]
  8185  		v.reset(OpMIPSSGTU)
  8186  		v.Type = types.Bool
  8187  		v.AddArg(x)
  8188  		v0 := b.NewValue0(v.Pos, OpMIPSADD, t.FieldType(0))
  8189  		v0.AddArg(x)
  8190  		v0.AddArg(y)
  8191  		v.AddArg(v0)
  8192  		return true
  8193  	}
  8194  	// match: (Select1 (Sub32carry <t> x y))
  8195  	// cond:
  8196  	// result: (SGTU <types.Bool> (SUB <t.FieldType(0)> x y) x)
  8197  	for {
  8198  		v_0 := v.Args[0]
  8199  		if v_0.Op != OpSub32carry {
  8200  			break
  8201  		}
  8202  		t := v_0.Type
  8203  		x := v_0.Args[0]
  8204  		y := v_0.Args[1]
  8205  		v.reset(OpMIPSSGTU)
  8206  		v.Type = types.Bool
  8207  		v0 := b.NewValue0(v.Pos, OpMIPSSUB, t.FieldType(0))
  8208  		v0.AddArg(x)
  8209  		v0.AddArg(y)
  8210  		v.AddArg(v0)
  8211  		v.AddArg(x)
  8212  		return true
  8213  	}
  8214  	// match: (Select1 (MULTU (MOVWconst [0]) _))
  8215  	// cond:
  8216  	// result: (MOVWconst [0])
  8217  	for {
  8218  		v_0 := v.Args[0]
  8219  		if v_0.Op != OpMIPSMULTU {
  8220  			break
  8221  		}
  8222  		v_0_0 := v_0.Args[0]
  8223  		if v_0_0.Op != OpMIPSMOVWconst {
  8224  			break
  8225  		}
  8226  		if v_0_0.AuxInt != 0 {
  8227  			break
  8228  		}
  8229  		v.reset(OpMIPSMOVWconst)
  8230  		v.AuxInt = 0
  8231  		return true
  8232  	}
  8233  	// match: (Select1 (MULTU _ (MOVWconst [0])))
  8234  	// cond:
  8235  	// result: (MOVWconst [0])
  8236  	for {
  8237  		v_0 := v.Args[0]
  8238  		if v_0.Op != OpMIPSMULTU {
  8239  			break
  8240  		}
  8241  		v_0_1 := v_0.Args[1]
  8242  		if v_0_1.Op != OpMIPSMOVWconst {
  8243  			break
  8244  		}
  8245  		if v_0_1.AuxInt != 0 {
  8246  			break
  8247  		}
  8248  		v.reset(OpMIPSMOVWconst)
  8249  		v.AuxInt = 0
  8250  		return true
  8251  	}
  8252  	// match: (Select1 (MULTU (MOVWconst [1]) x))
  8253  	// cond:
  8254  	// result: x
  8255  	for {
  8256  		v_0 := v.Args[0]
  8257  		if v_0.Op != OpMIPSMULTU {
  8258  			break
  8259  		}
  8260  		v_0_0 := v_0.Args[0]
  8261  		if v_0_0.Op != OpMIPSMOVWconst {
  8262  			break
  8263  		}
  8264  		if v_0_0.AuxInt != 1 {
  8265  			break
  8266  		}
  8267  		x := v_0.Args[1]
  8268  		v.reset(OpCopy)
  8269  		v.Type = x.Type
  8270  		v.AddArg(x)
  8271  		return true
  8272  	}
  8273  	// match: (Select1 (MULTU x (MOVWconst [1])))
  8274  	// cond:
  8275  	// result: x
  8276  	for {
  8277  		v_0 := v.Args[0]
  8278  		if v_0.Op != OpMIPSMULTU {
  8279  			break
  8280  		}
  8281  		x := v_0.Args[0]
  8282  		v_0_1 := v_0.Args[1]
  8283  		if v_0_1.Op != OpMIPSMOVWconst {
  8284  			break
  8285  		}
  8286  		if v_0_1.AuxInt != 1 {
  8287  			break
  8288  		}
  8289  		v.reset(OpCopy)
  8290  		v.Type = x.Type
  8291  		v.AddArg(x)
  8292  		return true
  8293  	}
  8294  	// match: (Select1 (MULTU (MOVWconst [-1]) x))
  8295  	// cond:
  8296  	// result: (NEG <x.Type> x)
  8297  	for {
  8298  		v_0 := v.Args[0]
  8299  		if v_0.Op != OpMIPSMULTU {
  8300  			break
  8301  		}
  8302  		v_0_0 := v_0.Args[0]
  8303  		if v_0_0.Op != OpMIPSMOVWconst {
  8304  			break
  8305  		}
  8306  		if v_0_0.AuxInt != -1 {
  8307  			break
  8308  		}
  8309  		x := v_0.Args[1]
  8310  		v.reset(OpMIPSNEG)
  8311  		v.Type = x.Type
  8312  		v.AddArg(x)
  8313  		return true
  8314  	}
  8315  	// match: (Select1 (MULTU x (MOVWconst [-1])))
  8316  	// cond:
  8317  	// result: (NEG <x.Type> x)
  8318  	for {
  8319  		v_0 := v.Args[0]
  8320  		if v_0.Op != OpMIPSMULTU {
  8321  			break
  8322  		}
  8323  		x := v_0.Args[0]
  8324  		v_0_1 := v_0.Args[1]
  8325  		if v_0_1.Op != OpMIPSMOVWconst {
  8326  			break
  8327  		}
  8328  		if v_0_1.AuxInt != -1 {
  8329  			break
  8330  		}
  8331  		v.reset(OpMIPSNEG)
  8332  		v.Type = x.Type
  8333  		v.AddArg(x)
  8334  		return true
  8335  	}
  8336  	// match: (Select1 (MULTU (MOVWconst [c]) x))
  8337  	// cond: isPowerOfTwo(int64(uint32(c)))
  8338  	// result: (SLLconst [log2(int64(uint32(c)))] x)
  8339  	for {
  8340  		v_0 := v.Args[0]
  8341  		if v_0.Op != OpMIPSMULTU {
  8342  			break
  8343  		}
  8344  		v_0_0 := v_0.Args[0]
  8345  		if v_0_0.Op != OpMIPSMOVWconst {
  8346  			break
  8347  		}
  8348  		c := v_0_0.AuxInt
  8349  		x := v_0.Args[1]
  8350  		if !(isPowerOfTwo(int64(uint32(c)))) {
  8351  			break
  8352  		}
  8353  		v.reset(OpMIPSSLLconst)
  8354  		v.AuxInt = log2(int64(uint32(c)))
  8355  		v.AddArg(x)
  8356  		return true
  8357  	}
  8358  	// match: (Select1 (MULTU x (MOVWconst [c])))
  8359  	// cond: isPowerOfTwo(int64(uint32(c)))
  8360  	// result: (SLLconst [log2(int64(uint32(c)))] x)
  8361  	for {
  8362  		v_0 := v.Args[0]
  8363  		if v_0.Op != OpMIPSMULTU {
  8364  			break
  8365  		}
  8366  		x := v_0.Args[0]
  8367  		v_0_1 := v_0.Args[1]
  8368  		if v_0_1.Op != OpMIPSMOVWconst {
  8369  			break
  8370  		}
  8371  		c := v_0_1.AuxInt
  8372  		if !(isPowerOfTwo(int64(uint32(c)))) {
  8373  			break
  8374  		}
  8375  		v.reset(OpMIPSSLLconst)
  8376  		v.AuxInt = log2(int64(uint32(c)))
  8377  		v.AddArg(x)
  8378  		return true
  8379  	}
  8380  	return false
  8381  }
  8382  func rewriteValueMIPS_OpSelect1_10(v *Value) bool {
  8383  	// match: (Select1 (MULTU (MOVWconst [c]) (MOVWconst [d])))
  8384  	// cond:
  8385  	// result: (MOVWconst [int64(int32(uint32(c)*uint32(d)))])
  8386  	for {
  8387  		v_0 := v.Args[0]
  8388  		if v_0.Op != OpMIPSMULTU {
  8389  			break
  8390  		}
  8391  		v_0_0 := v_0.Args[0]
  8392  		if v_0_0.Op != OpMIPSMOVWconst {
  8393  			break
  8394  		}
  8395  		c := v_0_0.AuxInt
  8396  		v_0_1 := v_0.Args[1]
  8397  		if v_0_1.Op != OpMIPSMOVWconst {
  8398  			break
  8399  		}
  8400  		d := v_0_1.AuxInt
  8401  		v.reset(OpMIPSMOVWconst)
  8402  		v.AuxInt = int64(int32(uint32(c) * uint32(d)))
  8403  		return true
  8404  	}
  8405  	// match: (Select1 (MULTU (MOVWconst [d]) (MOVWconst [c])))
  8406  	// cond:
  8407  	// result: (MOVWconst [int64(int32(uint32(c)*uint32(d)))])
  8408  	for {
  8409  		v_0 := v.Args[0]
  8410  		if v_0.Op != OpMIPSMULTU {
  8411  			break
  8412  		}
  8413  		v_0_0 := v_0.Args[0]
  8414  		if v_0_0.Op != OpMIPSMOVWconst {
  8415  			break
  8416  		}
  8417  		d := v_0_0.AuxInt
  8418  		v_0_1 := v_0.Args[1]
  8419  		if v_0_1.Op != OpMIPSMOVWconst {
  8420  			break
  8421  		}
  8422  		c := v_0_1.AuxInt
  8423  		v.reset(OpMIPSMOVWconst)
  8424  		v.AuxInt = int64(int32(uint32(c) * uint32(d)))
  8425  		return true
  8426  	}
  8427  	// match: (Select1 (DIV (MOVWconst [c]) (MOVWconst [d])))
  8428  	// cond:
  8429  	// result: (MOVWconst [int64(int32(c)/int32(d))])
  8430  	for {
  8431  		v_0 := v.Args[0]
  8432  		if v_0.Op != OpMIPSDIV {
  8433  			break
  8434  		}
  8435  		v_0_0 := v_0.Args[0]
  8436  		if v_0_0.Op != OpMIPSMOVWconst {
  8437  			break
  8438  		}
  8439  		c := v_0_0.AuxInt
  8440  		v_0_1 := v_0.Args[1]
  8441  		if v_0_1.Op != OpMIPSMOVWconst {
  8442  			break
  8443  		}
  8444  		d := v_0_1.AuxInt
  8445  		v.reset(OpMIPSMOVWconst)
  8446  		v.AuxInt = int64(int32(c) / int32(d))
  8447  		return true
  8448  	}
  8449  	// match: (Select1 (DIVU (MOVWconst [c]) (MOVWconst [d])))
  8450  	// cond:
  8451  	// result: (MOVWconst [int64(int32(uint32(c)/uint32(d)))])
  8452  	for {
  8453  		v_0 := v.Args[0]
  8454  		if v_0.Op != OpMIPSDIVU {
  8455  			break
  8456  		}
  8457  		v_0_0 := v_0.Args[0]
  8458  		if v_0_0.Op != OpMIPSMOVWconst {
  8459  			break
  8460  		}
  8461  		c := v_0_0.AuxInt
  8462  		v_0_1 := v_0.Args[1]
  8463  		if v_0_1.Op != OpMIPSMOVWconst {
  8464  			break
  8465  		}
  8466  		d := v_0_1.AuxInt
  8467  		v.reset(OpMIPSMOVWconst)
  8468  		v.AuxInt = int64(int32(uint32(c) / uint32(d)))
  8469  		return true
  8470  	}
  8471  	return false
  8472  }
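// Editorial sketch (not part of the generated rules): Select1 of MULTU is
// the low 32 bits of the product, and 32-bit unsigned multiplication already
// wraps modulo 2^32, so the constant folds above need no 64-bit widening.
// A minimal model, using the hypothetical name multuLowConstSketch:
func multuLowConstSketch(c, d int64) int64 {
	return int64(int32(uint32(c) * uint32(d))) // matches the MOVWconst fold above
}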
  8473  func rewriteValueMIPS_OpSignExt16to32_0(v *Value) bool {
  8474  	// match: (SignExt16to32 x)
  8475  	// cond:
  8476  	// result: (MOVHreg x)
  8477  	for {
  8478  		x := v.Args[0]
  8479  		v.reset(OpMIPSMOVHreg)
  8480  		v.AddArg(x)
  8481  		return true
  8482  	}
  8483  }
  8484  func rewriteValueMIPS_OpSignExt8to16_0(v *Value) bool {
  8485  	// match: (SignExt8to16 x)
  8486  	// cond:
  8487  	// result: (MOVBreg x)
  8488  	for {
  8489  		x := v.Args[0]
  8490  		v.reset(OpMIPSMOVBreg)
  8491  		v.AddArg(x)
  8492  		return true
  8493  	}
  8494  }
  8495  func rewriteValueMIPS_OpSignExt8to32_0(v *Value) bool {
  8496  	// match: (SignExt8to32 x)
  8497  	// cond:
  8498  	// result: (MOVBreg x)
  8499  	for {
  8500  		x := v.Args[0]
  8501  		v.reset(OpMIPSMOVBreg)
  8502  		v.AddArg(x)
  8503  		return true
  8504  	}
  8505  }
  8506  func rewriteValueMIPS_OpSignmask_0(v *Value) bool {
  8507  	// match: (Signmask x)
  8508  	// cond:
  8509  	// result: (SRAconst x [31])
  8510  	for {
  8511  		x := v.Args[0]
  8512  		v.reset(OpMIPSSRAconst)
  8513  		v.AuxInt = 31
  8514  		v.AddArg(x)
  8515  		return true
  8516  	}
  8517  }
  8518  func rewriteValueMIPS_OpSlicemask_0(v *Value) bool {
  8519  	b := v.Block
  8520  	_ = b
  8521  	// match: (Slicemask <t> x)
  8522  	// cond:
  8523  	// result: (SRAconst (NEG <t> x) [31])
  8524  	for {
  8525  		t := v.Type
  8526  		x := v.Args[0]
  8527  		v.reset(OpMIPSSRAconst)
  8528  		v.AuxInt = 31
  8529  		v0 := b.NewValue0(v.Pos, OpMIPSNEG, t)
  8530  		v0.AddArg(x)
  8531  		v.AddArg(v0)
  8532  		return true
  8533  	}
  8534  }
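// Editorial sketch (not part of the generated rules): Slicemask must return
// all ones for a positive length and zero for a zero length (lengths are
// never negative here), which (SRAconst (NEG <t> x) [31]) achieves because
// negating a positive value sets the sign bit. A minimal model, using the
// hypothetical name slicemaskSketch:
func slicemaskSketch(x int32) int32 {
	return -x >> 31 // assumes x >= 0, as for slice lengths and capacities
}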
  8535  func rewriteValueMIPS_OpSqrt_0(v *Value) bool {
  8536  	// match: (Sqrt x)
  8537  	// cond:
  8538  	// result: (SQRTD x)
  8539  	for {
  8540  		x := v.Args[0]
  8541  		v.reset(OpMIPSSQRTD)
  8542  		v.AddArg(x)
  8543  		return true
  8544  	}
  8545  }
  8546  func rewriteValueMIPS_OpStaticCall_0(v *Value) bool {
  8547  	// match: (StaticCall [argwid] {target} mem)
  8548  	// cond:
  8549  	// result: (CALLstatic [argwid] {target} mem)
  8550  	for {
  8551  		argwid := v.AuxInt
  8552  		target := v.Aux
  8553  		mem := v.Args[0]
  8554  		v.reset(OpMIPSCALLstatic)
  8555  		v.AuxInt = argwid
  8556  		v.Aux = target
  8557  		v.AddArg(mem)
  8558  		return true
  8559  	}
  8560  }
  8561  func rewriteValueMIPS_OpStore_0(v *Value) bool {
  8562  	// match: (Store {t} ptr val mem)
  8563  	// cond: t.(Type).Size() == 1
  8564  	// result: (MOVBstore ptr val mem)
  8565  	for {
  8566  		t := v.Aux
  8567  		ptr := v.Args[0]
  8568  		val := v.Args[1]
  8569  		mem := v.Args[2]
  8570  		if !(t.(Type).Size() == 1) {
  8571  			break
  8572  		}
  8573  		v.reset(OpMIPSMOVBstore)
  8574  		v.AddArg(ptr)
  8575  		v.AddArg(val)
  8576  		v.AddArg(mem)
  8577  		return true
  8578  	}
  8579  	// match: (Store {t} ptr val mem)
  8580  	// cond: t.(Type).Size() == 2
  8581  	// result: (MOVHstore ptr val mem)
  8582  	for {
  8583  		t := v.Aux
  8584  		ptr := v.Args[0]
  8585  		val := v.Args[1]
  8586  		mem := v.Args[2]
  8587  		if !(t.(Type).Size() == 2) {
  8588  			break
  8589  		}
  8590  		v.reset(OpMIPSMOVHstore)
  8591  		v.AddArg(ptr)
  8592  		v.AddArg(val)
  8593  		v.AddArg(mem)
  8594  		return true
  8595  	}
  8596  	// match: (Store {t} ptr val mem)
  8597  	// cond: t.(Type).Size() == 4 && !is32BitFloat(val.Type)
  8598  	// result: (MOVWstore ptr val mem)
  8599  	for {
  8600  		t := v.Aux
  8601  		ptr := v.Args[0]
  8602  		val := v.Args[1]
  8603  		mem := v.Args[2]
  8604  		if !(t.(Type).Size() == 4 && !is32BitFloat(val.Type)) {
  8605  			break
  8606  		}
  8607  		v.reset(OpMIPSMOVWstore)
  8608  		v.AddArg(ptr)
  8609  		v.AddArg(val)
  8610  		v.AddArg(mem)
  8611  		return true
  8612  	}
  8613  	// match: (Store {t} ptr val mem)
  8614  	// cond: t.(Type).Size() == 4 && is32BitFloat(val.Type)
  8615  	// result: (MOVFstore ptr val mem)
  8616  	for {
  8617  		t := v.Aux
  8618  		ptr := v.Args[0]
  8619  		val := v.Args[1]
  8620  		mem := v.Args[2]
  8621  		if !(t.(Type).Size() == 4 && is32BitFloat(val.Type)) {
  8622  			break
  8623  		}
  8624  		v.reset(OpMIPSMOVFstore)
  8625  		v.AddArg(ptr)
  8626  		v.AddArg(val)
  8627  		v.AddArg(mem)
  8628  		return true
  8629  	}
  8630  	// match: (Store {t} ptr val mem)
  8631  	// cond: t.(Type).Size() == 8 && is64BitFloat(val.Type)
  8632  	// result: (MOVDstore ptr val mem)
  8633  	for {
  8634  		t := v.Aux
  8635  		ptr := v.Args[0]
  8636  		val := v.Args[1]
  8637  		mem := v.Args[2]
  8638  		if !(t.(Type).Size() == 8 && is64BitFloat(val.Type)) {
  8639  			break
  8640  		}
  8641  		v.reset(OpMIPSMOVDstore)
  8642  		v.AddArg(ptr)
  8643  		v.AddArg(val)
  8644  		v.AddArg(mem)
  8645  		return true
  8646  	}
  8647  	return false
  8648  }
  8649  func rewriteValueMIPS_OpSub16_0(v *Value) bool {
  8650  	// match: (Sub16 x y)
  8651  	// cond:
  8652  	// result: (SUB x y)
  8653  	for {
  8654  		x := v.Args[0]
  8655  		y := v.Args[1]
  8656  		v.reset(OpMIPSSUB)
  8657  		v.AddArg(x)
  8658  		v.AddArg(y)
  8659  		return true
  8660  	}
  8661  }
  8662  func rewriteValueMIPS_OpSub32_0(v *Value) bool {
  8663  	// match: (Sub32 x y)
  8664  	// cond:
  8665  	// result: (SUB x y)
  8666  	for {
  8667  		x := v.Args[0]
  8668  		y := v.Args[1]
  8669  		v.reset(OpMIPSSUB)
  8670  		v.AddArg(x)
  8671  		v.AddArg(y)
  8672  		return true
  8673  	}
  8674  }
  8675  func rewriteValueMIPS_OpSub32F_0(v *Value) bool {
  8676  	// match: (Sub32F x y)
  8677  	// cond:
  8678  	// result: (SUBF x y)
  8679  	for {
  8680  		x := v.Args[0]
  8681  		y := v.Args[1]
  8682  		v.reset(OpMIPSSUBF)
  8683  		v.AddArg(x)
  8684  		v.AddArg(y)
  8685  		return true
  8686  	}
  8687  }
  8688  func rewriteValueMIPS_OpSub32withcarry_0(v *Value) bool {
  8689  	b := v.Block
  8690  	_ = b
  8691  	// match: (Sub32withcarry <t> x y c)
  8692  	// cond:
  8693  	// result: (SUB (SUB <t> x y) c)
  8694  	for {
  8695  		t := v.Type
  8696  		x := v.Args[0]
  8697  		y := v.Args[1]
  8698  		c := v.Args[2]
  8699  		v.reset(OpMIPSSUB)
  8700  		v0 := b.NewValue0(v.Pos, OpMIPSSUB, t)
  8701  		v0.AddArg(x)
  8702  		v0.AddArg(y)
  8703  		v.AddArg(v0)
  8704  		v.AddArg(c)
  8705  		return true
  8706  	}
  8707  }
  8708  func rewriteValueMIPS_OpSub64F_0(v *Value) bool {
  8709  	// match: (Sub64F x y)
  8710  	// cond:
  8711  	// result: (SUBD x y)
  8712  	for {
  8713  		x := v.Args[0]
  8714  		y := v.Args[1]
  8715  		v.reset(OpMIPSSUBD)
  8716  		v.AddArg(x)
  8717  		v.AddArg(y)
  8718  		return true
  8719  	}
  8720  }
  8721  func rewriteValueMIPS_OpSub8_0(v *Value) bool {
  8722  	// match: (Sub8 x y)
  8723  	// cond:
  8724  	// result: (SUB x y)
  8725  	for {
  8726  		x := v.Args[0]
  8727  		y := v.Args[1]
  8728  		v.reset(OpMIPSSUB)
  8729  		v.AddArg(x)
  8730  		v.AddArg(y)
  8731  		return true
  8732  	}
  8733  }
  8734  func rewriteValueMIPS_OpSubPtr_0(v *Value) bool {
  8735  	// match: (SubPtr x y)
  8736  	// cond:
  8737  	// result: (SUB x y)
  8738  	for {
  8739  		x := v.Args[0]
  8740  		y := v.Args[1]
  8741  		v.reset(OpMIPSSUB)
  8742  		v.AddArg(x)
  8743  		v.AddArg(y)
  8744  		return true
  8745  	}
  8746  }
  8747  func rewriteValueMIPS_OpTrunc16to8_0(v *Value) bool {
  8748  	// match: (Trunc16to8 x)
  8749  	// cond:
  8750  	// result: x
  8751  	for {
  8752  		x := v.Args[0]
  8753  		v.reset(OpCopy)
  8754  		v.Type = x.Type
  8755  		v.AddArg(x)
  8756  		return true
  8757  	}
  8758  }
  8759  func rewriteValueMIPS_OpTrunc32to16_0(v *Value) bool {
  8760  	// match: (Trunc32to16 x)
  8761  	// cond:
  8762  	// result: x
  8763  	for {
  8764  		x := v.Args[0]
  8765  		v.reset(OpCopy)
  8766  		v.Type = x.Type
  8767  		v.AddArg(x)
  8768  		return true
  8769  	}
  8770  }
  8771  func rewriteValueMIPS_OpTrunc32to8_0(v *Value) bool {
  8772  	// match: (Trunc32to8 x)
  8773  	// cond:
  8774  	// result: x
  8775  	for {
  8776  		x := v.Args[0]
  8777  		v.reset(OpCopy)
  8778  		v.Type = x.Type
  8779  		v.AddArg(x)
  8780  		return true
  8781  	}
  8782  }
  8783  func rewriteValueMIPS_OpXor16_0(v *Value) bool {
  8784  	// match: (Xor16 x y)
  8785  	// cond:
  8786  	// result: (XOR x y)
  8787  	for {
  8788  		x := v.Args[0]
  8789  		y := v.Args[1]
  8790  		v.reset(OpMIPSXOR)
  8791  		v.AddArg(x)
  8792  		v.AddArg(y)
  8793  		return true
  8794  	}
  8795  }
  8796  func rewriteValueMIPS_OpXor32_0(v *Value) bool {
  8797  	// match: (Xor32 x y)
  8798  	// cond:
  8799  	// result: (XOR x y)
  8800  	for {
  8801  		x := v.Args[0]
  8802  		y := v.Args[1]
  8803  		v.reset(OpMIPSXOR)
  8804  		v.AddArg(x)
  8805  		v.AddArg(y)
  8806  		return true
  8807  	}
  8808  }
  8809  func rewriteValueMIPS_OpXor8_0(v *Value) bool {
  8810  	// match: (Xor8 x y)
  8811  	// cond:
  8812  	// result: (XOR x y)
  8813  	for {
  8814  		x := v.Args[0]
  8815  		y := v.Args[1]
  8816  		v.reset(OpMIPSXOR)
  8817  		v.AddArg(x)
  8818  		v.AddArg(y)
  8819  		return true
  8820  	}
  8821  }
  8822  func rewriteValueMIPS_OpZero_0(v *Value) bool {
  8823  	b := v.Block
  8824  	_ = b
  8825  	types := &b.Func.Config.Types
  8826  	_ = types
  8827  	// match: (Zero [0] _ mem)
  8828  	// cond:
  8829  	// result: mem
  8830  	for {
  8831  		if v.AuxInt != 0 {
  8832  			break
  8833  		}
  8834  		mem := v.Args[1]
  8835  		v.reset(OpCopy)
  8836  		v.Type = mem.Type
  8837  		v.AddArg(mem)
  8838  		return true
  8839  	}
  8840  	// match: (Zero [1] ptr mem)
  8841  	// cond:
  8842  	// result: (MOVBstore ptr (MOVWconst [0]) mem)
  8843  	for {
  8844  		if v.AuxInt != 1 {
  8845  			break
  8846  		}
  8847  		ptr := v.Args[0]
  8848  		mem := v.Args[1]
  8849  		v.reset(OpMIPSMOVBstore)
  8850  		v.AddArg(ptr)
  8851  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  8852  		v0.AuxInt = 0
  8853  		v.AddArg(v0)
  8854  		v.AddArg(mem)
  8855  		return true
  8856  	}
  8857  	// match: (Zero [2] {t} ptr mem)
  8858  	// cond: t.(Type).Alignment()%2 == 0
  8859  	// result: (MOVHstore ptr (MOVWconst [0]) mem)
  8860  	for {
  8861  		if v.AuxInt != 2 {
  8862  			break
  8863  		}
  8864  		t := v.Aux
  8865  		ptr := v.Args[0]
  8866  		mem := v.Args[1]
  8867  		if !(t.(Type).Alignment()%2 == 0) {
  8868  			break
  8869  		}
  8870  		v.reset(OpMIPSMOVHstore)
  8871  		v.AddArg(ptr)
  8872  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  8873  		v0.AuxInt = 0
  8874  		v.AddArg(v0)
  8875  		v.AddArg(mem)
  8876  		return true
  8877  	}
  8878  	// match: (Zero [2] ptr mem)
  8879  	// cond:
  8880  	// result: (MOVBstore [1] ptr (MOVWconst [0]) 		(MOVBstore [0] ptr (MOVWconst [0]) mem))
  8881  	for {
  8882  		if v.AuxInt != 2 {
  8883  			break
  8884  		}
  8885  		ptr := v.Args[0]
  8886  		mem := v.Args[1]
  8887  		v.reset(OpMIPSMOVBstore)
  8888  		v.AuxInt = 1
  8889  		v.AddArg(ptr)
  8890  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  8891  		v0.AuxInt = 0
  8892  		v.AddArg(v0)
  8893  		v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, TypeMem)
  8894  		v1.AuxInt = 0
  8895  		v1.AddArg(ptr)
  8896  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  8897  		v2.AuxInt = 0
  8898  		v1.AddArg(v2)
  8899  		v1.AddArg(mem)
  8900  		v.AddArg(v1)
  8901  		return true
  8902  	}
  8903  	// match: (Zero [4] {t} ptr mem)
  8904  	// cond: t.(Type).Alignment()%4 == 0
  8905  	// result: (MOVWstore ptr (MOVWconst [0]) mem)
  8906  	for {
  8907  		if v.AuxInt != 4 {
  8908  			break
  8909  		}
  8910  		t := v.Aux
  8911  		ptr := v.Args[0]
  8912  		mem := v.Args[1]
  8913  		if !(t.(Type).Alignment()%4 == 0) {
  8914  			break
  8915  		}
  8916  		v.reset(OpMIPSMOVWstore)
  8917  		v.AddArg(ptr)
  8918  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  8919  		v0.AuxInt = 0
  8920  		v.AddArg(v0)
  8921  		v.AddArg(mem)
  8922  		return true
  8923  	}
  8924  	// match: (Zero [4] {t} ptr mem)
  8925  	// cond: t.(Type).Alignment()%2 == 0
  8926  	// result: (MOVHstore [2] ptr (MOVWconst [0]) 		(MOVHstore [0] ptr (MOVWconst [0]) mem))
  8927  	for {
  8928  		if v.AuxInt != 4 {
  8929  			break
  8930  		}
  8931  		t := v.Aux
  8932  		ptr := v.Args[0]
  8933  		mem := v.Args[1]
  8934  		if !(t.(Type).Alignment()%2 == 0) {
  8935  			break
  8936  		}
  8937  		v.reset(OpMIPSMOVHstore)
  8938  		v.AuxInt = 2
  8939  		v.AddArg(ptr)
  8940  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  8941  		v0.AuxInt = 0
  8942  		v.AddArg(v0)
  8943  		v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, TypeMem)
  8944  		v1.AuxInt = 0
  8945  		v1.AddArg(ptr)
  8946  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  8947  		v2.AuxInt = 0
  8948  		v1.AddArg(v2)
  8949  		v1.AddArg(mem)
  8950  		v.AddArg(v1)
  8951  		return true
  8952  	}
  8953  	// match: (Zero [4] ptr mem)
  8954  	// cond:
  8955  	// result: (MOVBstore [3] ptr (MOVWconst [0]) 		(MOVBstore [2] ptr (MOVWconst [0]) 			(MOVBstore [1] ptr (MOVWconst [0]) 				(MOVBstore [0] ptr (MOVWconst [0]) mem))))
  8956  	for {
  8957  		if v.AuxInt != 4 {
  8958  			break
  8959  		}
  8960  		ptr := v.Args[0]
  8961  		mem := v.Args[1]
  8962  		v.reset(OpMIPSMOVBstore)
  8963  		v.AuxInt = 3
  8964  		v.AddArg(ptr)
  8965  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  8966  		v0.AuxInt = 0
  8967  		v.AddArg(v0)
  8968  		v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, TypeMem)
  8969  		v1.AuxInt = 2
  8970  		v1.AddArg(ptr)
  8971  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  8972  		v2.AuxInt = 0
  8973  		v1.AddArg(v2)
  8974  		v3 := b.NewValue0(v.Pos, OpMIPSMOVBstore, TypeMem)
  8975  		v3.AuxInt = 1
  8976  		v3.AddArg(ptr)
  8977  		v4 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  8978  		v4.AuxInt = 0
  8979  		v3.AddArg(v4)
  8980  		v5 := b.NewValue0(v.Pos, OpMIPSMOVBstore, TypeMem)
  8981  		v5.AuxInt = 0
  8982  		v5.AddArg(ptr)
  8983  		v6 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  8984  		v6.AuxInt = 0
  8985  		v5.AddArg(v6)
  8986  		v5.AddArg(mem)
  8987  		v3.AddArg(v5)
  8988  		v1.AddArg(v3)
  8989  		v.AddArg(v1)
  8990  		return true
  8991  	}
  8992  	// match: (Zero [3] ptr mem)
  8993  	// cond:
  8994  	// result: (MOVBstore [2] ptr (MOVWconst [0]) 		(MOVBstore [1] ptr (MOVWconst [0]) 			(MOVBstore [0] ptr (MOVWconst [0]) mem)))
  8995  	for {
  8996  		if v.AuxInt != 3 {
  8997  			break
  8998  		}
  8999  		ptr := v.Args[0]
  9000  		mem := v.Args[1]
  9001  		v.reset(OpMIPSMOVBstore)
  9002  		v.AuxInt = 2
  9003  		v.AddArg(ptr)
  9004  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  9005  		v0.AuxInt = 0
  9006  		v.AddArg(v0)
  9007  		v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, TypeMem)
  9008  		v1.AuxInt = 1
  9009  		v1.AddArg(ptr)
  9010  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  9011  		v2.AuxInt = 0
  9012  		v1.AddArg(v2)
  9013  		v3 := b.NewValue0(v.Pos, OpMIPSMOVBstore, TypeMem)
  9014  		v3.AuxInt = 0
  9015  		v3.AddArg(ptr)
  9016  		v4 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  9017  		v4.AuxInt = 0
  9018  		v3.AddArg(v4)
  9019  		v3.AddArg(mem)
  9020  		v1.AddArg(v3)
  9021  		v.AddArg(v1)
  9022  		return true
  9023  	}
  9024  	// match: (Zero [6] {t} ptr mem)
  9025  	// cond: t.(Type).Alignment()%2 == 0
  9026  	// result: (MOVHstore [4] ptr (MOVWconst [0]) 		(MOVHstore [2] ptr (MOVWconst [0]) 			(MOVHstore [0] ptr (MOVWconst [0]) mem)))
  9027  	for {
  9028  		if v.AuxInt != 6 {
  9029  			break
  9030  		}
  9031  		t := v.Aux
  9032  		ptr := v.Args[0]
  9033  		mem := v.Args[1]
  9034  		if !(t.(Type).Alignment()%2 == 0) {
  9035  			break
  9036  		}
  9037  		v.reset(OpMIPSMOVHstore)
  9038  		v.AuxInt = 4
  9039  		v.AddArg(ptr)
  9040  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  9041  		v0.AuxInt = 0
  9042  		v.AddArg(v0)
  9043  		v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, TypeMem)
  9044  		v1.AuxInt = 2
  9045  		v1.AddArg(ptr)
  9046  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  9047  		v2.AuxInt = 0
  9048  		v1.AddArg(v2)
  9049  		v3 := b.NewValue0(v.Pos, OpMIPSMOVHstore, TypeMem)
  9050  		v3.AuxInt = 0
  9051  		v3.AddArg(ptr)
  9052  		v4 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  9053  		v4.AuxInt = 0
  9054  		v3.AddArg(v4)
  9055  		v3.AddArg(mem)
  9056  		v1.AddArg(v3)
  9057  		v.AddArg(v1)
  9058  		return true
  9059  	}
  9060  	// match: (Zero [8] {t} ptr mem)
  9061  	// cond: t.(Type).Alignment()%4 == 0
  9062  	// result: (MOVWstore [4] ptr (MOVWconst [0]) 			(MOVWstore [0] ptr (MOVWconst [0]) mem))
  9063  	for {
  9064  		if v.AuxInt != 8 {
  9065  			break
  9066  		}
  9067  		t := v.Aux
  9068  		ptr := v.Args[0]
  9069  		mem := v.Args[1]
  9070  		if !(t.(Type).Alignment()%4 == 0) {
  9071  			break
  9072  		}
  9073  		v.reset(OpMIPSMOVWstore)
  9074  		v.AuxInt = 4
  9075  		v.AddArg(ptr)
  9076  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  9077  		v0.AuxInt = 0
  9078  		v.AddArg(v0)
  9079  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, TypeMem)
  9080  		v1.AuxInt = 0
  9081  		v1.AddArg(ptr)
  9082  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  9083  		v2.AuxInt = 0
  9084  		v1.AddArg(v2)
  9085  		v1.AddArg(mem)
  9086  		v.AddArg(v1)
  9087  		return true
  9088  	}
  9089  	return false
  9090  }
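// Editorial sketch (not part of the generated rules): the small-size Zero
// rules above pick the widest store the pointer's alignment allows. A rough
// model of that choice (ignoring the LoweredZero fallback for large or
// unaligned sizes, and the all-byte expansion of odd sizes such as 3),
// using the hypothetical name zeroStoreWidthsSketch:
func zeroStoreWidthsSketch(size, align int64) []int64 {
	var widths []int64
	for size > 0 {
		switch {
		case size >= 4 && align%4 == 0:
			widths, size = append(widths, 4), size-4 // MOVWstore
		case size >= 2 && align%2 == 0:
			widths, size = append(widths, 2), size-2 // MOVHstore
		default:
			widths, size = append(widths, 1), size-1 // MOVBstore
		}
	}
	return widths
}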
  9091  func rewriteValueMIPS_OpZero_10(v *Value) bool {
  9092  	b := v.Block
  9093  	_ = b
  9094  	config := b.Func.Config
  9095  	_ = config
  9096  	types := &b.Func.Config.Types
  9097  	_ = types
  9098  	// match: (Zero [12] {t} ptr mem)
  9099  	// cond: t.(Type).Alignment()%4 == 0
  9100  	// result: (MOVWstore [8] ptr (MOVWconst [0]) 		(MOVWstore [4] ptr (MOVWconst [0]) 			(MOVWstore [0] ptr (MOVWconst [0]) mem)))
  9101  	for {
  9102  		if v.AuxInt != 12 {
  9103  			break
  9104  		}
  9105  		t := v.Aux
  9106  		ptr := v.Args[0]
  9107  		mem := v.Args[1]
  9108  		if !(t.(Type).Alignment()%4 == 0) {
  9109  			break
  9110  		}
  9111  		v.reset(OpMIPSMOVWstore)
  9112  		v.AuxInt = 8
  9113  		v.AddArg(ptr)
  9114  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  9115  		v0.AuxInt = 0
  9116  		v.AddArg(v0)
  9117  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, TypeMem)
  9118  		v1.AuxInt = 4
  9119  		v1.AddArg(ptr)
  9120  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  9121  		v2.AuxInt = 0
  9122  		v1.AddArg(v2)
  9123  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWstore, TypeMem)
  9124  		v3.AuxInt = 0
  9125  		v3.AddArg(ptr)
  9126  		v4 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  9127  		v4.AuxInt = 0
  9128  		v3.AddArg(v4)
  9129  		v3.AddArg(mem)
  9130  		v1.AddArg(v3)
  9131  		v.AddArg(v1)
  9132  		return true
  9133  	}
  9134  	// match: (Zero [16] {t} ptr mem)
  9135  	// cond: t.(Type).Alignment()%4 == 0
  9136  	// result: (MOVWstore [12] ptr (MOVWconst [0]) 		(MOVWstore [8] ptr (MOVWconst [0]) 			(MOVWstore [4] ptr (MOVWconst [0]) 				(MOVWstore [0] ptr (MOVWconst [0]) mem))))
  9137  	for {
  9138  		if v.AuxInt != 16 {
  9139  			break
  9140  		}
  9141  		t := v.Aux
  9142  		ptr := v.Args[0]
  9143  		mem := v.Args[1]
  9144  		if !(t.(Type).Alignment()%4 == 0) {
  9145  			break
  9146  		}
  9147  		v.reset(OpMIPSMOVWstore)
  9148  		v.AuxInt = 12
  9149  		v.AddArg(ptr)
  9150  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  9151  		v0.AuxInt = 0
  9152  		v.AddArg(v0)
  9153  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, TypeMem)
  9154  		v1.AuxInt = 8
  9155  		v1.AddArg(ptr)
  9156  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  9157  		v2.AuxInt = 0
  9158  		v1.AddArg(v2)
  9159  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWstore, TypeMem)
  9160  		v3.AuxInt = 4
  9161  		v3.AddArg(ptr)
  9162  		v4 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  9163  		v4.AuxInt = 0
  9164  		v3.AddArg(v4)
  9165  		v5 := b.NewValue0(v.Pos, OpMIPSMOVWstore, TypeMem)
  9166  		v5.AuxInt = 0
  9167  		v5.AddArg(ptr)
  9168  		v6 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  9169  		v6.AuxInt = 0
  9170  		v5.AddArg(v6)
  9171  		v5.AddArg(mem)
  9172  		v3.AddArg(v5)
  9173  		v1.AddArg(v3)
  9174  		v.AddArg(v1)
  9175  		return true
  9176  	}
  9177  	// match: (Zero [s] {t} ptr mem)
  9178  	// cond: (s > 16  || t.(Type).Alignment()%4 != 0)
  9179  	// result: (LoweredZero [t.(Type).Alignment()] 		ptr 		(ADDconst <ptr.Type> ptr [s-moveSize(t.(Type).Alignment(), config)]) 		mem)
  9180  	for {
  9181  		s := v.AuxInt
  9182  		t := v.Aux
  9183  		ptr := v.Args[0]
  9184  		mem := v.Args[1]
  9185  		if !(s > 16 || t.(Type).Alignment()%4 != 0) {
  9186  			break
  9187  		}
  9188  		v.reset(OpMIPSLoweredZero)
  9189  		v.AuxInt = t.(Type).Alignment()
  9190  		v.AddArg(ptr)
  9191  		v0 := b.NewValue0(v.Pos, OpMIPSADDconst, ptr.Type)
  9192  		v0.AuxInt = s - moveSize(t.(Type).Alignment(), config)
  9193  		v0.AddArg(ptr)
  9194  		v.AddArg(v0)
  9195  		v.AddArg(mem)
  9196  		return true
  9197  	}
  9198  	return false
  9199  }
  9200  func rewriteValueMIPS_OpZeroExt16to32_0(v *Value) bool {
  9201  	// match: (ZeroExt16to32 x)
  9202  	// cond:
  9203  	// result: (MOVHUreg x)
  9204  	for {
  9205  		x := v.Args[0]
  9206  		v.reset(OpMIPSMOVHUreg)
  9207  		v.AddArg(x)
  9208  		return true
  9209  	}
  9210  }
  9211  func rewriteValueMIPS_OpZeroExt8to16_0(v *Value) bool {
  9212  	// match: (ZeroExt8to16 x)
  9213  	// cond:
  9214  	// result: (MOVBUreg x)
  9215  	for {
  9216  		x := v.Args[0]
  9217  		v.reset(OpMIPSMOVBUreg)
  9218  		v.AddArg(x)
  9219  		return true
  9220  	}
  9221  }
  9222  func rewriteValueMIPS_OpZeroExt8to32_0(v *Value) bool {
  9223  	// match: (ZeroExt8to32 x)
  9224  	// cond:
  9225  	// result: (MOVBUreg x)
  9226  	for {
  9227  		x := v.Args[0]
  9228  		v.reset(OpMIPSMOVBUreg)
  9229  		v.AddArg(x)
  9230  		return true
  9231  	}
  9232  }
  9233  func rewriteValueMIPS_OpZeromask_0(v *Value) bool {
  9234  	b := v.Block
  9235  	_ = b
  9236  	types := &b.Func.Config.Types
  9237  	_ = types
  9238  	// match: (Zeromask x)
  9239  	// cond:
  9240  	// result: (NEG (SGTU x (MOVWconst [0])))
  9241  	for {
  9242  		x := v.Args[0]
  9243  		v.reset(OpMIPSNEG)
  9244  		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, types.Bool)
  9245  		v0.AddArg(x)
  9246  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, types.UInt32)
  9247  		v1.AuxInt = 0
  9248  		v0.AddArg(v1)
  9249  		v.AddArg(v0)
  9250  		return true
  9251  	}
  9252  }
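// Editorial sketch (not part of the generated rules): (Zeromask x) is all
// ones when x is non-zero and zero otherwise, built above as NEG of the
// boolean SGTU x 0. A minimal model, using the hypothetical name
// zeromaskSketch:
func zeromaskSketch(x uint32) uint32 {
	var sgtu uint32
	if x > 0 { // SGTU x (MOVWconst [0])
		sgtu = 1
	}
	return -sgtu // NEG: 0 stays 0, 1 wraps to 0xFFFFFFFF
}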
  9253  func rewriteBlockMIPS(b *Block) bool {
  9254  	config := b.Func.Config
  9255  	_ = config
  9256  	fe := b.Func.fe
  9257  	_ = fe
  9258  	types := &config.Types
  9259  	_ = types
  9260  	switch b.Kind {
  9261  	case BlockMIPSEQ:
  9262  		// match: (EQ (FPFlagTrue cmp) yes no)
  9263  		// cond:
  9264  		// result: (FPF cmp yes no)
  9265  		for {
  9266  			v := b.Control
  9267  			if v.Op != OpMIPSFPFlagTrue {
  9268  				break
  9269  			}
  9270  			cmp := v.Args[0]
  9271  			b.Kind = BlockMIPSFPF
  9272  			b.SetControl(cmp)
  9273  			return true
  9274  		}
  9275  		// match: (EQ (FPFlagFalse cmp) yes no)
  9276  		// cond:
  9277  		// result: (FPT cmp yes no)
  9278  		for {
  9279  			v := b.Control
  9280  			if v.Op != OpMIPSFPFlagFalse {
  9281  				break
  9282  			}
  9283  			cmp := v.Args[0]
  9284  			b.Kind = BlockMIPSFPT
  9285  			b.SetControl(cmp)
  9286  			return true
  9287  		}
  9288  		// match: (EQ (XORconst [1] cmp:(SGT _ _)) yes no)
  9289  		// cond:
  9290  		// result: (NE cmp yes no)
  9291  		for {
  9292  			v := b.Control
  9293  			if v.Op != OpMIPSXORconst {
  9294  				break
  9295  			}
  9296  			if v.AuxInt != 1 {
  9297  				break
  9298  			}
  9299  			cmp := v.Args[0]
  9300  			if cmp.Op != OpMIPSSGT {
  9301  				break
  9302  			}
  9303  			b.Kind = BlockMIPSNE
  9304  			b.SetControl(cmp)
  9305  			return true
  9306  		}
  9307  		// match: (EQ (XORconst [1] cmp:(SGTU _ _)) yes no)
  9308  		// cond:
  9309  		// result: (NE cmp yes no)
  9310  		for {
  9311  			v := b.Control
  9312  			if v.Op != OpMIPSXORconst {
  9313  				break
  9314  			}
  9315  			if v.AuxInt != 1 {
  9316  				break
  9317  			}
  9318  			cmp := v.Args[0]
  9319  			if cmp.Op != OpMIPSSGTU {
  9320  				break
  9321  			}
  9322  			b.Kind = BlockMIPSNE
  9323  			b.SetControl(cmp)
  9324  			return true
  9325  		}
  9326  		// match: (EQ (XORconst [1] cmp:(SGTconst _)) yes no)
  9327  		// cond:
  9328  		// result: (NE cmp yes no)
  9329  		for {
  9330  			v := b.Control
  9331  			if v.Op != OpMIPSXORconst {
  9332  				break
  9333  			}
  9334  			if v.AuxInt != 1 {
  9335  				break
  9336  			}
  9337  			cmp := v.Args[0]
  9338  			if cmp.Op != OpMIPSSGTconst {
  9339  				break
  9340  			}
  9341  			b.Kind = BlockMIPSNE
  9342  			b.SetControl(cmp)
  9343  			return true
  9344  		}
  9345  		// match: (EQ (XORconst [1] cmp:(SGTUconst _)) yes no)
  9346  		// cond:
  9347  		// result: (NE cmp yes no)
  9348  		for {
  9349  			v := b.Control
  9350  			if v.Op != OpMIPSXORconst {
  9351  				break
  9352  			}
  9353  			if v.AuxInt != 1 {
  9354  				break
  9355  			}
  9356  			cmp := v.Args[0]
  9357  			if cmp.Op != OpMIPSSGTUconst {
  9358  				break
  9359  			}
  9360  			b.Kind = BlockMIPSNE
  9361  			b.SetControl(cmp)
  9362  			return true
  9363  		}
  9364  		// match: (EQ (XORconst [1] cmp:(SGTzero _)) yes no)
  9365  		// cond:
  9366  		// result: (NE cmp yes no)
  9367  		for {
  9368  			v := b.Control
  9369  			if v.Op != OpMIPSXORconst {
  9370  				break
  9371  			}
  9372  			if v.AuxInt != 1 {
  9373  				break
  9374  			}
  9375  			cmp := v.Args[0]
  9376  			if cmp.Op != OpMIPSSGTzero {
  9377  				break
  9378  			}
  9379  			b.Kind = BlockMIPSNE
  9380  			b.SetControl(cmp)
  9381  			return true
  9382  		}
  9383  		// match: (EQ (XORconst [1] cmp:(SGTUzero _)) yes no)
  9384  		// cond:
  9385  		// result: (NE cmp yes no)
  9386  		for {
  9387  			v := b.Control
  9388  			if v.Op != OpMIPSXORconst {
  9389  				break
  9390  			}
  9391  			if v.AuxInt != 1 {
  9392  				break
  9393  			}
  9394  			cmp := v.Args[0]
  9395  			if cmp.Op != OpMIPSSGTUzero {
  9396  				break
  9397  			}
  9398  			b.Kind = BlockMIPSNE
  9399  			b.SetControl(cmp)
  9400  			return true
  9401  		}
  9402  		// match: (EQ (SGTUconst [1] x) yes no)
  9403  		// cond:
  9404  		// result: (NE x yes no)
  9405  		for {
  9406  			v := b.Control
  9407  			if v.Op != OpMIPSSGTUconst {
  9408  				break
  9409  			}
  9410  			if v.AuxInt != 1 {
  9411  				break
  9412  			}
  9413  			x := v.Args[0]
  9414  			b.Kind = BlockMIPSNE
  9415  			b.SetControl(x)
  9416  			return true
  9417  		}
  9418  		// match: (EQ (SGTUzero x) yes no)
  9419  		// cond:
  9420  		// result: (EQ x yes no)
  9421  		for {
  9422  			v := b.Control
  9423  			if v.Op != OpMIPSSGTUzero {
  9424  				break
  9425  			}
  9426  			x := v.Args[0]
  9427  			b.Kind = BlockMIPSEQ
  9428  			b.SetControl(x)
  9429  			return true
  9430  		}
  9431  		// match: (EQ (SGTconst [0] x) yes no)
  9432  		// cond:
  9433  		// result: (GEZ x yes no)
  9434  		for {
  9435  			v := b.Control
  9436  			if v.Op != OpMIPSSGTconst {
  9437  				break
  9438  			}
  9439  			if v.AuxInt != 0 {
  9440  				break
  9441  			}
  9442  			x := v.Args[0]
  9443  			b.Kind = BlockMIPSGEZ
  9444  			b.SetControl(x)
  9445  			return true
  9446  		}
  9447  		// match: (EQ (SGTzero x) yes no)
  9448  		// cond:
  9449  		// result: (LEZ x yes no)
  9450  		for {
  9451  			v := b.Control
  9452  			if v.Op != OpMIPSSGTzero {
  9453  				break
  9454  			}
  9455  			x := v.Args[0]
  9456  			b.Kind = BlockMIPSLEZ
  9457  			b.SetControl(x)
  9458  			return true
  9459  		}
  9460  		// match: (EQ (MOVWconst [0]) yes no)
  9461  		// cond:
  9462  		// result: (First nil yes no)
  9463  		for {
  9464  			v := b.Control
  9465  			if v.Op != OpMIPSMOVWconst {
  9466  				break
  9467  			}
  9468  			if v.AuxInt != 0 {
  9469  				break
  9470  			}
  9471  			b.Kind = BlockFirst
  9472  			b.SetControl(nil)
  9473  			return true
  9474  		}
  9475  		// match: (EQ (MOVWconst [c]) yes no)
  9476  		// cond: c != 0
  9477  		// result: (First nil no yes)
  9478  		for {
  9479  			v := b.Control
  9480  			if v.Op != OpMIPSMOVWconst {
  9481  				break
  9482  			}
  9483  			c := v.AuxInt
  9484  			if !(c != 0) {
  9485  				break
  9486  			}
  9487  			b.Kind = BlockFirst
  9488  			b.SetControl(nil)
  9489  			b.swapSuccessors()
  9490  			return true
  9491  		}
  9492  	case BlockMIPSGEZ:
  9493  		// match: (GEZ (MOVWconst [c]) yes no)
  9494  		// cond: int32(c) >= 0
  9495  		// result: (First nil yes no)
  9496  		for {
  9497  			v := b.Control
  9498  			if v.Op != OpMIPSMOVWconst {
  9499  				break
  9500  			}
  9501  			c := v.AuxInt
  9502  			if !(int32(c) >= 0) {
  9503  				break
  9504  			}
  9505  			b.Kind = BlockFirst
  9506  			b.SetControl(nil)
  9507  			return true
  9508  		}
  9509  		// match: (GEZ (MOVWconst [c]) yes no)
  9510  		// cond: int32(c) <  0
  9511  		// result: (First nil no yes)
  9512  		for {
  9513  			v := b.Control
  9514  			if v.Op != OpMIPSMOVWconst {
  9515  				break
  9516  			}
  9517  			c := v.AuxInt
  9518  			if !(int32(c) < 0) {
  9519  				break
  9520  			}
  9521  			b.Kind = BlockFirst
  9522  			b.SetControl(nil)
  9523  			b.swapSuccessors()
  9524  			return true
  9525  		}
  9526  	case BlockMIPSGTZ:
  9527  		// match: (GTZ (MOVWconst [c]) yes no)
  9528  		// cond: int32(c) >  0
  9529  		// result: (First nil yes no)
  9530  		for {
  9531  			v := b.Control
  9532  			if v.Op != OpMIPSMOVWconst {
  9533  				break
  9534  			}
  9535  			c := v.AuxInt
  9536  			if !(int32(c) > 0) {
  9537  				break
  9538  			}
  9539  			b.Kind = BlockFirst
  9540  			b.SetControl(nil)
  9541  			return true
  9542  		}
  9543  		// match: (GTZ (MOVWconst [c]) yes no)
  9544  		// cond: int32(c) <= 0
  9545  		// result: (First nil no yes)
  9546  		for {
  9547  			v := b.Control
  9548  			if v.Op != OpMIPSMOVWconst {
  9549  				break
  9550  			}
  9551  			c := v.AuxInt
  9552  			if !(int32(c) <= 0) {
  9553  				break
  9554  			}
  9555  			b.Kind = BlockFirst
  9556  			b.SetControl(nil)
  9557  			b.swapSuccessors()
  9558  			return true
  9559  		}
  9560  	case BlockIf:
  9561  		// match: (If cond yes no)
  9562  		// cond:
  9563  		// result: (NE cond yes no)
  9564  		for {
  9565  			v := b.Control
  9566  			_ = v
  9567  			cond := b.Control
  9568  			b.Kind = BlockMIPSNE
  9569  			b.SetControl(cond)
  9570  			return true
  9571  		}
  9572  	case BlockMIPSLEZ:
  9573  		// match: (LEZ (MOVWconst [c]) yes no)
  9574  		// cond: int32(c) <= 0
  9575  		// result: (First nil yes no)
  9576  		for {
  9577  			v := b.Control
  9578  			if v.Op != OpMIPSMOVWconst {
  9579  				break
  9580  			}
  9581  			c := v.AuxInt
  9582  			if !(int32(c) <= 0) {
  9583  				break
  9584  			}
  9585  			b.Kind = BlockFirst
  9586  			b.SetControl(nil)
  9587  			return true
  9588  		}
  9589  		// match: (LEZ (MOVWconst [c]) yes no)
  9590  		// cond: int32(c) >  0
  9591  		// result: (First nil no yes)
  9592  		for {
  9593  			v := b.Control
  9594  			if v.Op != OpMIPSMOVWconst {
  9595  				break
  9596  			}
  9597  			c := v.AuxInt
  9598  			if !(int32(c) > 0) {
  9599  				break
  9600  			}
  9601  			b.Kind = BlockFirst
  9602  			b.SetControl(nil)
  9603  			b.swapSuccessors()
  9604  			return true
  9605  		}
  9606  	case BlockMIPSLTZ:
  9607  		// match: (LTZ (MOVWconst [c]) yes no)
  9608  		// cond: int32(c) <  0
  9609  		// result: (First nil yes no)
  9610  		for {
  9611  			v := b.Control
  9612  			if v.Op != OpMIPSMOVWconst {
  9613  				break
  9614  			}
  9615  			c := v.AuxInt
  9616  			if !(int32(c) < 0) {
  9617  				break
  9618  			}
  9619  			b.Kind = BlockFirst
  9620  			b.SetControl(nil)
  9621  			return true
  9622  		}
  9623  		// match: (LTZ (MOVWconst [c]) yes no)
  9624  		// cond: int32(c) >= 0
  9625  		// result: (First nil no yes)
  9626  		for {
  9627  			v := b.Control
  9628  			if v.Op != OpMIPSMOVWconst {
  9629  				break
  9630  			}
  9631  			c := v.AuxInt
  9632  			if !(int32(c) >= 0) {
  9633  				break
  9634  			}
  9635  			b.Kind = BlockFirst
  9636  			b.SetControl(nil)
  9637  			b.swapSuccessors()
  9638  			return true
  9639  		}
  9640  	case BlockMIPSNE:
  9641  		// match: (NE (FPFlagTrue cmp) yes no)
  9642  		// cond:
  9643  		// result: (FPT cmp yes no)
  9644  		for {
  9645  			v := b.Control
  9646  			if v.Op != OpMIPSFPFlagTrue {
  9647  				break
  9648  			}
  9649  			cmp := v.Args[0]
  9650  			b.Kind = BlockMIPSFPT
  9651  			b.SetControl(cmp)
  9652  			return true
  9653  		}
  9654  		// match: (NE (FPFlagFalse cmp) yes no)
  9655  		// cond:
  9656  		// result: (FPF cmp yes no)
  9657  		for {
  9658  			v := b.Control
  9659  			if v.Op != OpMIPSFPFlagFalse {
  9660  				break
  9661  			}
  9662  			cmp := v.Args[0]
  9663  			b.Kind = BlockMIPSFPF
  9664  			b.SetControl(cmp)
  9665  			return true
  9666  		}
  9667  		// match: (NE (XORconst [1] cmp:(SGT _ _)) yes no)
  9668  		// cond:
  9669  		// result: (EQ cmp yes no)
  9670  		for {
  9671  			v := b.Control
  9672  			if v.Op != OpMIPSXORconst {
  9673  				break
  9674  			}
  9675  			if v.AuxInt != 1 {
  9676  				break
  9677  			}
  9678  			cmp := v.Args[0]
  9679  			if cmp.Op != OpMIPSSGT {
  9680  				break
  9681  			}
  9682  			b.Kind = BlockMIPSEQ
  9683  			b.SetControl(cmp)
  9684  			return true
  9685  		}
  9686  		// match: (NE (XORconst [1] cmp:(SGTU _ _)) yes no)
  9687  		// cond:
  9688  		// result: (EQ cmp yes no)
  9689  		for {
  9690  			v := b.Control
  9691  			if v.Op != OpMIPSXORconst {
  9692  				break
  9693  			}
  9694  			if v.AuxInt != 1 {
  9695  				break
  9696  			}
  9697  			cmp := v.Args[0]
  9698  			if cmp.Op != OpMIPSSGTU {
  9699  				break
  9700  			}
  9701  			b.Kind = BlockMIPSEQ
  9702  			b.SetControl(cmp)
  9703  			return true
  9704  		}
  9705  		// match: (NE (XORconst [1] cmp:(SGTconst _)) yes no)
  9706  		// cond:
  9707  		// result: (EQ cmp yes no)
  9708  		for {
  9709  			v := b.Control
  9710  			if v.Op != OpMIPSXORconst {
  9711  				break
  9712  			}
  9713  			if v.AuxInt != 1 {
  9714  				break
  9715  			}
  9716  			cmp := v.Args[0]
  9717  			if cmp.Op != OpMIPSSGTconst {
  9718  				break
  9719  			}
  9720  			b.Kind = BlockMIPSEQ
  9721  			b.SetControl(cmp)
  9722  			return true
  9723  		}
  9724  		// match: (NE (XORconst [1] cmp:(SGTUconst _)) yes no)
  9725  		// cond:
  9726  		// result: (EQ cmp yes no)
  9727  		for {
  9728  			v := b.Control
  9729  			if v.Op != OpMIPSXORconst {
  9730  				break
  9731  			}
  9732  			if v.AuxInt != 1 {
  9733  				break
  9734  			}
  9735  			cmp := v.Args[0]
  9736  			if cmp.Op != OpMIPSSGTUconst {
  9737  				break
  9738  			}
  9739  			b.Kind = BlockMIPSEQ
  9740  			b.SetControl(cmp)
  9741  			return true
  9742  		}
  9743  		// match: (NE (XORconst [1] cmp:(SGTzero _)) yes no)
  9744  		// cond:
  9745  		// result: (EQ cmp yes no)
  9746  		for {
  9747  			v := b.Control
  9748  			if v.Op != OpMIPSXORconst {
  9749  				break
  9750  			}
  9751  			if v.AuxInt != 1 {
  9752  				break
  9753  			}
  9754  			cmp := v.Args[0]
  9755  			if cmp.Op != OpMIPSSGTzero {
  9756  				break
  9757  			}
  9758  			b.Kind = BlockMIPSEQ
  9759  			b.SetControl(cmp)
  9760  			return true
  9761  		}
  9762  		// match: (NE (XORconst [1] cmp:(SGTUzero _)) yes no)
  9763  		// cond:
  9764  		// result: (EQ cmp yes no)
  9765  		for {
  9766  			v := b.Control
  9767  			if v.Op != OpMIPSXORconst {
  9768  				break
  9769  			}
  9770  			if v.AuxInt != 1 {
  9771  				break
  9772  			}
  9773  			cmp := v.Args[0]
  9774  			if cmp.Op != OpMIPSSGTUzero {
  9775  				break
  9776  			}
  9777  			b.Kind = BlockMIPSEQ
  9778  			b.SetControl(cmp)
  9779  			return true
  9780  		}
  9781  		// match: (NE (SGTUconst [1] x) yes no)
  9782  		// cond:
  9783  		// result: (EQ x yes no)
  9784  		for {
  9785  			v := b.Control
  9786  			if v.Op != OpMIPSSGTUconst {
  9787  				break
  9788  			}
  9789  			if v.AuxInt != 1 {
  9790  				break
  9791  			}
  9792  			x := v.Args[0]
  9793  			b.Kind = BlockMIPSEQ
  9794  			b.SetControl(x)
  9795  			return true
  9796  		}
  9797  		// match: (NE (SGTUzero x) yes no)
  9798  		// cond:
  9799  		// result: (NE x yes no)
  9800  		for {
  9801  			v := b.Control
  9802  			if v.Op != OpMIPSSGTUzero {
  9803  				break
  9804  			}
  9805  			x := v.Args[0]
  9806  			b.Kind = BlockMIPSNE
  9807  			b.SetControl(x)
  9808  			return true
  9809  		}
  9810  		// match: (NE (SGTconst [0] x) yes no)
  9811  		// cond:
  9812  		// result: (LTZ x yes no)
  9813  		for {
  9814  			v := b.Control
  9815  			if v.Op != OpMIPSSGTconst {
  9816  				break
  9817  			}
  9818  			if v.AuxInt != 0 {
  9819  				break
  9820  			}
  9821  			x := v.Args[0]
  9822  			b.Kind = BlockMIPSLTZ
  9823  			b.SetControl(x)
  9824  			return true
  9825  		}
  9826  		// match: (NE (SGTzero x) yes no)
  9827  		// cond:
  9828  		// result: (GTZ x yes no)
  9829  		for {
  9830  			v := b.Control
  9831  			if v.Op != OpMIPSSGTzero {
  9832  				break
  9833  			}
  9834  			x := v.Args[0]
  9835  			b.Kind = BlockMIPSGTZ
  9836  			b.SetControl(x)
  9837  			return true
  9838  		}
  9839  		// match: (NE (MOVWconst [0]) yes no)
  9840  		// cond:
  9841  		// result: (First nil no yes)
  9842  		for {
  9843  			v := b.Control
  9844  			if v.Op != OpMIPSMOVWconst {
  9845  				break
  9846  			}
  9847  			if v.AuxInt != 0 {
  9848  				break
  9849  			}
  9850  			b.Kind = BlockFirst
  9851  			b.SetControl(nil)
  9852  			b.swapSuccessors()
  9853  			return true
  9854  		}
  9855  		// match: (NE (MOVWconst [c]) yes no)
  9856  		// cond: c != 0
  9857  		// result: (First nil yes no)
  9858  		for {
  9859  			v := b.Control
  9860  			if v.Op != OpMIPSMOVWconst {
  9861  				break
  9862  			}
  9863  			c := v.AuxInt
  9864  			if !(c != 0) {
  9865  				break
  9866  			}
  9867  			b.Kind = BlockFirst
  9868  			b.SetControl(nil)
  9869  			return true
  9870  		}
  9871  	}
  9872  	return false
  9873  }
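// Editorial sketch (not part of the generated rules): several of the block
// rewrites above rely on MIPS comparison results being exactly 0 or 1, so
// XORconst [1] simply negates the flag, and an EQ branch on the negation is
// the same as an NE branch on the original (and vice versa). A minimal check
// of that identity, using the hypothetical name xorFlipSketch:
func xorFlipSketch(cmp uint32) bool {
	flipped := cmp ^ 1
	return (flipped == 0) == (cmp != 0) // holds whenever cmp is 0 or 1
}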