github.com/corona10/go@v0.0.0-20180224231303-7a218942be57/src/cmd/compile/internal/ssa/rewriteMIPS.go

     1  // Code generated from gen/MIPS.rules; DO NOT EDIT.
     2  // generated with: cd gen; go run *.go
     3  
     4  package ssa
     5  
     6  import "math"
     7  import "cmd/internal/obj"
     8  import "cmd/internal/objabi"
     9  import "cmd/compile/internal/types"
    10  
    11  var _ = math.MinInt8  // in case not otherwise used
    12  var _ = obj.ANOP      // in case not otherwise used
    13  var _ = objabi.GOROOT // in case not otherwise used
    14  var _ = types.TypeMem // in case not otherwise used
    15  
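// rewriteValueMIPS dispatches on v.Op to the per-op rewrite function generated
// from gen/MIPS.rules. A return value of true means v was rewritten in place;
// false means no rule for this op matched.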
    16  func rewriteValueMIPS(v *Value) bool {
    17  	switch v.Op {
    18  	case OpAdd16:
    19  		return rewriteValueMIPS_OpAdd16_0(v)
    20  	case OpAdd32:
    21  		return rewriteValueMIPS_OpAdd32_0(v)
    22  	case OpAdd32F:
    23  		return rewriteValueMIPS_OpAdd32F_0(v)
    24  	case OpAdd32withcarry:
    25  		return rewriteValueMIPS_OpAdd32withcarry_0(v)
    26  	case OpAdd64F:
    27  		return rewriteValueMIPS_OpAdd64F_0(v)
    28  	case OpAdd8:
    29  		return rewriteValueMIPS_OpAdd8_0(v)
    30  	case OpAddPtr:
    31  		return rewriteValueMIPS_OpAddPtr_0(v)
    32  	case OpAddr:
    33  		return rewriteValueMIPS_OpAddr_0(v)
    34  	case OpAnd16:
    35  		return rewriteValueMIPS_OpAnd16_0(v)
    36  	case OpAnd32:
    37  		return rewriteValueMIPS_OpAnd32_0(v)
    38  	case OpAnd8:
    39  		return rewriteValueMIPS_OpAnd8_0(v)
    40  	case OpAndB:
    41  		return rewriteValueMIPS_OpAndB_0(v)
    42  	case OpAtomicAdd32:
    43  		return rewriteValueMIPS_OpAtomicAdd32_0(v)
    44  	case OpAtomicAnd8:
    45  		return rewriteValueMIPS_OpAtomicAnd8_0(v)
    46  	case OpAtomicCompareAndSwap32:
    47  		return rewriteValueMIPS_OpAtomicCompareAndSwap32_0(v)
    48  	case OpAtomicExchange32:
    49  		return rewriteValueMIPS_OpAtomicExchange32_0(v)
    50  	case OpAtomicLoad32:
    51  		return rewriteValueMIPS_OpAtomicLoad32_0(v)
    52  	case OpAtomicLoadPtr:
    53  		return rewriteValueMIPS_OpAtomicLoadPtr_0(v)
    54  	case OpAtomicOr8:
    55  		return rewriteValueMIPS_OpAtomicOr8_0(v)
    56  	case OpAtomicStore32:
    57  		return rewriteValueMIPS_OpAtomicStore32_0(v)
    58  	case OpAtomicStorePtrNoWB:
    59  		return rewriteValueMIPS_OpAtomicStorePtrNoWB_0(v)
    60  	case OpAvg32u:
    61  		return rewriteValueMIPS_OpAvg32u_0(v)
    62  	case OpBitLen32:
    63  		return rewriteValueMIPS_OpBitLen32_0(v)
    64  	case OpClosureCall:
    65  		return rewriteValueMIPS_OpClosureCall_0(v)
    66  	case OpCom16:
    67  		return rewriteValueMIPS_OpCom16_0(v)
    68  	case OpCom32:
    69  		return rewriteValueMIPS_OpCom32_0(v)
    70  	case OpCom8:
    71  		return rewriteValueMIPS_OpCom8_0(v)
    72  	case OpConst16:
    73  		return rewriteValueMIPS_OpConst16_0(v)
    74  	case OpConst32:
    75  		return rewriteValueMIPS_OpConst32_0(v)
    76  	case OpConst32F:
    77  		return rewriteValueMIPS_OpConst32F_0(v)
    78  	case OpConst64F:
    79  		return rewriteValueMIPS_OpConst64F_0(v)
    80  	case OpConst8:
    81  		return rewriteValueMIPS_OpConst8_0(v)
    82  	case OpConstBool:
    83  		return rewriteValueMIPS_OpConstBool_0(v)
    84  	case OpConstNil:
    85  		return rewriteValueMIPS_OpConstNil_0(v)
    86  	case OpConvert:
    87  		return rewriteValueMIPS_OpConvert_0(v)
    88  	case OpCtz32:
    89  		return rewriteValueMIPS_OpCtz32_0(v)
    90  	case OpCvt32Fto32:
    91  		return rewriteValueMIPS_OpCvt32Fto32_0(v)
    92  	case OpCvt32Fto64F:
    93  		return rewriteValueMIPS_OpCvt32Fto64F_0(v)
    94  	case OpCvt32to32F:
    95  		return rewriteValueMIPS_OpCvt32to32F_0(v)
    96  	case OpCvt32to64F:
    97  		return rewriteValueMIPS_OpCvt32to64F_0(v)
    98  	case OpCvt64Fto32:
    99  		return rewriteValueMIPS_OpCvt64Fto32_0(v)
   100  	case OpCvt64Fto32F:
   101  		return rewriteValueMIPS_OpCvt64Fto32F_0(v)
   102  	case OpDiv16:
   103  		return rewriteValueMIPS_OpDiv16_0(v)
   104  	case OpDiv16u:
   105  		return rewriteValueMIPS_OpDiv16u_0(v)
   106  	case OpDiv32:
   107  		return rewriteValueMIPS_OpDiv32_0(v)
   108  	case OpDiv32F:
   109  		return rewriteValueMIPS_OpDiv32F_0(v)
   110  	case OpDiv32u:
   111  		return rewriteValueMIPS_OpDiv32u_0(v)
   112  	case OpDiv64F:
   113  		return rewriteValueMIPS_OpDiv64F_0(v)
   114  	case OpDiv8:
   115  		return rewriteValueMIPS_OpDiv8_0(v)
   116  	case OpDiv8u:
   117  		return rewriteValueMIPS_OpDiv8u_0(v)
   118  	case OpEq16:
   119  		return rewriteValueMIPS_OpEq16_0(v)
   120  	case OpEq32:
   121  		return rewriteValueMIPS_OpEq32_0(v)
   122  	case OpEq32F:
   123  		return rewriteValueMIPS_OpEq32F_0(v)
   124  	case OpEq64F:
   125  		return rewriteValueMIPS_OpEq64F_0(v)
   126  	case OpEq8:
   127  		return rewriteValueMIPS_OpEq8_0(v)
   128  	case OpEqB:
   129  		return rewriteValueMIPS_OpEqB_0(v)
   130  	case OpEqPtr:
   131  		return rewriteValueMIPS_OpEqPtr_0(v)
   132  	case OpGeq16:
   133  		return rewriteValueMIPS_OpGeq16_0(v)
   134  	case OpGeq16U:
   135  		return rewriteValueMIPS_OpGeq16U_0(v)
   136  	case OpGeq32:
   137  		return rewriteValueMIPS_OpGeq32_0(v)
   138  	case OpGeq32F:
   139  		return rewriteValueMIPS_OpGeq32F_0(v)
   140  	case OpGeq32U:
   141  		return rewriteValueMIPS_OpGeq32U_0(v)
   142  	case OpGeq64F:
   143  		return rewriteValueMIPS_OpGeq64F_0(v)
   144  	case OpGeq8:
   145  		return rewriteValueMIPS_OpGeq8_0(v)
   146  	case OpGeq8U:
   147  		return rewriteValueMIPS_OpGeq8U_0(v)
   148  	case OpGetCallerSP:
   149  		return rewriteValueMIPS_OpGetCallerSP_0(v)
   150  	case OpGetClosurePtr:
   151  		return rewriteValueMIPS_OpGetClosurePtr_0(v)
   152  	case OpGreater16:
   153  		return rewriteValueMIPS_OpGreater16_0(v)
   154  	case OpGreater16U:
   155  		return rewriteValueMIPS_OpGreater16U_0(v)
   156  	case OpGreater32:
   157  		return rewriteValueMIPS_OpGreater32_0(v)
   158  	case OpGreater32F:
   159  		return rewriteValueMIPS_OpGreater32F_0(v)
   160  	case OpGreater32U:
   161  		return rewriteValueMIPS_OpGreater32U_0(v)
   162  	case OpGreater64F:
   163  		return rewriteValueMIPS_OpGreater64F_0(v)
   164  	case OpGreater8:
   165  		return rewriteValueMIPS_OpGreater8_0(v)
   166  	case OpGreater8U:
   167  		return rewriteValueMIPS_OpGreater8U_0(v)
   168  	case OpHmul32:
   169  		return rewriteValueMIPS_OpHmul32_0(v)
   170  	case OpHmul32u:
   171  		return rewriteValueMIPS_OpHmul32u_0(v)
   172  	case OpInterCall:
   173  		return rewriteValueMIPS_OpInterCall_0(v)
   174  	case OpIsInBounds:
   175  		return rewriteValueMIPS_OpIsInBounds_0(v)
   176  	case OpIsNonNil:
   177  		return rewriteValueMIPS_OpIsNonNil_0(v)
   178  	case OpIsSliceInBounds:
   179  		return rewriteValueMIPS_OpIsSliceInBounds_0(v)
   180  	case OpLeq16:
   181  		return rewriteValueMIPS_OpLeq16_0(v)
   182  	case OpLeq16U:
   183  		return rewriteValueMIPS_OpLeq16U_0(v)
   184  	case OpLeq32:
   185  		return rewriteValueMIPS_OpLeq32_0(v)
   186  	case OpLeq32F:
   187  		return rewriteValueMIPS_OpLeq32F_0(v)
   188  	case OpLeq32U:
   189  		return rewriteValueMIPS_OpLeq32U_0(v)
   190  	case OpLeq64F:
   191  		return rewriteValueMIPS_OpLeq64F_0(v)
   192  	case OpLeq8:
   193  		return rewriteValueMIPS_OpLeq8_0(v)
   194  	case OpLeq8U:
   195  		return rewriteValueMIPS_OpLeq8U_0(v)
   196  	case OpLess16:
   197  		return rewriteValueMIPS_OpLess16_0(v)
   198  	case OpLess16U:
   199  		return rewriteValueMIPS_OpLess16U_0(v)
   200  	case OpLess32:
   201  		return rewriteValueMIPS_OpLess32_0(v)
   202  	case OpLess32F:
   203  		return rewriteValueMIPS_OpLess32F_0(v)
   204  	case OpLess32U:
   205  		return rewriteValueMIPS_OpLess32U_0(v)
   206  	case OpLess64F:
   207  		return rewriteValueMIPS_OpLess64F_0(v)
   208  	case OpLess8:
   209  		return rewriteValueMIPS_OpLess8_0(v)
   210  	case OpLess8U:
   211  		return rewriteValueMIPS_OpLess8U_0(v)
   212  	case OpLoad:
   213  		return rewriteValueMIPS_OpLoad_0(v)
   214  	case OpLsh16x16:
   215  		return rewriteValueMIPS_OpLsh16x16_0(v)
   216  	case OpLsh16x32:
   217  		return rewriteValueMIPS_OpLsh16x32_0(v)
   218  	case OpLsh16x64:
   219  		return rewriteValueMIPS_OpLsh16x64_0(v)
   220  	case OpLsh16x8:
   221  		return rewriteValueMIPS_OpLsh16x8_0(v)
   222  	case OpLsh32x16:
   223  		return rewriteValueMIPS_OpLsh32x16_0(v)
   224  	case OpLsh32x32:
   225  		return rewriteValueMIPS_OpLsh32x32_0(v)
   226  	case OpLsh32x64:
   227  		return rewriteValueMIPS_OpLsh32x64_0(v)
   228  	case OpLsh32x8:
   229  		return rewriteValueMIPS_OpLsh32x8_0(v)
   230  	case OpLsh8x16:
   231  		return rewriteValueMIPS_OpLsh8x16_0(v)
   232  	case OpLsh8x32:
   233  		return rewriteValueMIPS_OpLsh8x32_0(v)
   234  	case OpLsh8x64:
   235  		return rewriteValueMIPS_OpLsh8x64_0(v)
   236  	case OpLsh8x8:
   237  		return rewriteValueMIPS_OpLsh8x8_0(v)
   238  	case OpMIPSADD:
   239  		return rewriteValueMIPS_OpMIPSADD_0(v)
   240  	case OpMIPSADDconst:
   241  		return rewriteValueMIPS_OpMIPSADDconst_0(v)
   242  	case OpMIPSAND:
   243  		return rewriteValueMIPS_OpMIPSAND_0(v)
   244  	case OpMIPSANDconst:
   245  		return rewriteValueMIPS_OpMIPSANDconst_0(v)
   246  	case OpMIPSCMOVZ:
   247  		return rewriteValueMIPS_OpMIPSCMOVZ_0(v)
   248  	case OpMIPSCMOVZzero:
   249  		return rewriteValueMIPS_OpMIPSCMOVZzero_0(v)
   250  	case OpMIPSLoweredAtomicAdd:
   251  		return rewriteValueMIPS_OpMIPSLoweredAtomicAdd_0(v)
   252  	case OpMIPSLoweredAtomicStore:
   253  		return rewriteValueMIPS_OpMIPSLoweredAtomicStore_0(v)
   254  	case OpMIPSMOVBUload:
   255  		return rewriteValueMIPS_OpMIPSMOVBUload_0(v)
   256  	case OpMIPSMOVBUreg:
   257  		return rewriteValueMIPS_OpMIPSMOVBUreg_0(v)
   258  	case OpMIPSMOVBload:
   259  		return rewriteValueMIPS_OpMIPSMOVBload_0(v)
   260  	case OpMIPSMOVBreg:
   261  		return rewriteValueMIPS_OpMIPSMOVBreg_0(v)
   262  	case OpMIPSMOVBstore:
   263  		return rewriteValueMIPS_OpMIPSMOVBstore_0(v)
   264  	case OpMIPSMOVBstorezero:
   265  		return rewriteValueMIPS_OpMIPSMOVBstorezero_0(v)
   266  	case OpMIPSMOVDload:
   267  		return rewriteValueMIPS_OpMIPSMOVDload_0(v)
   268  	case OpMIPSMOVDstore:
   269  		return rewriteValueMIPS_OpMIPSMOVDstore_0(v)
   270  	case OpMIPSMOVFload:
   271  		return rewriteValueMIPS_OpMIPSMOVFload_0(v)
   272  	case OpMIPSMOVFstore:
   273  		return rewriteValueMIPS_OpMIPSMOVFstore_0(v)
   274  	case OpMIPSMOVHUload:
   275  		return rewriteValueMIPS_OpMIPSMOVHUload_0(v)
   276  	case OpMIPSMOVHUreg:
   277  		return rewriteValueMIPS_OpMIPSMOVHUreg_0(v)
   278  	case OpMIPSMOVHload:
   279  		return rewriteValueMIPS_OpMIPSMOVHload_0(v)
   280  	case OpMIPSMOVHreg:
   281  		return rewriteValueMIPS_OpMIPSMOVHreg_0(v)
   282  	case OpMIPSMOVHstore:
   283  		return rewriteValueMIPS_OpMIPSMOVHstore_0(v)
   284  	case OpMIPSMOVHstorezero:
   285  		return rewriteValueMIPS_OpMIPSMOVHstorezero_0(v)
   286  	case OpMIPSMOVWload:
   287  		return rewriteValueMIPS_OpMIPSMOVWload_0(v)
   288  	case OpMIPSMOVWreg:
   289  		return rewriteValueMIPS_OpMIPSMOVWreg_0(v)
   290  	case OpMIPSMOVWstore:
   291  		return rewriteValueMIPS_OpMIPSMOVWstore_0(v)
   292  	case OpMIPSMOVWstorezero:
   293  		return rewriteValueMIPS_OpMIPSMOVWstorezero_0(v)
   294  	case OpMIPSMUL:
   295  		return rewriteValueMIPS_OpMIPSMUL_0(v)
   296  	case OpMIPSNEG:
   297  		return rewriteValueMIPS_OpMIPSNEG_0(v)
   298  	case OpMIPSNOR:
   299  		return rewriteValueMIPS_OpMIPSNOR_0(v)
   300  	case OpMIPSNORconst:
   301  		return rewriteValueMIPS_OpMIPSNORconst_0(v)
   302  	case OpMIPSOR:
   303  		return rewriteValueMIPS_OpMIPSOR_0(v)
   304  	case OpMIPSORconst:
   305  		return rewriteValueMIPS_OpMIPSORconst_0(v)
   306  	case OpMIPSSGT:
   307  		return rewriteValueMIPS_OpMIPSSGT_0(v)
   308  	case OpMIPSSGTU:
   309  		return rewriteValueMIPS_OpMIPSSGTU_0(v)
   310  	case OpMIPSSGTUconst:
   311  		return rewriteValueMIPS_OpMIPSSGTUconst_0(v)
   312  	case OpMIPSSGTUzero:
   313  		return rewriteValueMIPS_OpMIPSSGTUzero_0(v)
   314  	case OpMIPSSGTconst:
   315  		return rewriteValueMIPS_OpMIPSSGTconst_0(v) || rewriteValueMIPS_OpMIPSSGTconst_10(v)
   316  	case OpMIPSSGTzero:
   317  		return rewriteValueMIPS_OpMIPSSGTzero_0(v)
   318  	case OpMIPSSLL:
   319  		return rewriteValueMIPS_OpMIPSSLL_0(v)
   320  	case OpMIPSSLLconst:
   321  		return rewriteValueMIPS_OpMIPSSLLconst_0(v)
   322  	case OpMIPSSRA:
   323  		return rewriteValueMIPS_OpMIPSSRA_0(v)
   324  	case OpMIPSSRAconst:
   325  		return rewriteValueMIPS_OpMIPSSRAconst_0(v)
   326  	case OpMIPSSRL:
   327  		return rewriteValueMIPS_OpMIPSSRL_0(v)
   328  	case OpMIPSSRLconst:
   329  		return rewriteValueMIPS_OpMIPSSRLconst_0(v)
   330  	case OpMIPSSUB:
   331  		return rewriteValueMIPS_OpMIPSSUB_0(v)
   332  	case OpMIPSSUBconst:
   333  		return rewriteValueMIPS_OpMIPSSUBconst_0(v)
   334  	case OpMIPSXOR:
   335  		return rewriteValueMIPS_OpMIPSXOR_0(v)
   336  	case OpMIPSXORconst:
   337  		return rewriteValueMIPS_OpMIPSXORconst_0(v)
   338  	case OpMod16:
   339  		return rewriteValueMIPS_OpMod16_0(v)
   340  	case OpMod16u:
   341  		return rewriteValueMIPS_OpMod16u_0(v)
   342  	case OpMod32:
   343  		return rewriteValueMIPS_OpMod32_0(v)
   344  	case OpMod32u:
   345  		return rewriteValueMIPS_OpMod32u_0(v)
   346  	case OpMod8:
   347  		return rewriteValueMIPS_OpMod8_0(v)
   348  	case OpMod8u:
   349  		return rewriteValueMIPS_OpMod8u_0(v)
   350  	case OpMove:
   351  		return rewriteValueMIPS_OpMove_0(v) || rewriteValueMIPS_OpMove_10(v)
   352  	case OpMul16:
   353  		return rewriteValueMIPS_OpMul16_0(v)
   354  	case OpMul32:
   355  		return rewriteValueMIPS_OpMul32_0(v)
   356  	case OpMul32F:
   357  		return rewriteValueMIPS_OpMul32F_0(v)
   358  	case OpMul32uhilo:
   359  		return rewriteValueMIPS_OpMul32uhilo_0(v)
   360  	case OpMul64F:
   361  		return rewriteValueMIPS_OpMul64F_0(v)
   362  	case OpMul8:
   363  		return rewriteValueMIPS_OpMul8_0(v)
   364  	case OpNeg16:
   365  		return rewriteValueMIPS_OpNeg16_0(v)
   366  	case OpNeg32:
   367  		return rewriteValueMIPS_OpNeg32_0(v)
   368  	case OpNeg32F:
   369  		return rewriteValueMIPS_OpNeg32F_0(v)
   370  	case OpNeg64F:
   371  		return rewriteValueMIPS_OpNeg64F_0(v)
   372  	case OpNeg8:
   373  		return rewriteValueMIPS_OpNeg8_0(v)
   374  	case OpNeq16:
   375  		return rewriteValueMIPS_OpNeq16_0(v)
   376  	case OpNeq32:
   377  		return rewriteValueMIPS_OpNeq32_0(v)
   378  	case OpNeq32F:
   379  		return rewriteValueMIPS_OpNeq32F_0(v)
   380  	case OpNeq64F:
   381  		return rewriteValueMIPS_OpNeq64F_0(v)
   382  	case OpNeq8:
   383  		return rewriteValueMIPS_OpNeq8_0(v)
   384  	case OpNeqB:
   385  		return rewriteValueMIPS_OpNeqB_0(v)
   386  	case OpNeqPtr:
   387  		return rewriteValueMIPS_OpNeqPtr_0(v)
   388  	case OpNilCheck:
   389  		return rewriteValueMIPS_OpNilCheck_0(v)
   390  	case OpNot:
   391  		return rewriteValueMIPS_OpNot_0(v)
   392  	case OpOffPtr:
   393  		return rewriteValueMIPS_OpOffPtr_0(v)
   394  	case OpOr16:
   395  		return rewriteValueMIPS_OpOr16_0(v)
   396  	case OpOr32:
   397  		return rewriteValueMIPS_OpOr32_0(v)
   398  	case OpOr8:
   399  		return rewriteValueMIPS_OpOr8_0(v)
   400  	case OpOrB:
   401  		return rewriteValueMIPS_OpOrB_0(v)
   402  	case OpRound32F:
   403  		return rewriteValueMIPS_OpRound32F_0(v)
   404  	case OpRound64F:
   405  		return rewriteValueMIPS_OpRound64F_0(v)
   406  	case OpRsh16Ux16:
   407  		return rewriteValueMIPS_OpRsh16Ux16_0(v)
   408  	case OpRsh16Ux32:
   409  		return rewriteValueMIPS_OpRsh16Ux32_0(v)
   410  	case OpRsh16Ux64:
   411  		return rewriteValueMIPS_OpRsh16Ux64_0(v)
   412  	case OpRsh16Ux8:
   413  		return rewriteValueMIPS_OpRsh16Ux8_0(v)
   414  	case OpRsh16x16:
   415  		return rewriteValueMIPS_OpRsh16x16_0(v)
   416  	case OpRsh16x32:
   417  		return rewriteValueMIPS_OpRsh16x32_0(v)
   418  	case OpRsh16x64:
   419  		return rewriteValueMIPS_OpRsh16x64_0(v)
   420  	case OpRsh16x8:
   421  		return rewriteValueMIPS_OpRsh16x8_0(v)
   422  	case OpRsh32Ux16:
   423  		return rewriteValueMIPS_OpRsh32Ux16_0(v)
   424  	case OpRsh32Ux32:
   425  		return rewriteValueMIPS_OpRsh32Ux32_0(v)
   426  	case OpRsh32Ux64:
   427  		return rewriteValueMIPS_OpRsh32Ux64_0(v)
   428  	case OpRsh32Ux8:
   429  		return rewriteValueMIPS_OpRsh32Ux8_0(v)
   430  	case OpRsh32x16:
   431  		return rewriteValueMIPS_OpRsh32x16_0(v)
   432  	case OpRsh32x32:
   433  		return rewriteValueMIPS_OpRsh32x32_0(v)
   434  	case OpRsh32x64:
   435  		return rewriteValueMIPS_OpRsh32x64_0(v)
   436  	case OpRsh32x8:
   437  		return rewriteValueMIPS_OpRsh32x8_0(v)
   438  	case OpRsh8Ux16:
   439  		return rewriteValueMIPS_OpRsh8Ux16_0(v)
   440  	case OpRsh8Ux32:
   441  		return rewriteValueMIPS_OpRsh8Ux32_0(v)
   442  	case OpRsh8Ux64:
   443  		return rewriteValueMIPS_OpRsh8Ux64_0(v)
   444  	case OpRsh8Ux8:
   445  		return rewriteValueMIPS_OpRsh8Ux8_0(v)
   446  	case OpRsh8x16:
   447  		return rewriteValueMIPS_OpRsh8x16_0(v)
   448  	case OpRsh8x32:
   449  		return rewriteValueMIPS_OpRsh8x32_0(v)
   450  	case OpRsh8x64:
   451  		return rewriteValueMIPS_OpRsh8x64_0(v)
   452  	case OpRsh8x8:
   453  		return rewriteValueMIPS_OpRsh8x8_0(v)
   454  	case OpSelect0:
   455  		return rewriteValueMIPS_OpSelect0_0(v) || rewriteValueMIPS_OpSelect0_10(v)
   456  	case OpSelect1:
   457  		return rewriteValueMIPS_OpSelect1_0(v) || rewriteValueMIPS_OpSelect1_10(v)
   458  	case OpSignExt16to32:
   459  		return rewriteValueMIPS_OpSignExt16to32_0(v)
   460  	case OpSignExt8to16:
   461  		return rewriteValueMIPS_OpSignExt8to16_0(v)
   462  	case OpSignExt8to32:
   463  		return rewriteValueMIPS_OpSignExt8to32_0(v)
   464  	case OpSignmask:
   465  		return rewriteValueMIPS_OpSignmask_0(v)
   466  	case OpSlicemask:
   467  		return rewriteValueMIPS_OpSlicemask_0(v)
   468  	case OpSqrt:
   469  		return rewriteValueMIPS_OpSqrt_0(v)
   470  	case OpStaticCall:
   471  		return rewriteValueMIPS_OpStaticCall_0(v)
   472  	case OpStore:
   473  		return rewriteValueMIPS_OpStore_0(v)
   474  	case OpSub16:
   475  		return rewriteValueMIPS_OpSub16_0(v)
   476  	case OpSub32:
   477  		return rewriteValueMIPS_OpSub32_0(v)
   478  	case OpSub32F:
   479  		return rewriteValueMIPS_OpSub32F_0(v)
   480  	case OpSub32withcarry:
   481  		return rewriteValueMIPS_OpSub32withcarry_0(v)
   482  	case OpSub64F:
   483  		return rewriteValueMIPS_OpSub64F_0(v)
   484  	case OpSub8:
   485  		return rewriteValueMIPS_OpSub8_0(v)
   486  	case OpSubPtr:
   487  		return rewriteValueMIPS_OpSubPtr_0(v)
   488  	case OpTrunc16to8:
   489  		return rewriteValueMIPS_OpTrunc16to8_0(v)
   490  	case OpTrunc32to16:
   491  		return rewriteValueMIPS_OpTrunc32to16_0(v)
   492  	case OpTrunc32to8:
   493  		return rewriteValueMIPS_OpTrunc32to8_0(v)
   494  	case OpWB:
   495  		return rewriteValueMIPS_OpWB_0(v)
   496  	case OpXor16:
   497  		return rewriteValueMIPS_OpXor16_0(v)
   498  	case OpXor32:
   499  		return rewriteValueMIPS_OpXor32_0(v)
   500  	case OpXor8:
   501  		return rewriteValueMIPS_OpXor8_0(v)
   502  	case OpZero:
   503  		return rewriteValueMIPS_OpZero_0(v) || rewriteValueMIPS_OpZero_10(v)
   504  	case OpZeroExt16to32:
   505  		return rewriteValueMIPS_OpZeroExt16to32_0(v)
   506  	case OpZeroExt8to16:
   507  		return rewriteValueMIPS_OpZeroExt8to16_0(v)
   508  	case OpZeroExt8to32:
   509  		return rewriteValueMIPS_OpZeroExt8to32_0(v)
   510  	case OpZeromask:
   511  		return rewriteValueMIPS_OpZeromask_0(v)
   512  	}
   513  	return false
   514  }
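// Each function below implements the generated rules for a single op. The
// "// match:", "// cond:", and "// result:" comments restate the rule from
// gen/MIPS.rules; the body binds the value's arguments, checks the condition
// (if any), and rewrites v in place via v.reset and AddArg, returning true on
// success. For example, rewriteValueMIPS_OpAdd16_0 lowers a generic Add16 to
// the machine-level MIPS ADD; the low 16 bits of the 32-bit sum are the
// correct 16-bit result.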
   515  func rewriteValueMIPS_OpAdd16_0(v *Value) bool {
   516  	// match: (Add16 x y)
   517  	// cond:
   518  	// result: (ADD x y)
   519  	for {
   520  		_ = v.Args[1]
   521  		x := v.Args[0]
   522  		y := v.Args[1]
   523  		v.reset(OpMIPSADD)
   524  		v.AddArg(x)
   525  		v.AddArg(y)
   526  		return true
   527  	}
   528  }
   529  func rewriteValueMIPS_OpAdd32_0(v *Value) bool {
   530  	// match: (Add32 x y)
   531  	// cond:
   532  	// result: (ADD x y)
   533  	for {
   534  		_ = v.Args[1]
   535  		x := v.Args[0]
   536  		y := v.Args[1]
   537  		v.reset(OpMIPSADD)
   538  		v.AddArg(x)
   539  		v.AddArg(y)
   540  		return true
   541  	}
   542  }
   543  func rewriteValueMIPS_OpAdd32F_0(v *Value) bool {
   544  	// match: (Add32F x y)
   545  	// cond:
   546  	// result: (ADDF x y)
   547  	for {
   548  		_ = v.Args[1]
   549  		x := v.Args[0]
   550  		y := v.Args[1]
   551  		v.reset(OpMIPSADDF)
   552  		v.AddArg(x)
   553  		v.AddArg(y)
   554  		return true
   555  	}
   556  }
   557  func rewriteValueMIPS_OpAdd32withcarry_0(v *Value) bool {
   558  	b := v.Block
   559  	_ = b
   560  	// match: (Add32withcarry <t> x y c)
   561  	// cond:
   562  	// result: (ADD c (ADD <t> x y))
   563  	for {
   564  		t := v.Type
   565  		_ = v.Args[2]
   566  		x := v.Args[0]
   567  		y := v.Args[1]
   568  		c := v.Args[2]
   569  		v.reset(OpMIPSADD)
   570  		v.AddArg(c)
   571  		v0 := b.NewValue0(v.Pos, OpMIPSADD, t)
   572  		v0.AddArg(x)
   573  		v0.AddArg(y)
   574  		v.AddArg(v0)
   575  		return true
   576  	}
   577  }
   578  func rewriteValueMIPS_OpAdd64F_0(v *Value) bool {
   579  	// match: (Add64F x y)
   580  	// cond:
   581  	// result: (ADDD x y)
   582  	for {
   583  		_ = v.Args[1]
   584  		x := v.Args[0]
   585  		y := v.Args[1]
   586  		v.reset(OpMIPSADDD)
   587  		v.AddArg(x)
   588  		v.AddArg(y)
   589  		return true
   590  	}
   591  }
   592  func rewriteValueMIPS_OpAdd8_0(v *Value) bool {
   593  	// match: (Add8 x y)
   594  	// cond:
   595  	// result: (ADD x y)
   596  	for {
   597  		_ = v.Args[1]
   598  		x := v.Args[0]
   599  		y := v.Args[1]
   600  		v.reset(OpMIPSADD)
   601  		v.AddArg(x)
   602  		v.AddArg(y)
   603  		return true
   604  	}
   605  }
   606  func rewriteValueMIPS_OpAddPtr_0(v *Value) bool {
   607  	// match: (AddPtr x y)
   608  	// cond:
   609  	// result: (ADD x y)
   610  	for {
   611  		_ = v.Args[1]
   612  		x := v.Args[0]
   613  		y := v.Args[1]
   614  		v.reset(OpMIPSADD)
   615  		v.AddArg(x)
   616  		v.AddArg(y)
   617  		return true
   618  	}
   619  }
   620  func rewriteValueMIPS_OpAddr_0(v *Value) bool {
   621  	// match: (Addr {sym} base)
   622  	// cond:
   623  	// result: (MOVWaddr {sym} base)
   624  	for {
   625  		sym := v.Aux
   626  		base := v.Args[0]
   627  		v.reset(OpMIPSMOVWaddr)
   628  		v.Aux = sym
   629  		v.AddArg(base)
   630  		return true
   631  	}
   632  }
   633  func rewriteValueMIPS_OpAnd16_0(v *Value) bool {
   634  	// match: (And16 x y)
   635  	// cond:
   636  	// result: (AND x y)
   637  	for {
   638  		_ = v.Args[1]
   639  		x := v.Args[0]
   640  		y := v.Args[1]
   641  		v.reset(OpMIPSAND)
   642  		v.AddArg(x)
   643  		v.AddArg(y)
   644  		return true
   645  	}
   646  }
   647  func rewriteValueMIPS_OpAnd32_0(v *Value) bool {
   648  	// match: (And32 x y)
   649  	// cond:
   650  	// result: (AND x y)
   651  	for {
   652  		_ = v.Args[1]
   653  		x := v.Args[0]
   654  		y := v.Args[1]
   655  		v.reset(OpMIPSAND)
   656  		v.AddArg(x)
   657  		v.AddArg(y)
   658  		return true
   659  	}
   660  }
   661  func rewriteValueMIPS_OpAnd8_0(v *Value) bool {
   662  	// match: (And8 x y)
   663  	// cond:
   664  	// result: (AND x y)
   665  	for {
   666  		_ = v.Args[1]
   667  		x := v.Args[0]
   668  		y := v.Args[1]
   669  		v.reset(OpMIPSAND)
   670  		v.AddArg(x)
   671  		v.AddArg(y)
   672  		return true
   673  	}
   674  }
   675  func rewriteValueMIPS_OpAndB_0(v *Value) bool {
   676  	// match: (AndB x y)
   677  	// cond:
   678  	// result: (AND x y)
   679  	for {
   680  		_ = v.Args[1]
   681  		x := v.Args[0]
   682  		y := v.Args[1]
   683  		v.reset(OpMIPSAND)
   684  		v.AddArg(x)
   685  		v.AddArg(y)
   686  		return true
   687  	}
   688  }
   689  func rewriteValueMIPS_OpAtomicAdd32_0(v *Value) bool {
   690  	// match: (AtomicAdd32 ptr val mem)
   691  	// cond:
   692  	// result: (LoweredAtomicAdd ptr val mem)
   693  	for {
   694  		_ = v.Args[2]
   695  		ptr := v.Args[0]
   696  		val := v.Args[1]
   697  		mem := v.Args[2]
   698  		v.reset(OpMIPSLoweredAtomicAdd)
   699  		v.AddArg(ptr)
   700  		v.AddArg(val)
   701  		v.AddArg(mem)
   702  		return true
   703  	}
   704  }
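// AtomicAnd8 has no direct MIPS instruction, so the rules below lower it to a
// 32-bit LoweredAtomicAnd on the containing aligned word: the address operand
// clears the low two bits (ptr & ^3), and the value operand is the byte
// shifted into its lane ((ptr&3)*8 bits) ORed with the complement of a shifted
// 0xff mask, so the other three bytes are ANDed with all-ones and left
// unchanged. On big-endian targets the byte lane is (ptr&3)^3 instead, hence
// the extra XORconst [3] in the second rule.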
   705  func rewriteValueMIPS_OpAtomicAnd8_0(v *Value) bool {
   706  	b := v.Block
   707  	_ = b
   708  	config := b.Func.Config
   709  	_ = config
   710  	typ := &b.Func.Config.Types
   711  	_ = typ
   712  	// match: (AtomicAnd8 ptr val mem)
   713  	// cond: !config.BigEndian
   714  	// result: (LoweredAtomicAnd (AND <typ.UInt32Ptr> (MOVWconst [^3]) ptr) (OR <typ.UInt32> (SLL <typ.UInt32> (ZeroExt8to32 val) (SLLconst <typ.UInt32> [3] (ANDconst <typ.UInt32> [3] ptr))) (NORconst [0] <typ.UInt32> (SLL <typ.UInt32> (MOVWconst [0xff]) (SLLconst <typ.UInt32> [3] (ANDconst <typ.UInt32> [3] ptr))))) mem)
   715  	for {
   716  		_ = v.Args[2]
   717  		ptr := v.Args[0]
   718  		val := v.Args[1]
   719  		mem := v.Args[2]
   720  		if !(!config.BigEndian) {
   721  			break
   722  		}
   723  		v.reset(OpMIPSLoweredAtomicAnd)
   724  		v0 := b.NewValue0(v.Pos, OpMIPSAND, typ.UInt32Ptr)
   725  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   726  		v1.AuxInt = ^3
   727  		v0.AddArg(v1)
   728  		v0.AddArg(ptr)
   729  		v.AddArg(v0)
   730  		v2 := b.NewValue0(v.Pos, OpMIPSOR, typ.UInt32)
   731  		v3 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
   732  		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   733  		v4.AddArg(val)
   734  		v3.AddArg(v4)
   735  		v5 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
   736  		v5.AuxInt = 3
   737  		v6 := b.NewValue0(v.Pos, OpMIPSANDconst, typ.UInt32)
   738  		v6.AuxInt = 3
   739  		v6.AddArg(ptr)
   740  		v5.AddArg(v6)
   741  		v3.AddArg(v5)
   742  		v2.AddArg(v3)
   743  		v7 := b.NewValue0(v.Pos, OpMIPSNORconst, typ.UInt32)
   744  		v7.AuxInt = 0
   745  		v8 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
   746  		v9 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   747  		v9.AuxInt = 0xff
   748  		v8.AddArg(v9)
   749  		v10 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
   750  		v10.AuxInt = 3
   751  		v11 := b.NewValue0(v.Pos, OpMIPSANDconst, typ.UInt32)
   752  		v11.AuxInt = 3
   753  		v11.AddArg(ptr)
   754  		v10.AddArg(v11)
   755  		v8.AddArg(v10)
   756  		v7.AddArg(v8)
   757  		v2.AddArg(v7)
   758  		v.AddArg(v2)
   759  		v.AddArg(mem)
   760  		return true
   761  	}
   762  	// match: (AtomicAnd8 ptr val mem)
   763  	// cond: config.BigEndian
   764  	// result: (LoweredAtomicAnd (AND <typ.UInt32Ptr> (MOVWconst [^3]) ptr) (OR <typ.UInt32> (SLL <typ.UInt32> (ZeroExt8to32 val) (SLLconst <typ.UInt32> [3] (ANDconst <typ.UInt32> [3] (XORconst <typ.UInt32> [3] ptr)))) (NORconst [0] <typ.UInt32> (SLL <typ.UInt32> (MOVWconst [0xff]) (SLLconst <typ.UInt32> [3] (ANDconst <typ.UInt32> [3] (XORconst <typ.UInt32> [3] ptr)))))) mem)
   765  	for {
   766  		_ = v.Args[2]
   767  		ptr := v.Args[0]
   768  		val := v.Args[1]
   769  		mem := v.Args[2]
   770  		if !(config.BigEndian) {
   771  			break
   772  		}
   773  		v.reset(OpMIPSLoweredAtomicAnd)
   774  		v0 := b.NewValue0(v.Pos, OpMIPSAND, typ.UInt32Ptr)
   775  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   776  		v1.AuxInt = ^3
   777  		v0.AddArg(v1)
   778  		v0.AddArg(ptr)
   779  		v.AddArg(v0)
   780  		v2 := b.NewValue0(v.Pos, OpMIPSOR, typ.UInt32)
   781  		v3 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
   782  		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   783  		v4.AddArg(val)
   784  		v3.AddArg(v4)
   785  		v5 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
   786  		v5.AuxInt = 3
   787  		v6 := b.NewValue0(v.Pos, OpMIPSANDconst, typ.UInt32)
   788  		v6.AuxInt = 3
   789  		v7 := b.NewValue0(v.Pos, OpMIPSXORconst, typ.UInt32)
   790  		v7.AuxInt = 3
   791  		v7.AddArg(ptr)
   792  		v6.AddArg(v7)
   793  		v5.AddArg(v6)
   794  		v3.AddArg(v5)
   795  		v2.AddArg(v3)
   796  		v8 := b.NewValue0(v.Pos, OpMIPSNORconst, typ.UInt32)
   797  		v8.AuxInt = 0
   798  		v9 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
   799  		v10 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   800  		v10.AuxInt = 0xff
   801  		v9.AddArg(v10)
   802  		v11 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
   803  		v11.AuxInt = 3
   804  		v12 := b.NewValue0(v.Pos, OpMIPSANDconst, typ.UInt32)
   805  		v12.AuxInt = 3
   806  		v13 := b.NewValue0(v.Pos, OpMIPSXORconst, typ.UInt32)
   807  		v13.AuxInt = 3
   808  		v13.AddArg(ptr)
   809  		v12.AddArg(v13)
   810  		v11.AddArg(v12)
   811  		v9.AddArg(v11)
   812  		v8.AddArg(v9)
   813  		v2.AddArg(v8)
   814  		v.AddArg(v2)
   815  		v.AddArg(mem)
   816  		return true
   817  	}
   818  	return false
   819  }
   820  func rewriteValueMIPS_OpAtomicCompareAndSwap32_0(v *Value) bool {
   821  	// match: (AtomicCompareAndSwap32 ptr old new_ mem)
   822  	// cond:
   823  	// result: (LoweredAtomicCas ptr old new_ mem)
   824  	for {
   825  		_ = v.Args[3]
   826  		ptr := v.Args[0]
   827  		old := v.Args[1]
   828  		new_ := v.Args[2]
   829  		mem := v.Args[3]
   830  		v.reset(OpMIPSLoweredAtomicCas)
   831  		v.AddArg(ptr)
   832  		v.AddArg(old)
   833  		v.AddArg(new_)
   834  		v.AddArg(mem)
   835  		return true
   836  	}
   837  }
   838  func rewriteValueMIPS_OpAtomicExchange32_0(v *Value) bool {
   839  	// match: (AtomicExchange32 ptr val mem)
   840  	// cond:
   841  	// result: (LoweredAtomicExchange ptr val mem)
   842  	for {
   843  		_ = v.Args[2]
   844  		ptr := v.Args[0]
   845  		val := v.Args[1]
   846  		mem := v.Args[2]
   847  		v.reset(OpMIPSLoweredAtomicExchange)
   848  		v.AddArg(ptr)
   849  		v.AddArg(val)
   850  		v.AddArg(mem)
   851  		return true
   852  	}
   853  }
   854  func rewriteValueMIPS_OpAtomicLoad32_0(v *Value) bool {
   855  	// match: (AtomicLoad32 ptr mem)
   856  	// cond:
   857  	// result: (LoweredAtomicLoad ptr mem)
   858  	for {
   859  		_ = v.Args[1]
   860  		ptr := v.Args[0]
   861  		mem := v.Args[1]
   862  		v.reset(OpMIPSLoweredAtomicLoad)
   863  		v.AddArg(ptr)
   864  		v.AddArg(mem)
   865  		return true
   866  	}
   867  }
   868  func rewriteValueMIPS_OpAtomicLoadPtr_0(v *Value) bool {
   869  	// match: (AtomicLoadPtr ptr mem)
   870  	// cond:
   871  	// result: (LoweredAtomicLoad ptr mem)
   872  	for {
   873  		_ = v.Args[1]
   874  		ptr := v.Args[0]
   875  		mem := v.Args[1]
   876  		v.reset(OpMIPSLoweredAtomicLoad)
   877  		v.AddArg(ptr)
   878  		v.AddArg(mem)
   879  		return true
   880  	}
   881  }
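// AtomicOr8 is lowered the same way as AtomicAnd8 above, but the OR case needs
// no inverted mask: the zero-extended byte shifted into its lane leaves the
// other bytes of the word zero, and ORing with zero is a no-op. Big-endian
// targets again flip the byte lane with XORconst [3].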
   882  func rewriteValueMIPS_OpAtomicOr8_0(v *Value) bool {
   883  	b := v.Block
   884  	_ = b
   885  	config := b.Func.Config
   886  	_ = config
   887  	typ := &b.Func.Config.Types
   888  	_ = typ
   889  	// match: (AtomicOr8 ptr val mem)
   890  	// cond: !config.BigEndian
   891  	// result: (LoweredAtomicOr (AND <typ.UInt32Ptr> (MOVWconst [^3]) ptr) (SLL <typ.UInt32> (ZeroExt8to32 val) (SLLconst <typ.UInt32> [3] (ANDconst <typ.UInt32> [3] ptr))) mem)
   892  	for {
   893  		_ = v.Args[2]
   894  		ptr := v.Args[0]
   895  		val := v.Args[1]
   896  		mem := v.Args[2]
   897  		if !(!config.BigEndian) {
   898  			break
   899  		}
   900  		v.reset(OpMIPSLoweredAtomicOr)
   901  		v0 := b.NewValue0(v.Pos, OpMIPSAND, typ.UInt32Ptr)
   902  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   903  		v1.AuxInt = ^3
   904  		v0.AddArg(v1)
   905  		v0.AddArg(ptr)
   906  		v.AddArg(v0)
   907  		v2 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
   908  		v3 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   909  		v3.AddArg(val)
   910  		v2.AddArg(v3)
   911  		v4 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
   912  		v4.AuxInt = 3
   913  		v5 := b.NewValue0(v.Pos, OpMIPSANDconst, typ.UInt32)
   914  		v5.AuxInt = 3
   915  		v5.AddArg(ptr)
   916  		v4.AddArg(v5)
   917  		v2.AddArg(v4)
   918  		v.AddArg(v2)
   919  		v.AddArg(mem)
   920  		return true
   921  	}
   922  	// match: (AtomicOr8 ptr val mem)
   923  	// cond: config.BigEndian
   924  	// result: (LoweredAtomicOr (AND <typ.UInt32Ptr> (MOVWconst [^3]) ptr) (SLL <typ.UInt32> (ZeroExt8to32 val) (SLLconst <typ.UInt32> [3] (ANDconst <typ.UInt32> [3] (XORconst <typ.UInt32> [3] ptr)))) mem)
   925  	for {
   926  		_ = v.Args[2]
   927  		ptr := v.Args[0]
   928  		val := v.Args[1]
   929  		mem := v.Args[2]
   930  		if !(config.BigEndian) {
   931  			break
   932  		}
   933  		v.reset(OpMIPSLoweredAtomicOr)
   934  		v0 := b.NewValue0(v.Pos, OpMIPSAND, typ.UInt32Ptr)
   935  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   936  		v1.AuxInt = ^3
   937  		v0.AddArg(v1)
   938  		v0.AddArg(ptr)
   939  		v.AddArg(v0)
   940  		v2 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
   941  		v3 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   942  		v3.AddArg(val)
   943  		v2.AddArg(v3)
   944  		v4 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
   945  		v4.AuxInt = 3
   946  		v5 := b.NewValue0(v.Pos, OpMIPSANDconst, typ.UInt32)
   947  		v5.AuxInt = 3
   948  		v6 := b.NewValue0(v.Pos, OpMIPSXORconst, typ.UInt32)
   949  		v6.AuxInt = 3
   950  		v6.AddArg(ptr)
   951  		v5.AddArg(v6)
   952  		v4.AddArg(v5)
   953  		v2.AddArg(v4)
   954  		v.AddArg(v2)
   955  		v.AddArg(mem)
   956  		return true
   957  	}
   958  	return false
   959  }
   960  func rewriteValueMIPS_OpAtomicStore32_0(v *Value) bool {
   961  	// match: (AtomicStore32 ptr val mem)
   962  	// cond:
   963  	// result: (LoweredAtomicStore ptr val mem)
   964  	for {
   965  		_ = v.Args[2]
   966  		ptr := v.Args[0]
   967  		val := v.Args[1]
   968  		mem := v.Args[2]
   969  		v.reset(OpMIPSLoweredAtomicStore)
   970  		v.AddArg(ptr)
   971  		v.AddArg(val)
   972  		v.AddArg(mem)
   973  		return true
   974  	}
   975  }
   976  func rewriteValueMIPS_OpAtomicStorePtrNoWB_0(v *Value) bool {
   977  	// match: (AtomicStorePtrNoWB ptr val mem)
   978  	// cond:
   979  	// result: (LoweredAtomicStore ptr val mem)
   980  	for {
   981  		_ = v.Args[2]
   982  		ptr := v.Args[0]
   983  		val := v.Args[1]
   984  		mem := v.Args[2]
   985  		v.reset(OpMIPSLoweredAtomicStore)
   986  		v.AddArg(ptr)
   987  		v.AddArg(val)
   988  		v.AddArg(mem)
   989  		return true
   990  	}
   991  }
   992  func rewriteValueMIPS_OpAvg32u_0(v *Value) bool {
   993  	b := v.Block
   994  	_ = b
   995  	// match: (Avg32u <t> x y)
   996  	// cond:
   997  	// result: (ADD (SRLconst <t> (SUB <t> x y) [1]) y)
   998  	for {
   999  		t := v.Type
  1000  		_ = v.Args[1]
  1001  		x := v.Args[0]
  1002  		y := v.Args[1]
  1003  		v.reset(OpMIPSADD)
  1004  		v0 := b.NewValue0(v.Pos, OpMIPSSRLconst, t)
  1005  		v0.AuxInt = 1
  1006  		v1 := b.NewValue0(v.Pos, OpMIPSSUB, t)
  1007  		v1.AddArg(x)
  1008  		v1.AddArg(y)
  1009  		v0.AddArg(v1)
  1010  		v.AddArg(v0)
  1011  		v.AddArg(y)
  1012  		return true
  1013  	}
  1014  }
  1015  func rewriteValueMIPS_OpBitLen32_0(v *Value) bool {
  1016  	b := v.Block
  1017  	_ = b
  1018  	typ := &b.Func.Config.Types
  1019  	_ = typ
  1020  	// match: (BitLen32 <t> x)
  1021  	// cond:
  1022  	// result: (SUB (MOVWconst [32]) (CLZ <t> x))
  1023  	for {
  1024  		t := v.Type
  1025  		x := v.Args[0]
  1026  		v.reset(OpMIPSSUB)
  1027  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  1028  		v0.AuxInt = 32
  1029  		v.AddArg(v0)
  1030  		v1 := b.NewValue0(v.Pos, OpMIPSCLZ, t)
  1031  		v1.AddArg(x)
  1032  		v.AddArg(v1)
  1033  		return true
  1034  	}
  1035  }
  1036  func rewriteValueMIPS_OpClosureCall_0(v *Value) bool {
  1037  	// match: (ClosureCall [argwid] entry closure mem)
  1038  	// cond:
  1039  	// result: (CALLclosure [argwid] entry closure mem)
  1040  	for {
  1041  		argwid := v.AuxInt
  1042  		_ = v.Args[2]
  1043  		entry := v.Args[0]
  1044  		closure := v.Args[1]
  1045  		mem := v.Args[2]
  1046  		v.reset(OpMIPSCALLclosure)
  1047  		v.AuxInt = argwid
  1048  		v.AddArg(entry)
  1049  		v.AddArg(closure)
  1050  		v.AddArg(mem)
  1051  		return true
  1052  	}
  1053  }
  1054  func rewriteValueMIPS_OpCom16_0(v *Value) bool {
  1055  	// match: (Com16 x)
  1056  	// cond:
  1057  	// result: (NORconst [0] x)
  1058  	for {
  1059  		x := v.Args[0]
  1060  		v.reset(OpMIPSNORconst)
  1061  		v.AuxInt = 0
  1062  		v.AddArg(x)
  1063  		return true
  1064  	}
  1065  }
  1066  func rewriteValueMIPS_OpCom32_0(v *Value) bool {
  1067  	// match: (Com32 x)
  1068  	// cond:
  1069  	// result: (NORconst [0] x)
  1070  	for {
  1071  		x := v.Args[0]
  1072  		v.reset(OpMIPSNORconst)
  1073  		v.AuxInt = 0
  1074  		v.AddArg(x)
  1075  		return true
  1076  	}
  1077  }
  1078  func rewriteValueMIPS_OpCom8_0(v *Value) bool {
  1079  	// match: (Com8 x)
  1080  	// cond:
  1081  	// result: (NORconst [0] x)
  1082  	for {
  1083  		x := v.Args[0]
  1084  		v.reset(OpMIPSNORconst)
  1085  		v.AuxInt = 0
  1086  		v.AddArg(x)
  1087  		return true
  1088  	}
  1089  }
  1090  func rewriteValueMIPS_OpConst16_0(v *Value) bool {
  1091  	// match: (Const16 [val])
  1092  	// cond:
  1093  	// result: (MOVWconst [val])
  1094  	for {
  1095  		val := v.AuxInt
  1096  		v.reset(OpMIPSMOVWconst)
  1097  		v.AuxInt = val
  1098  		return true
  1099  	}
  1100  }
  1101  func rewriteValueMIPS_OpConst32_0(v *Value) bool {
  1102  	// match: (Const32 [val])
  1103  	// cond:
  1104  	// result: (MOVWconst [val])
  1105  	for {
  1106  		val := v.AuxInt
  1107  		v.reset(OpMIPSMOVWconst)
  1108  		v.AuxInt = val
  1109  		return true
  1110  	}
  1111  }
  1112  func rewriteValueMIPS_OpConst32F_0(v *Value) bool {
  1113  	// match: (Const32F [val])
  1114  	// cond:
  1115  	// result: (MOVFconst [val])
  1116  	for {
  1117  		val := v.AuxInt
  1118  		v.reset(OpMIPSMOVFconst)
  1119  		v.AuxInt = val
  1120  		return true
  1121  	}
  1122  }
  1123  func rewriteValueMIPS_OpConst64F_0(v *Value) bool {
  1124  	// match: (Const64F [val])
  1125  	// cond:
  1126  	// result: (MOVDconst [val])
  1127  	for {
  1128  		val := v.AuxInt
  1129  		v.reset(OpMIPSMOVDconst)
  1130  		v.AuxInt = val
  1131  		return true
  1132  	}
  1133  }
  1134  func rewriteValueMIPS_OpConst8_0(v *Value) bool {
  1135  	// match: (Const8 [val])
  1136  	// cond:
  1137  	// result: (MOVWconst [val])
  1138  	for {
  1139  		val := v.AuxInt
  1140  		v.reset(OpMIPSMOVWconst)
  1141  		v.AuxInt = val
  1142  		return true
  1143  	}
  1144  }
  1145  func rewriteValueMIPS_OpConstBool_0(v *Value) bool {
  1146  	// match: (ConstBool [b])
  1147  	// cond:
  1148  	// result: (MOVWconst [b])
  1149  	for {
  1150  		b := v.AuxInt
  1151  		v.reset(OpMIPSMOVWconst)
  1152  		v.AuxInt = b
  1153  		return true
  1154  	}
  1155  }
  1156  func rewriteValueMIPS_OpConstNil_0(v *Value) bool {
  1157  	// match: (ConstNil)
  1158  	// cond:
  1159  	// result: (MOVWconst [0])
  1160  	for {
  1161  		v.reset(OpMIPSMOVWconst)
  1162  		v.AuxInt = 0
  1163  		return true
  1164  	}
  1165  }
  1166  func rewriteValueMIPS_OpConvert_0(v *Value) bool {
  1167  	// match: (Convert x mem)
  1168  	// cond:
  1169  	// result: (MOVWconvert x mem)
  1170  	for {
  1171  		_ = v.Args[1]
  1172  		x := v.Args[0]
  1173  		mem := v.Args[1]
  1174  		v.reset(OpMIPSMOVWconvert)
  1175  		v.AddArg(x)
  1176  		v.AddArg(mem)
  1177  		return true
  1178  	}
  1179  }
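// Ctz32 is lowered through CLZ: x & (-x) isolates the lowest set bit, and
// subtracting 1 turns it into a mask whose set bits are exactly the trailing
// zeros of x, so 32 - CLZ(mask) is the trailing-zero count. For x == 0 the
// mask is all ones, CLZ is 0, and the result is the required 32.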
  1180  func rewriteValueMIPS_OpCtz32_0(v *Value) bool {
  1181  	b := v.Block
  1182  	_ = b
  1183  	typ := &b.Func.Config.Types
  1184  	_ = typ
  1185  	// match: (Ctz32 <t> x)
  1186  	// cond:
  1187  	// result: (SUB (MOVWconst [32]) (CLZ <t> (SUBconst <t> [1] (AND <t> x (NEG <t> x)))))
  1188  	for {
  1189  		t := v.Type
  1190  		x := v.Args[0]
  1191  		v.reset(OpMIPSSUB)
  1192  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  1193  		v0.AuxInt = 32
  1194  		v.AddArg(v0)
  1195  		v1 := b.NewValue0(v.Pos, OpMIPSCLZ, t)
  1196  		v2 := b.NewValue0(v.Pos, OpMIPSSUBconst, t)
  1197  		v2.AuxInt = 1
  1198  		v3 := b.NewValue0(v.Pos, OpMIPSAND, t)
  1199  		v3.AddArg(x)
  1200  		v4 := b.NewValue0(v.Pos, OpMIPSNEG, t)
  1201  		v4.AddArg(x)
  1202  		v3.AddArg(v4)
  1203  		v2.AddArg(v3)
  1204  		v1.AddArg(v2)
  1205  		v.AddArg(v1)
  1206  		return true
  1207  	}
  1208  }
  1209  func rewriteValueMIPS_OpCvt32Fto32_0(v *Value) bool {
  1210  	// match: (Cvt32Fto32 x)
  1211  	// cond:
  1212  	// result: (TRUNCFW x)
  1213  	for {
  1214  		x := v.Args[0]
  1215  		v.reset(OpMIPSTRUNCFW)
  1216  		v.AddArg(x)
  1217  		return true
  1218  	}
  1219  }
  1220  func rewriteValueMIPS_OpCvt32Fto64F_0(v *Value) bool {
  1221  	// match: (Cvt32Fto64F x)
  1222  	// cond:
  1223  	// result: (MOVFD x)
  1224  	for {
  1225  		x := v.Args[0]
  1226  		v.reset(OpMIPSMOVFD)
  1227  		v.AddArg(x)
  1228  		return true
  1229  	}
  1230  }
  1231  func rewriteValueMIPS_OpCvt32to32F_0(v *Value) bool {
  1232  	// match: (Cvt32to32F x)
  1233  	// cond:
  1234  	// result: (MOVWF x)
  1235  	for {
  1236  		x := v.Args[0]
  1237  		v.reset(OpMIPSMOVWF)
  1238  		v.AddArg(x)
  1239  		return true
  1240  	}
  1241  }
  1242  func rewriteValueMIPS_OpCvt32to64F_0(v *Value) bool {
  1243  	// match: (Cvt32to64F x)
  1244  	// cond:
  1245  	// result: (MOVWD x)
  1246  	for {
  1247  		x := v.Args[0]
  1248  		v.reset(OpMIPSMOVWD)
  1249  		v.AddArg(x)
  1250  		return true
  1251  	}
  1252  }
  1253  func rewriteValueMIPS_OpCvt64Fto32_0(v *Value) bool {
  1254  	// match: (Cvt64Fto32 x)
  1255  	// cond:
  1256  	// result: (TRUNCDW x)
  1257  	for {
  1258  		x := v.Args[0]
  1259  		v.reset(OpMIPSTRUNCDW)
  1260  		v.AddArg(x)
  1261  		return true
  1262  	}
  1263  }
  1264  func rewriteValueMIPS_OpCvt64Fto32F_0(v *Value) bool {
  1265  	// match: (Cvt64Fto32F x)
  1266  	// cond:
  1267  	// result: (MOVDF x)
  1268  	for {
  1269  		x := v.Args[0]
  1270  		v.reset(OpMIPSMOVDF)
  1271  		v.AddArg(x)
  1272  		return true
  1273  	}
  1274  }
  1275  func rewriteValueMIPS_OpDiv16_0(v *Value) bool {
  1276  	b := v.Block
  1277  	_ = b
  1278  	typ := &b.Func.Config.Types
  1279  	_ = typ
  1280  	// match: (Div16 x y)
  1281  	// cond:
  1282  	// result: (Select1 (DIV (SignExt16to32 x) (SignExt16to32 y)))
  1283  	for {
  1284  		_ = v.Args[1]
  1285  		x := v.Args[0]
  1286  		y := v.Args[1]
  1287  		v.reset(OpSelect1)
  1288  		v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32))
  1289  		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  1290  		v1.AddArg(x)
  1291  		v0.AddArg(v1)
  1292  		v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  1293  		v2.AddArg(y)
  1294  		v0.AddArg(v2)
  1295  		v.AddArg(v0)
  1296  		return true
  1297  	}
  1298  }
  1299  func rewriteValueMIPS_OpDiv16u_0(v *Value) bool {
  1300  	b := v.Block
  1301  	_ = b
  1302  	typ := &b.Func.Config.Types
  1303  	_ = typ
  1304  	// match: (Div16u x y)
  1305  	// cond:
  1306  	// result: (Select1 (DIVU (ZeroExt16to32 x) (ZeroExt16to32 y)))
  1307  	for {
  1308  		_ = v.Args[1]
  1309  		x := v.Args[0]
  1310  		y := v.Args[1]
  1311  		v.reset(OpSelect1)
  1312  		v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32))
  1313  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  1314  		v1.AddArg(x)
  1315  		v0.AddArg(v1)
  1316  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  1317  		v2.AddArg(y)
  1318  		v0.AddArg(v2)
  1319  		v.AddArg(v0)
  1320  		return true
  1321  	}
  1322  }
  1323  func rewriteValueMIPS_OpDiv32_0(v *Value) bool {
  1324  	b := v.Block
  1325  	_ = b
  1326  	typ := &b.Func.Config.Types
  1327  	_ = typ
  1328  	// match: (Div32 x y)
  1329  	// cond:
  1330  	// result: (Select1 (DIV x y))
  1331  	for {
  1332  		_ = v.Args[1]
  1333  		x := v.Args[0]
  1334  		y := v.Args[1]
  1335  		v.reset(OpSelect1)
  1336  		v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32))
  1337  		v0.AddArg(x)
  1338  		v0.AddArg(y)
  1339  		v.AddArg(v0)
  1340  		return true
  1341  	}
  1342  }
  1343  func rewriteValueMIPS_OpDiv32F_0(v *Value) bool {
  1344  	// match: (Div32F x y)
  1345  	// cond:
  1346  	// result: (DIVF x y)
  1347  	for {
  1348  		_ = v.Args[1]
  1349  		x := v.Args[0]
  1350  		y := v.Args[1]
  1351  		v.reset(OpMIPSDIVF)
  1352  		v.AddArg(x)
  1353  		v.AddArg(y)
  1354  		return true
  1355  	}
  1356  }
  1357  func rewriteValueMIPS_OpDiv32u_0(v *Value) bool {
  1358  	b := v.Block
  1359  	_ = b
  1360  	typ := &b.Func.Config.Types
  1361  	_ = typ
  1362  	// match: (Div32u x y)
  1363  	// cond:
  1364  	// result: (Select1 (DIVU x y))
  1365  	for {
  1366  		_ = v.Args[1]
  1367  		x := v.Args[0]
  1368  		y := v.Args[1]
  1369  		v.reset(OpSelect1)
  1370  		v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32))
  1371  		v0.AddArg(x)
  1372  		v0.AddArg(y)
  1373  		v.AddArg(v0)
  1374  		return true
  1375  	}
  1376  }
  1377  func rewriteValueMIPS_OpDiv64F_0(v *Value) bool {
  1378  	// match: (Div64F x y)
  1379  	// cond:
  1380  	// result: (DIVD x y)
  1381  	for {
  1382  		_ = v.Args[1]
  1383  		x := v.Args[0]
  1384  		y := v.Args[1]
  1385  		v.reset(OpMIPSDIVD)
  1386  		v.AddArg(x)
  1387  		v.AddArg(y)
  1388  		return true
  1389  	}
  1390  }
  1391  func rewriteValueMIPS_OpDiv8_0(v *Value) bool {
  1392  	b := v.Block
  1393  	_ = b
  1394  	typ := &b.Func.Config.Types
  1395  	_ = typ
  1396  	// match: (Div8 x y)
  1397  	// cond:
  1398  	// result: (Select1 (DIV (SignExt8to32 x) (SignExt8to32 y)))
  1399  	for {
  1400  		_ = v.Args[1]
  1401  		x := v.Args[0]
  1402  		y := v.Args[1]
  1403  		v.reset(OpSelect1)
  1404  		v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32))
  1405  		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  1406  		v1.AddArg(x)
  1407  		v0.AddArg(v1)
  1408  		v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  1409  		v2.AddArg(y)
  1410  		v0.AddArg(v2)
  1411  		v.AddArg(v0)
  1412  		return true
  1413  	}
  1414  }
  1415  func rewriteValueMIPS_OpDiv8u_0(v *Value) bool {
  1416  	b := v.Block
  1417  	_ = b
  1418  	typ := &b.Func.Config.Types
  1419  	_ = typ
  1420  	// match: (Div8u x y)
  1421  	// cond:
  1422  	// result: (Select1 (DIVU (ZeroExt8to32 x) (ZeroExt8to32 y)))
  1423  	for {
  1424  		_ = v.Args[1]
  1425  		x := v.Args[0]
  1426  		y := v.Args[1]
  1427  		v.reset(OpSelect1)
  1428  		v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32))
  1429  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  1430  		v1.AddArg(x)
  1431  		v0.AddArg(v1)
  1432  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  1433  		v2.AddArg(y)
  1434  		v0.AddArg(v2)
  1435  		v.AddArg(v0)
  1436  		return true
  1437  	}
  1438  }
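// Equality comparisons are lowered through SGTUconst [1]: x == y exactly when
// x^y is zero, i.e. when (x^y) < 1 as an unsigned value. Sub-word operands are
// zero-extended first so stale high bits cannot affect the XOR.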
  1439  func rewriteValueMIPS_OpEq16_0(v *Value) bool {
  1440  	b := v.Block
  1441  	_ = b
  1442  	typ := &b.Func.Config.Types
  1443  	_ = typ
  1444  	// match: (Eq16 x y)
  1445  	// cond:
  1446  	// result: (SGTUconst [1] (XOR (ZeroExt16to32 x) (ZeroExt16to32 y)))
  1447  	for {
  1448  		_ = v.Args[1]
  1449  		x := v.Args[0]
  1450  		y := v.Args[1]
  1451  		v.reset(OpMIPSSGTUconst)
  1452  		v.AuxInt = 1
  1453  		v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32)
  1454  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  1455  		v1.AddArg(x)
  1456  		v0.AddArg(v1)
  1457  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  1458  		v2.AddArg(y)
  1459  		v0.AddArg(v2)
  1460  		v.AddArg(v0)
  1461  		return true
  1462  	}
  1463  }
  1464  func rewriteValueMIPS_OpEq32_0(v *Value) bool {
  1465  	b := v.Block
  1466  	_ = b
  1467  	typ := &b.Func.Config.Types
  1468  	_ = typ
  1469  	// match: (Eq32 x y)
  1470  	// cond:
  1471  	// result: (SGTUconst [1] (XOR x y))
  1472  	for {
  1473  		_ = v.Args[1]
  1474  		x := v.Args[0]
  1475  		y := v.Args[1]
  1476  		v.reset(OpMIPSSGTUconst)
  1477  		v.AuxInt = 1
  1478  		v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32)
  1479  		v0.AddArg(x)
  1480  		v0.AddArg(y)
  1481  		v.AddArg(v0)
  1482  		return true
  1483  	}
  1484  }
  1485  func rewriteValueMIPS_OpEq32F_0(v *Value) bool {
  1486  	b := v.Block
  1487  	_ = b
  1488  	// match: (Eq32F x y)
  1489  	// cond:
  1490  	// result: (FPFlagTrue (CMPEQF x y))
  1491  	for {
  1492  		_ = v.Args[1]
  1493  		x := v.Args[0]
  1494  		y := v.Args[1]
  1495  		v.reset(OpMIPSFPFlagTrue)
  1496  		v0 := b.NewValue0(v.Pos, OpMIPSCMPEQF, types.TypeFlags)
  1497  		v0.AddArg(x)
  1498  		v0.AddArg(y)
  1499  		v.AddArg(v0)
  1500  		return true
  1501  	}
  1502  }
  1503  func rewriteValueMIPS_OpEq64F_0(v *Value) bool {
  1504  	b := v.Block
  1505  	_ = b
  1506  	// match: (Eq64F x y)
  1507  	// cond:
  1508  	// result: (FPFlagTrue (CMPEQD x y))
  1509  	for {
  1510  		_ = v.Args[1]
  1511  		x := v.Args[0]
  1512  		y := v.Args[1]
  1513  		v.reset(OpMIPSFPFlagTrue)
  1514  		v0 := b.NewValue0(v.Pos, OpMIPSCMPEQD, types.TypeFlags)
  1515  		v0.AddArg(x)
  1516  		v0.AddArg(y)
  1517  		v.AddArg(v0)
  1518  		return true
  1519  	}
  1520  }
  1521  func rewriteValueMIPS_OpEq8_0(v *Value) bool {
  1522  	b := v.Block
  1523  	_ = b
  1524  	typ := &b.Func.Config.Types
  1525  	_ = typ
  1526  	// match: (Eq8 x y)
  1527  	// cond:
  1528  	// result: (SGTUconst [1] (XOR (ZeroExt8to32 x) (ZeroExt8to32 y)))
  1529  	for {
  1530  		_ = v.Args[1]
  1531  		x := v.Args[0]
  1532  		y := v.Args[1]
  1533  		v.reset(OpMIPSSGTUconst)
  1534  		v.AuxInt = 1
  1535  		v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32)
  1536  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  1537  		v1.AddArg(x)
  1538  		v0.AddArg(v1)
  1539  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  1540  		v2.AddArg(y)
  1541  		v0.AddArg(v2)
  1542  		v.AddArg(v0)
  1543  		return true
  1544  	}
  1545  }
  1546  func rewriteValueMIPS_OpEqB_0(v *Value) bool {
  1547  	b := v.Block
  1548  	_ = b
  1549  	typ := &b.Func.Config.Types
  1550  	_ = typ
  1551  	// match: (EqB x y)
  1552  	// cond:
  1553  	// result: (XORconst [1] (XOR <typ.Bool> x y))
  1554  	for {
  1555  		_ = v.Args[1]
  1556  		x := v.Args[0]
  1557  		y := v.Args[1]
  1558  		v.reset(OpMIPSXORconst)
  1559  		v.AuxInt = 1
  1560  		v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.Bool)
  1561  		v0.AddArg(x)
  1562  		v0.AddArg(y)
  1563  		v.AddArg(v0)
  1564  		return true
  1565  	}
  1566  }
  1567  func rewriteValueMIPS_OpEqPtr_0(v *Value) bool {
  1568  	b := v.Block
  1569  	_ = b
  1570  	typ := &b.Func.Config.Types
  1571  	_ = typ
  1572  	// match: (EqPtr x y)
  1573  	// cond:
  1574  	// result: (SGTUconst [1] (XOR x y))
  1575  	for {
  1576  		_ = v.Args[1]
  1577  		x := v.Args[0]
  1578  		y := v.Args[1]
  1579  		v.reset(OpMIPSSGTUconst)
  1580  		v.AuxInt = 1
  1581  		v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32)
  1582  		v0.AddArg(x)
  1583  		v0.AddArg(y)
  1584  		v.AddArg(v0)
  1585  		return true
  1586  	}
  1587  }
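// The signed and unsigned Geq rules reuse SGT/SGTU by negating the swapped
// comparison: x >= y is !(y > x), with XORconst [1] providing the boolean
// negation.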
  1588  func rewriteValueMIPS_OpGeq16_0(v *Value) bool {
  1589  	b := v.Block
  1590  	_ = b
  1591  	typ := &b.Func.Config.Types
  1592  	_ = typ
  1593  	// match: (Geq16 x y)
  1594  	// cond:
  1595  	// result: (XORconst [1] (SGT (SignExt16to32 y) (SignExt16to32 x)))
  1596  	for {
  1597  		_ = v.Args[1]
  1598  		x := v.Args[0]
  1599  		y := v.Args[1]
  1600  		v.reset(OpMIPSXORconst)
  1601  		v.AuxInt = 1
  1602  		v0 := b.NewValue0(v.Pos, OpMIPSSGT, typ.Bool)
  1603  		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  1604  		v1.AddArg(y)
  1605  		v0.AddArg(v1)
  1606  		v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  1607  		v2.AddArg(x)
  1608  		v0.AddArg(v2)
  1609  		v.AddArg(v0)
  1610  		return true
  1611  	}
  1612  }
  1613  func rewriteValueMIPS_OpGeq16U_0(v *Value) bool {
  1614  	b := v.Block
  1615  	_ = b
  1616  	typ := &b.Func.Config.Types
  1617  	_ = typ
  1618  	// match: (Geq16U x y)
  1619  	// cond:
  1620  	// result: (XORconst [1] (SGTU (ZeroExt16to32 y) (ZeroExt16to32 x)))
  1621  	for {
  1622  		_ = v.Args[1]
  1623  		x := v.Args[0]
  1624  		y := v.Args[1]
  1625  		v.reset(OpMIPSXORconst)
  1626  		v.AuxInt = 1
  1627  		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
  1628  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  1629  		v1.AddArg(y)
  1630  		v0.AddArg(v1)
  1631  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  1632  		v2.AddArg(x)
  1633  		v0.AddArg(v2)
  1634  		v.AddArg(v0)
  1635  		return true
  1636  	}
  1637  }
  1638  func rewriteValueMIPS_OpGeq32_0(v *Value) bool {
  1639  	b := v.Block
  1640  	_ = b
  1641  	typ := &b.Func.Config.Types
  1642  	_ = typ
  1643  	// match: (Geq32 x y)
  1644  	// cond:
  1645  	// result: (XORconst [1] (SGT y x))
  1646  	for {
  1647  		_ = v.Args[1]
  1648  		x := v.Args[0]
  1649  		y := v.Args[1]
  1650  		v.reset(OpMIPSXORconst)
  1651  		v.AuxInt = 1
  1652  		v0 := b.NewValue0(v.Pos, OpMIPSSGT, typ.Bool)
  1653  		v0.AddArg(y)
  1654  		v0.AddArg(x)
  1655  		v.AddArg(v0)
  1656  		return true
  1657  	}
  1658  }
  1659  func rewriteValueMIPS_OpGeq32F_0(v *Value) bool {
  1660  	b := v.Block
  1661  	_ = b
  1662  	// match: (Geq32F x y)
  1663  	// cond:
  1664  	// result: (FPFlagTrue (CMPGEF x y))
  1665  	for {
  1666  		_ = v.Args[1]
  1667  		x := v.Args[0]
  1668  		y := v.Args[1]
  1669  		v.reset(OpMIPSFPFlagTrue)
  1670  		v0 := b.NewValue0(v.Pos, OpMIPSCMPGEF, types.TypeFlags)
  1671  		v0.AddArg(x)
  1672  		v0.AddArg(y)
  1673  		v.AddArg(v0)
  1674  		return true
  1675  	}
  1676  }
  1677  func rewriteValueMIPS_OpGeq32U_0(v *Value) bool {
  1678  	b := v.Block
  1679  	_ = b
  1680  	typ := &b.Func.Config.Types
  1681  	_ = typ
  1682  	// match: (Geq32U x y)
  1683  	// cond:
  1684  	// result: (XORconst [1] (SGTU y x))
  1685  	for {
  1686  		_ = v.Args[1]
  1687  		x := v.Args[0]
  1688  		y := v.Args[1]
  1689  		v.reset(OpMIPSXORconst)
  1690  		v.AuxInt = 1
  1691  		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
  1692  		v0.AddArg(y)
  1693  		v0.AddArg(x)
  1694  		v.AddArg(v0)
  1695  		return true
  1696  	}
  1697  }
  1698  func rewriteValueMIPS_OpGeq64F_0(v *Value) bool {
  1699  	b := v.Block
  1700  	_ = b
  1701  	// match: (Geq64F x y)
  1702  	// cond:
  1703  	// result: (FPFlagTrue (CMPGED x y))
  1704  	for {
  1705  		_ = v.Args[1]
  1706  		x := v.Args[0]
  1707  		y := v.Args[1]
  1708  		v.reset(OpMIPSFPFlagTrue)
  1709  		v0 := b.NewValue0(v.Pos, OpMIPSCMPGED, types.TypeFlags)
  1710  		v0.AddArg(x)
  1711  		v0.AddArg(y)
  1712  		v.AddArg(v0)
  1713  		return true
  1714  	}
  1715  }
  1716  func rewriteValueMIPS_OpGeq8_0(v *Value) bool {
  1717  	b := v.Block
  1718  	_ = b
  1719  	typ := &b.Func.Config.Types
  1720  	_ = typ
  1721  	// match: (Geq8 x y)
  1722  	// cond:
  1723  	// result: (XORconst [1] (SGT (SignExt8to32 y) (SignExt8to32 x)))
  1724  	for {
  1725  		_ = v.Args[1]
  1726  		x := v.Args[0]
  1727  		y := v.Args[1]
  1728  		v.reset(OpMIPSXORconst)
  1729  		v.AuxInt = 1
  1730  		v0 := b.NewValue0(v.Pos, OpMIPSSGT, typ.Bool)
  1731  		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  1732  		v1.AddArg(y)
  1733  		v0.AddArg(v1)
  1734  		v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  1735  		v2.AddArg(x)
  1736  		v0.AddArg(v2)
  1737  		v.AddArg(v0)
  1738  		return true
  1739  	}
  1740  }
  1741  func rewriteValueMIPS_OpGeq8U_0(v *Value) bool {
  1742  	b := v.Block
  1743  	_ = b
  1744  	typ := &b.Func.Config.Types
  1745  	_ = typ
  1746  	// match: (Geq8U x y)
  1747  	// cond:
  1748  	// result: (XORconst [1] (SGTU (ZeroExt8to32 y) (ZeroExt8to32 x)))
  1749  	for {
  1750  		_ = v.Args[1]
  1751  		x := v.Args[0]
  1752  		y := v.Args[1]
  1753  		v.reset(OpMIPSXORconst)
  1754  		v.AuxInt = 1
  1755  		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
  1756  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  1757  		v1.AddArg(y)
  1758  		v0.AddArg(v1)
  1759  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  1760  		v2.AddArg(x)
  1761  		v0.AddArg(v2)
  1762  		v.AddArg(v0)
  1763  		return true
  1764  	}
  1765  }
  1766  func rewriteValueMIPS_OpGetCallerSP_0(v *Value) bool {
  1767  	// match: (GetCallerSP)
  1768  	// cond:
  1769  	// result: (LoweredGetCallerSP)
  1770  	for {
  1771  		v.reset(OpMIPSLoweredGetCallerSP)
  1772  		return true
  1773  	}
  1774  }
  1775  func rewriteValueMIPS_OpGetClosurePtr_0(v *Value) bool {
  1776  	// match: (GetClosurePtr)
  1777  	// cond:
  1778  	// result: (LoweredGetClosurePtr)
  1779  	for {
  1780  		v.reset(OpMIPSLoweredGetClosurePtr)
  1781  		return true
  1782  	}
  1783  }
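// The Greater* ops map directly onto SGT/SGTU ("set on greater than", signed
// and unsigned), again widening 8- and 16-bit operands to 32 bits first.
// Floating-point Greater uses CMPGTF/CMPGTD and reads the FP condition flag
// back through FPFlagTrue.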
  1784  func rewriteValueMIPS_OpGreater16_0(v *Value) bool {
  1785  	b := v.Block
  1786  	_ = b
  1787  	typ := &b.Func.Config.Types
  1788  	_ = typ
  1789  	// match: (Greater16 x y)
  1790  	// cond:
  1791  	// result: (SGT (SignExt16to32 x) (SignExt16to32 y))
  1792  	for {
  1793  		_ = v.Args[1]
  1794  		x := v.Args[0]
  1795  		y := v.Args[1]
  1796  		v.reset(OpMIPSSGT)
  1797  		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  1798  		v0.AddArg(x)
  1799  		v.AddArg(v0)
  1800  		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  1801  		v1.AddArg(y)
  1802  		v.AddArg(v1)
  1803  		return true
  1804  	}
  1805  }
  1806  func rewriteValueMIPS_OpGreater16U_0(v *Value) bool {
  1807  	b := v.Block
  1808  	_ = b
  1809  	typ := &b.Func.Config.Types
  1810  	_ = typ
  1811  	// match: (Greater16U x y)
  1812  	// cond:
  1813  	// result: (SGTU (ZeroExt16to32 x) (ZeroExt16to32 y))
  1814  	for {
  1815  		_ = v.Args[1]
  1816  		x := v.Args[0]
  1817  		y := v.Args[1]
  1818  		v.reset(OpMIPSSGTU)
  1819  		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  1820  		v0.AddArg(x)
  1821  		v.AddArg(v0)
  1822  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  1823  		v1.AddArg(y)
  1824  		v.AddArg(v1)
  1825  		return true
  1826  	}
  1827  }
  1828  func rewriteValueMIPS_OpGreater32_0(v *Value) bool {
  1829  	// match: (Greater32 x y)
  1830  	// cond:
  1831  	// result: (SGT x y)
  1832  	for {
  1833  		_ = v.Args[1]
  1834  		x := v.Args[0]
  1835  		y := v.Args[1]
  1836  		v.reset(OpMIPSSGT)
  1837  		v.AddArg(x)
  1838  		v.AddArg(y)
  1839  		return true
  1840  	}
  1841  }
  1842  func rewriteValueMIPS_OpGreater32F_0(v *Value) bool {
  1843  	b := v.Block
  1844  	_ = b
  1845  	// match: (Greater32F x y)
  1846  	// cond:
  1847  	// result: (FPFlagTrue (CMPGTF x y))
  1848  	for {
  1849  		_ = v.Args[1]
  1850  		x := v.Args[0]
  1851  		y := v.Args[1]
  1852  		v.reset(OpMIPSFPFlagTrue)
  1853  		v0 := b.NewValue0(v.Pos, OpMIPSCMPGTF, types.TypeFlags)
  1854  		v0.AddArg(x)
  1855  		v0.AddArg(y)
  1856  		v.AddArg(v0)
  1857  		return true
  1858  	}
  1859  }
  1860  func rewriteValueMIPS_OpGreater32U_0(v *Value) bool {
  1861  	// match: (Greater32U x y)
  1862  	// cond:
  1863  	// result: (SGTU x y)
  1864  	for {
  1865  		_ = v.Args[1]
  1866  		x := v.Args[0]
  1867  		y := v.Args[1]
  1868  		v.reset(OpMIPSSGTU)
  1869  		v.AddArg(x)
  1870  		v.AddArg(y)
  1871  		return true
  1872  	}
  1873  }
  1874  func rewriteValueMIPS_OpGreater64F_0(v *Value) bool {
  1875  	b := v.Block
  1876  	_ = b
  1877  	// match: (Greater64F x y)
  1878  	// cond:
  1879  	// result: (FPFlagTrue (CMPGTD x y))
  1880  	for {
  1881  		_ = v.Args[1]
  1882  		x := v.Args[0]
  1883  		y := v.Args[1]
  1884  		v.reset(OpMIPSFPFlagTrue)
  1885  		v0 := b.NewValue0(v.Pos, OpMIPSCMPGTD, types.TypeFlags)
  1886  		v0.AddArg(x)
  1887  		v0.AddArg(y)
  1888  		v.AddArg(v0)
  1889  		return true
  1890  	}
  1891  }
  1892  func rewriteValueMIPS_OpGreater8_0(v *Value) bool {
  1893  	b := v.Block
  1894  	_ = b
  1895  	typ := &b.Func.Config.Types
  1896  	_ = typ
  1897  	// match: (Greater8 x y)
  1898  	// cond:
  1899  	// result: (SGT (SignExt8to32 x) (SignExt8to32 y))
  1900  	for {
  1901  		_ = v.Args[1]
  1902  		x := v.Args[0]
  1903  		y := v.Args[1]
  1904  		v.reset(OpMIPSSGT)
  1905  		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  1906  		v0.AddArg(x)
  1907  		v.AddArg(v0)
  1908  		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  1909  		v1.AddArg(y)
  1910  		v.AddArg(v1)
  1911  		return true
  1912  	}
  1913  }
  1914  func rewriteValueMIPS_OpGreater8U_0(v *Value) bool {
  1915  	b := v.Block
  1916  	_ = b
  1917  	typ := &b.Func.Config.Types
  1918  	_ = typ
  1919  	// match: (Greater8U x y)
  1920  	// cond:
  1921  	// result: (SGTU (ZeroExt8to32 x) (ZeroExt8to32 y))
  1922  	for {
  1923  		_ = v.Args[1]
  1924  		x := v.Args[0]
  1925  		y := v.Args[1]
  1926  		v.reset(OpMIPSSGTU)
  1927  		v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  1928  		v0.AddArg(x)
  1929  		v.AddArg(v0)
  1930  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  1931  		v1.AddArg(y)
  1932  		v.AddArg(v1)
  1933  		return true
  1934  	}
  1935  }
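// Hmul32/Hmul32u: MULT/MULTU produce the full 64-bit product in the HI/LO
// register pair, modeled here as a two-element tuple; Select0 extracts the
// high 32 bits, which is exactly the Hmul result.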
  1936  func rewriteValueMIPS_OpHmul32_0(v *Value) bool {
  1937  	b := v.Block
  1938  	_ = b
  1939  	typ := &b.Func.Config.Types
  1940  	_ = typ
  1941  	// match: (Hmul32 x y)
  1942  	// cond:
  1943  	// result: (Select0 (MULT x y))
  1944  	for {
  1945  		_ = v.Args[1]
  1946  		x := v.Args[0]
  1947  		y := v.Args[1]
  1948  		v.reset(OpSelect0)
  1949  		v0 := b.NewValue0(v.Pos, OpMIPSMULT, types.NewTuple(typ.Int32, typ.Int32))
  1950  		v0.AddArg(x)
  1951  		v0.AddArg(y)
  1952  		v.AddArg(v0)
  1953  		return true
  1954  	}
  1955  }
  1956  func rewriteValueMIPS_OpHmul32u_0(v *Value) bool {
  1957  	b := v.Block
  1958  	_ = b
  1959  	typ := &b.Func.Config.Types
  1960  	_ = typ
  1961  	// match: (Hmul32u x y)
  1962  	// cond:
  1963  	// result: (Select0 (MULTU x y))
  1964  	for {
  1965  		_ = v.Args[1]
  1966  		x := v.Args[0]
  1967  		y := v.Args[1]
  1968  		v.reset(OpSelect0)
  1969  		v0 := b.NewValue0(v.Pos, OpMIPSMULTU, types.NewTuple(typ.UInt32, typ.UInt32))
  1970  		v0.AddArg(x)
  1971  		v0.AddArg(y)
  1972  		v.AddArg(v0)
  1973  		return true
  1974  	}
  1975  }
  1976  func rewriteValueMIPS_OpInterCall_0(v *Value) bool {
  1977  	// match: (InterCall [argwid] entry mem)
  1978  	// cond:
  1979  	// result: (CALLinter [argwid] entry mem)
  1980  	for {
  1981  		argwid := v.AuxInt
  1982  		_ = v.Args[1]
  1983  		entry := v.Args[0]
  1984  		mem := v.Args[1]
  1985  		v.reset(OpMIPSCALLinter)
  1986  		v.AuxInt = argwid
  1987  		v.AddArg(entry)
  1988  		v.AddArg(mem)
  1989  		return true
  1990  	}
  1991  }
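// Bounds checks are plain unsigned comparisons: IsInBounds idx len is
// len >u idx, IsSliceInBounds is !(idx >u len), and IsNonNil compares the
// pointer against a zero constant with SGTU.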
  1992  func rewriteValueMIPS_OpIsInBounds_0(v *Value) bool {
  1993  	// match: (IsInBounds idx len)
  1994  	// cond:
  1995  	// result: (SGTU len idx)
  1996  	for {
  1997  		_ = v.Args[1]
  1998  		idx := v.Args[0]
  1999  		len := v.Args[1]
  2000  		v.reset(OpMIPSSGTU)
  2001  		v.AddArg(len)
  2002  		v.AddArg(idx)
  2003  		return true
  2004  	}
  2005  }
  2006  func rewriteValueMIPS_OpIsNonNil_0(v *Value) bool {
  2007  	b := v.Block
  2008  	_ = b
  2009  	typ := &b.Func.Config.Types
  2010  	_ = typ
  2011  	// match: (IsNonNil ptr)
  2012  	// cond:
  2013  	// result: (SGTU ptr (MOVWconst [0]))
  2014  	for {
  2015  		ptr := v.Args[0]
  2016  		v.reset(OpMIPSSGTU)
  2017  		v.AddArg(ptr)
  2018  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  2019  		v0.AuxInt = 0
  2020  		v.AddArg(v0)
  2021  		return true
  2022  	}
  2023  }
  2024  func rewriteValueMIPS_OpIsSliceInBounds_0(v *Value) bool {
  2025  	b := v.Block
  2026  	_ = b
  2027  	typ := &b.Func.Config.Types
  2028  	_ = typ
  2029  	// match: (IsSliceInBounds idx len)
  2030  	// cond:
  2031  	// result: (XORconst [1] (SGTU idx len))
  2032  	for {
  2033  		_ = v.Args[1]
  2034  		idx := v.Args[0]
  2035  		len := v.Args[1]
  2036  		v.reset(OpMIPSXORconst)
  2037  		v.AuxInt = 1
  2038  		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
  2039  		v0.AddArg(idx)
  2040  		v0.AddArg(len)
  2041  		v.AddArg(v0)
  2042  		return true
  2043  	}
  2044  }
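// The Leq* family is lowered as the boolean negation of the corresponding
// greater-than: x <= y becomes XORconst [1] applied to (SGT/SGTU x y).
// Floating-point Leq instead swaps the operands of CMPGEF/CMPGED.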
  2045  func rewriteValueMIPS_OpLeq16_0(v *Value) bool {
  2046  	b := v.Block
  2047  	_ = b
  2048  	typ := &b.Func.Config.Types
  2049  	_ = typ
  2050  	// match: (Leq16 x y)
  2051  	// cond:
  2052  	// result: (XORconst [1] (SGT (SignExt16to32 x) (SignExt16to32 y)))
  2053  	for {
  2054  		_ = v.Args[1]
  2055  		x := v.Args[0]
  2056  		y := v.Args[1]
  2057  		v.reset(OpMIPSXORconst)
  2058  		v.AuxInt = 1
  2059  		v0 := b.NewValue0(v.Pos, OpMIPSSGT, typ.Bool)
  2060  		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  2061  		v1.AddArg(x)
  2062  		v0.AddArg(v1)
  2063  		v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  2064  		v2.AddArg(y)
  2065  		v0.AddArg(v2)
  2066  		v.AddArg(v0)
  2067  		return true
  2068  	}
  2069  }
  2070  func rewriteValueMIPS_OpLeq16U_0(v *Value) bool {
  2071  	b := v.Block
  2072  	_ = b
  2073  	typ := &b.Func.Config.Types
  2074  	_ = typ
  2075  	// match: (Leq16U x y)
  2076  	// cond:
  2077  	// result: (XORconst [1] (SGTU (ZeroExt16to32 x) (ZeroExt16to32 y)))
  2078  	for {
  2079  		_ = v.Args[1]
  2080  		x := v.Args[0]
  2081  		y := v.Args[1]
  2082  		v.reset(OpMIPSXORconst)
  2083  		v.AuxInt = 1
  2084  		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
  2085  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2086  		v1.AddArg(x)
  2087  		v0.AddArg(v1)
  2088  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2089  		v2.AddArg(y)
  2090  		v0.AddArg(v2)
  2091  		v.AddArg(v0)
  2092  		return true
  2093  	}
  2094  }
  2095  func rewriteValueMIPS_OpLeq32_0(v *Value) bool {
  2096  	b := v.Block
  2097  	_ = b
  2098  	typ := &b.Func.Config.Types
  2099  	_ = typ
  2100  	// match: (Leq32 x y)
  2101  	// cond:
  2102  	// result: (XORconst [1] (SGT x y))
  2103  	for {
  2104  		_ = v.Args[1]
  2105  		x := v.Args[0]
  2106  		y := v.Args[1]
  2107  		v.reset(OpMIPSXORconst)
  2108  		v.AuxInt = 1
  2109  		v0 := b.NewValue0(v.Pos, OpMIPSSGT, typ.Bool)
  2110  		v0.AddArg(x)
  2111  		v0.AddArg(y)
  2112  		v.AddArg(v0)
  2113  		return true
  2114  	}
  2115  }
  2116  func rewriteValueMIPS_OpLeq32F_0(v *Value) bool {
  2117  	b := v.Block
  2118  	_ = b
  2119  	// match: (Leq32F x y)
  2120  	// cond:
  2121  	// result: (FPFlagTrue (CMPGEF y x))
  2122  	for {
  2123  		_ = v.Args[1]
  2124  		x := v.Args[0]
  2125  		y := v.Args[1]
  2126  		v.reset(OpMIPSFPFlagTrue)
  2127  		v0 := b.NewValue0(v.Pos, OpMIPSCMPGEF, types.TypeFlags)
  2128  		v0.AddArg(y)
  2129  		v0.AddArg(x)
  2130  		v.AddArg(v0)
  2131  		return true
  2132  	}
  2133  }
  2134  func rewriteValueMIPS_OpLeq32U_0(v *Value) bool {
  2135  	b := v.Block
  2136  	_ = b
  2137  	typ := &b.Func.Config.Types
  2138  	_ = typ
  2139  	// match: (Leq32U x y)
  2140  	// cond:
  2141  	// result: (XORconst [1] (SGTU x y))
  2142  	for {
  2143  		_ = v.Args[1]
  2144  		x := v.Args[0]
  2145  		y := v.Args[1]
  2146  		v.reset(OpMIPSXORconst)
  2147  		v.AuxInt = 1
  2148  		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
  2149  		v0.AddArg(x)
  2150  		v0.AddArg(y)
  2151  		v.AddArg(v0)
  2152  		return true
  2153  	}
  2154  }
  2155  func rewriteValueMIPS_OpLeq64F_0(v *Value) bool {
  2156  	b := v.Block
  2157  	_ = b
  2158  	// match: (Leq64F x y)
  2159  	// cond:
  2160  	// result: (FPFlagTrue (CMPGED y x))
  2161  	for {
  2162  		_ = v.Args[1]
  2163  		x := v.Args[0]
  2164  		y := v.Args[1]
  2165  		v.reset(OpMIPSFPFlagTrue)
  2166  		v0 := b.NewValue0(v.Pos, OpMIPSCMPGED, types.TypeFlags)
  2167  		v0.AddArg(y)
  2168  		v0.AddArg(x)
  2169  		v.AddArg(v0)
  2170  		return true
  2171  	}
  2172  }
  2173  func rewriteValueMIPS_OpLeq8_0(v *Value) bool {
  2174  	b := v.Block
  2175  	_ = b
  2176  	typ := &b.Func.Config.Types
  2177  	_ = typ
  2178  	// match: (Leq8 x y)
  2179  	// cond:
  2180  	// result: (XORconst [1] (SGT (SignExt8to32 x) (SignExt8to32 y)))
  2181  	for {
  2182  		_ = v.Args[1]
  2183  		x := v.Args[0]
  2184  		y := v.Args[1]
  2185  		v.reset(OpMIPSXORconst)
  2186  		v.AuxInt = 1
  2187  		v0 := b.NewValue0(v.Pos, OpMIPSSGT, typ.Bool)
  2188  		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  2189  		v1.AddArg(x)
  2190  		v0.AddArg(v1)
  2191  		v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  2192  		v2.AddArg(y)
  2193  		v0.AddArg(v2)
  2194  		v.AddArg(v0)
  2195  		return true
  2196  	}
  2197  }
  2198  func rewriteValueMIPS_OpLeq8U_0(v *Value) bool {
  2199  	b := v.Block
  2200  	_ = b
  2201  	typ := &b.Func.Config.Types
  2202  	_ = typ
  2203  	// match: (Leq8U x y)
  2204  	// cond:
  2205  	// result: (XORconst [1] (SGTU (ZeroExt8to32 x) (ZeroExt8to32 y)))
  2206  	for {
  2207  		_ = v.Args[1]
  2208  		x := v.Args[0]
  2209  		y := v.Args[1]
  2210  		v.reset(OpMIPSXORconst)
  2211  		v.AuxInt = 1
  2212  		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
  2213  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2214  		v1.AddArg(x)
  2215  		v0.AddArg(v1)
  2216  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2217  		v2.AddArg(y)
  2218  		v0.AddArg(v2)
  2219  		v.AddArg(v0)
  2220  		return true
  2221  	}
  2222  }
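// Less* needs no negation: x < y is just y > x, so the SGT/SGTU operands are
// swapped, and the float variants use CMPGTF/CMPGTD with the arguments
// reversed.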
  2223  func rewriteValueMIPS_OpLess16_0(v *Value) bool {
  2224  	b := v.Block
  2225  	_ = b
  2226  	typ := &b.Func.Config.Types
  2227  	_ = typ
  2228  	// match: (Less16 x y)
  2229  	// cond:
  2230  	// result: (SGT (SignExt16to32 y) (SignExt16to32 x))
  2231  	for {
  2232  		_ = v.Args[1]
  2233  		x := v.Args[0]
  2234  		y := v.Args[1]
  2235  		v.reset(OpMIPSSGT)
  2236  		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  2237  		v0.AddArg(y)
  2238  		v.AddArg(v0)
  2239  		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  2240  		v1.AddArg(x)
  2241  		v.AddArg(v1)
  2242  		return true
  2243  	}
  2244  }
  2245  func rewriteValueMIPS_OpLess16U_0(v *Value) bool {
  2246  	b := v.Block
  2247  	_ = b
  2248  	typ := &b.Func.Config.Types
  2249  	_ = typ
  2250  	// match: (Less16U x y)
  2251  	// cond:
  2252  	// result: (SGTU (ZeroExt16to32 y) (ZeroExt16to32 x))
  2253  	for {
  2254  		_ = v.Args[1]
  2255  		x := v.Args[0]
  2256  		y := v.Args[1]
  2257  		v.reset(OpMIPSSGTU)
  2258  		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2259  		v0.AddArg(y)
  2260  		v.AddArg(v0)
  2261  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2262  		v1.AddArg(x)
  2263  		v.AddArg(v1)
  2264  		return true
  2265  	}
  2266  }
  2267  func rewriteValueMIPS_OpLess32_0(v *Value) bool {
  2268  	// match: (Less32 x y)
  2269  	// cond:
  2270  	// result: (SGT y x)
  2271  	for {
  2272  		_ = v.Args[1]
  2273  		x := v.Args[0]
  2274  		y := v.Args[1]
  2275  		v.reset(OpMIPSSGT)
  2276  		v.AddArg(y)
  2277  		v.AddArg(x)
  2278  		return true
  2279  	}
  2280  }
  2281  func rewriteValueMIPS_OpLess32F_0(v *Value) bool {
  2282  	b := v.Block
  2283  	_ = b
  2284  	// match: (Less32F x y)
  2285  	// cond:
  2286  	// result: (FPFlagTrue (CMPGTF y x))
  2287  	for {
  2288  		_ = v.Args[1]
  2289  		x := v.Args[0]
  2290  		y := v.Args[1]
  2291  		v.reset(OpMIPSFPFlagTrue)
  2292  		v0 := b.NewValue0(v.Pos, OpMIPSCMPGTF, types.TypeFlags)
  2293  		v0.AddArg(y)
  2294  		v0.AddArg(x)
  2295  		v.AddArg(v0)
  2296  		return true
  2297  	}
  2298  }
  2299  func rewriteValueMIPS_OpLess32U_0(v *Value) bool {
  2300  	// match: (Less32U x y)
  2301  	// cond:
  2302  	// result: (SGTU y x)
  2303  	for {
  2304  		_ = v.Args[1]
  2305  		x := v.Args[0]
  2306  		y := v.Args[1]
  2307  		v.reset(OpMIPSSGTU)
  2308  		v.AddArg(y)
  2309  		v.AddArg(x)
  2310  		return true
  2311  	}
  2312  }
  2313  func rewriteValueMIPS_OpLess64F_0(v *Value) bool {
  2314  	b := v.Block
  2315  	_ = b
  2316  	// match: (Less64F x y)
  2317  	// cond:
  2318  	// result: (FPFlagTrue (CMPGTD y x))
  2319  	for {
  2320  		_ = v.Args[1]
  2321  		x := v.Args[0]
  2322  		y := v.Args[1]
  2323  		v.reset(OpMIPSFPFlagTrue)
  2324  		v0 := b.NewValue0(v.Pos, OpMIPSCMPGTD, types.TypeFlags)
  2325  		v0.AddArg(y)
  2326  		v0.AddArg(x)
  2327  		v.AddArg(v0)
  2328  		return true
  2329  	}
  2330  }
  2331  func rewriteValueMIPS_OpLess8_0(v *Value) bool {
  2332  	b := v.Block
  2333  	_ = b
  2334  	typ := &b.Func.Config.Types
  2335  	_ = typ
  2336  	// match: (Less8 x y)
  2337  	// cond:
  2338  	// result: (SGT (SignExt8to32 y) (SignExt8to32 x))
  2339  	for {
  2340  		_ = v.Args[1]
  2341  		x := v.Args[0]
  2342  		y := v.Args[1]
  2343  		v.reset(OpMIPSSGT)
  2344  		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  2345  		v0.AddArg(y)
  2346  		v.AddArg(v0)
  2347  		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  2348  		v1.AddArg(x)
  2349  		v.AddArg(v1)
  2350  		return true
  2351  	}
  2352  }
  2353  func rewriteValueMIPS_OpLess8U_0(v *Value) bool {
  2354  	b := v.Block
  2355  	_ = b
  2356  	typ := &b.Func.Config.Types
  2357  	_ = typ
  2358  	// match: (Less8U x y)
  2359  	// cond:
  2360  	// result: (SGTU (ZeroExt8to32 y) (ZeroExt8to32 x))
  2361  	for {
  2362  		_ = v.Args[1]
  2363  		x := v.Args[0]
  2364  		y := v.Args[1]
  2365  		v.reset(OpMIPSSGTU)
  2366  		v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2367  		v0.AddArg(y)
  2368  		v.AddArg(v0)
  2369  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2370  		v1.AddArg(x)
  2371  		v.AddArg(v1)
  2372  		return true
  2373  	}
  2374  }
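// Generic Load is dispatched on the loaded type: bool and unsigned 8-bit use
// MOVBUload, signed 8-bit MOVBload, 16-bit MOVHload/MOVHUload, 32-bit ints
// and pointers MOVWload, and floats MOVFload/MOVDload.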
  2375  func rewriteValueMIPS_OpLoad_0(v *Value) bool {
  2376  	// match: (Load <t> ptr mem)
  2377  	// cond: t.IsBoolean()
  2378  	// result: (MOVBUload ptr mem)
  2379  	for {
  2380  		t := v.Type
  2381  		_ = v.Args[1]
  2382  		ptr := v.Args[0]
  2383  		mem := v.Args[1]
  2384  		if !(t.IsBoolean()) {
  2385  			break
  2386  		}
  2387  		v.reset(OpMIPSMOVBUload)
  2388  		v.AddArg(ptr)
  2389  		v.AddArg(mem)
  2390  		return true
  2391  	}
  2392  	// match: (Load <t> ptr mem)
  2393  	// cond: (is8BitInt(t) && isSigned(t))
  2394  	// result: (MOVBload ptr mem)
  2395  	for {
  2396  		t := v.Type
  2397  		_ = v.Args[1]
  2398  		ptr := v.Args[0]
  2399  		mem := v.Args[1]
  2400  		if !(is8BitInt(t) && isSigned(t)) {
  2401  			break
  2402  		}
  2403  		v.reset(OpMIPSMOVBload)
  2404  		v.AddArg(ptr)
  2405  		v.AddArg(mem)
  2406  		return true
  2407  	}
  2408  	// match: (Load <t> ptr mem)
  2409  	// cond: (is8BitInt(t) && !isSigned(t))
  2410  	// result: (MOVBUload ptr mem)
  2411  	for {
  2412  		t := v.Type
  2413  		_ = v.Args[1]
  2414  		ptr := v.Args[0]
  2415  		mem := v.Args[1]
  2416  		if !(is8BitInt(t) && !isSigned(t)) {
  2417  			break
  2418  		}
  2419  		v.reset(OpMIPSMOVBUload)
  2420  		v.AddArg(ptr)
  2421  		v.AddArg(mem)
  2422  		return true
  2423  	}
  2424  	// match: (Load <t> ptr mem)
  2425  	// cond: (is16BitInt(t) && isSigned(t))
  2426  	// result: (MOVHload ptr mem)
  2427  	for {
  2428  		t := v.Type
  2429  		_ = v.Args[1]
  2430  		ptr := v.Args[0]
  2431  		mem := v.Args[1]
  2432  		if !(is16BitInt(t) && isSigned(t)) {
  2433  			break
  2434  		}
  2435  		v.reset(OpMIPSMOVHload)
  2436  		v.AddArg(ptr)
  2437  		v.AddArg(mem)
  2438  		return true
  2439  	}
  2440  	// match: (Load <t> ptr mem)
  2441  	// cond: (is16BitInt(t) && !isSigned(t))
  2442  	// result: (MOVHUload ptr mem)
  2443  	for {
  2444  		t := v.Type
  2445  		_ = v.Args[1]
  2446  		ptr := v.Args[0]
  2447  		mem := v.Args[1]
  2448  		if !(is16BitInt(t) && !isSigned(t)) {
  2449  			break
  2450  		}
  2451  		v.reset(OpMIPSMOVHUload)
  2452  		v.AddArg(ptr)
  2453  		v.AddArg(mem)
  2454  		return true
  2455  	}
  2456  	// match: (Load <t> ptr mem)
  2457  	// cond: (is32BitInt(t) || isPtr(t))
  2458  	// result: (MOVWload ptr mem)
  2459  	for {
  2460  		t := v.Type
  2461  		_ = v.Args[1]
  2462  		ptr := v.Args[0]
  2463  		mem := v.Args[1]
  2464  		if !(is32BitInt(t) || isPtr(t)) {
  2465  			break
  2466  		}
  2467  		v.reset(OpMIPSMOVWload)
  2468  		v.AddArg(ptr)
  2469  		v.AddArg(mem)
  2470  		return true
  2471  	}
  2472  	// match: (Load <t> ptr mem)
  2473  	// cond: is32BitFloat(t)
  2474  	// result: (MOVFload ptr mem)
  2475  	for {
  2476  		t := v.Type
  2477  		_ = v.Args[1]
  2478  		ptr := v.Args[0]
  2479  		mem := v.Args[1]
  2480  		if !(is32BitFloat(t)) {
  2481  			break
  2482  		}
  2483  		v.reset(OpMIPSMOVFload)
  2484  		v.AddArg(ptr)
  2485  		v.AddArg(mem)
  2486  		return true
  2487  	}
  2488  	// match: (Load <t> ptr mem)
  2489  	// cond: is64BitFloat(t)
  2490  	// result: (MOVDload ptr mem)
  2491  	for {
  2492  		t := v.Type
  2493  		_ = v.Args[1]
  2494  		ptr := v.Args[0]
  2495  		mem := v.Args[1]
  2496  		if !(is64BitFloat(t)) {
  2497  			break
  2498  		}
  2499  		v.reset(OpMIPSMOVDload)
  2500  		v.AddArg(ptr)
  2501  		v.AddArg(mem)
  2502  		return true
  2503  	}
  2504  	return false
  2505  }
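// Variable-count left shifts: Go defines a shift by >= the operand width as 0,
// but the hardware SLL only looks at the low 5 bits of the count. The CMOVZ
// therefore keeps the SLL result while (32 >u count) is nonzero and selects a
// zero constant otherwise; sub-word counts are zero-extended before the test.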
  2506  func rewriteValueMIPS_OpLsh16x16_0(v *Value) bool {
  2507  	b := v.Block
  2508  	_ = b
  2509  	typ := &b.Func.Config.Types
  2510  	_ = typ
  2511  	// match: (Lsh16x16 <t> x y)
  2512  	// cond:
  2513  	// result: (CMOVZ (SLL <t> x (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
  2514  	for {
  2515  		t := v.Type
  2516  		_ = v.Args[1]
  2517  		x := v.Args[0]
  2518  		y := v.Args[1]
  2519  		v.reset(OpMIPSCMOVZ)
  2520  		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2521  		v0.AddArg(x)
  2522  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2523  		v1.AddArg(y)
  2524  		v0.AddArg(v1)
  2525  		v.AddArg(v0)
  2526  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  2527  		v2.AuxInt = 0
  2528  		v.AddArg(v2)
  2529  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  2530  		v3.AuxInt = 32
  2531  		v4 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2532  		v4.AddArg(y)
  2533  		v3.AddArg(v4)
  2534  		v.AddArg(v3)
  2535  		return true
  2536  	}
  2537  }
  2538  func rewriteValueMIPS_OpLsh16x32_0(v *Value) bool {
  2539  	b := v.Block
  2540  	_ = b
  2541  	typ := &b.Func.Config.Types
  2542  	_ = typ
  2543  	// match: (Lsh16x32 <t> x y)
  2544  	// cond:
  2545  	// result: (CMOVZ (SLL <t> x y) (MOVWconst [0]) (SGTUconst [32] y))
  2546  	for {
  2547  		t := v.Type
  2548  		_ = v.Args[1]
  2549  		x := v.Args[0]
  2550  		y := v.Args[1]
  2551  		v.reset(OpMIPSCMOVZ)
  2552  		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2553  		v0.AddArg(x)
  2554  		v0.AddArg(y)
  2555  		v.AddArg(v0)
  2556  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  2557  		v1.AuxInt = 0
  2558  		v.AddArg(v1)
  2559  		v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  2560  		v2.AuxInt = 32
  2561  		v2.AddArg(y)
  2562  		v.AddArg(v2)
  2563  		return true
  2564  	}
  2565  }
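// Shifts by a 64-bit constant are resolved at compile time: counts below the
// operand width become an SLLconst, and counts at or above it fold to a zero
// constant.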
  2566  func rewriteValueMIPS_OpLsh16x64_0(v *Value) bool {
  2567  	// match: (Lsh16x64 x (Const64 [c]))
  2568  	// cond: uint32(c) < 16
  2569  	// result: (SLLconst x [c])
  2570  	for {
  2571  		_ = v.Args[1]
  2572  		x := v.Args[0]
  2573  		v_1 := v.Args[1]
  2574  		if v_1.Op != OpConst64 {
  2575  			break
  2576  		}
  2577  		c := v_1.AuxInt
  2578  		if !(uint32(c) < 16) {
  2579  			break
  2580  		}
  2581  		v.reset(OpMIPSSLLconst)
  2582  		v.AuxInt = c
  2583  		v.AddArg(x)
  2584  		return true
  2585  	}
  2586  	// match: (Lsh16x64 _ (Const64 [c]))
  2587  	// cond: uint32(c) >= 16
  2588  	// result: (MOVWconst [0])
  2589  	for {
  2590  		_ = v.Args[1]
  2591  		v_1 := v.Args[1]
  2592  		if v_1.Op != OpConst64 {
  2593  			break
  2594  		}
  2595  		c := v_1.AuxInt
  2596  		if !(uint32(c) >= 16) {
  2597  			break
  2598  		}
  2599  		v.reset(OpMIPSMOVWconst)
  2600  		v.AuxInt = 0
  2601  		return true
  2602  	}
  2603  	return false
  2604  }
  2605  func rewriteValueMIPS_OpLsh16x8_0(v *Value) bool {
  2606  	b := v.Block
  2607  	_ = b
  2608  	typ := &b.Func.Config.Types
  2609  	_ = typ
  2610  	// match: (Lsh16x8 <t> x y)
  2611  	// cond:
  2612  	// result: (CMOVZ (SLL <t> x (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
  2613  	for {
  2614  		t := v.Type
  2615  		_ = v.Args[1]
  2616  		x := v.Args[0]
  2617  		y := v.Args[1]
  2618  		v.reset(OpMIPSCMOVZ)
  2619  		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2620  		v0.AddArg(x)
  2621  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2622  		v1.AddArg(y)
  2623  		v0.AddArg(v1)
  2624  		v.AddArg(v0)
  2625  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  2626  		v2.AuxInt = 0
  2627  		v.AddArg(v2)
  2628  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  2629  		v3.AuxInt = 32
  2630  		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2631  		v4.AddArg(y)
  2632  		v3.AddArg(v4)
  2633  		v.AddArg(v3)
  2634  		return true
  2635  	}
  2636  }
  2637  func rewriteValueMIPS_OpLsh32x16_0(v *Value) bool {
  2638  	b := v.Block
  2639  	_ = b
  2640  	typ := &b.Func.Config.Types
  2641  	_ = typ
  2642  	// match: (Lsh32x16 <t> x y)
  2643  	// cond:
  2644  	// result: (CMOVZ (SLL <t> x (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
  2645  	for {
  2646  		t := v.Type
  2647  		_ = v.Args[1]
  2648  		x := v.Args[0]
  2649  		y := v.Args[1]
  2650  		v.reset(OpMIPSCMOVZ)
  2651  		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2652  		v0.AddArg(x)
  2653  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2654  		v1.AddArg(y)
  2655  		v0.AddArg(v1)
  2656  		v.AddArg(v0)
  2657  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  2658  		v2.AuxInt = 0
  2659  		v.AddArg(v2)
  2660  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  2661  		v3.AuxInt = 32
  2662  		v4 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2663  		v4.AddArg(y)
  2664  		v3.AddArg(v4)
  2665  		v.AddArg(v3)
  2666  		return true
  2667  	}
  2668  }
  2669  func rewriteValueMIPS_OpLsh32x32_0(v *Value) bool {
  2670  	b := v.Block
  2671  	_ = b
  2672  	typ := &b.Func.Config.Types
  2673  	_ = typ
  2674  	// match: (Lsh32x32 <t> x y)
  2675  	// cond:
  2676  	// result: (CMOVZ (SLL <t> x y) (MOVWconst [0]) (SGTUconst [32] y))
  2677  	for {
  2678  		t := v.Type
  2679  		_ = v.Args[1]
  2680  		x := v.Args[0]
  2681  		y := v.Args[1]
  2682  		v.reset(OpMIPSCMOVZ)
  2683  		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2684  		v0.AddArg(x)
  2685  		v0.AddArg(y)
  2686  		v.AddArg(v0)
  2687  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  2688  		v1.AuxInt = 0
  2689  		v.AddArg(v1)
  2690  		v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  2691  		v2.AuxInt = 32
  2692  		v2.AddArg(y)
  2693  		v.AddArg(v2)
  2694  		return true
  2695  	}
  2696  }
  2697  func rewriteValueMIPS_OpLsh32x64_0(v *Value) bool {
  2698  	// match: (Lsh32x64 x (Const64 [c]))
  2699  	// cond: uint32(c) < 32
  2700  	// result: (SLLconst x [c])
  2701  	for {
  2702  		_ = v.Args[1]
  2703  		x := v.Args[0]
  2704  		v_1 := v.Args[1]
  2705  		if v_1.Op != OpConst64 {
  2706  			break
  2707  		}
  2708  		c := v_1.AuxInt
  2709  		if !(uint32(c) < 32) {
  2710  			break
  2711  		}
  2712  		v.reset(OpMIPSSLLconst)
  2713  		v.AuxInt = c
  2714  		v.AddArg(x)
  2715  		return true
  2716  	}
  2717  	// match: (Lsh32x64 _ (Const64 [c]))
  2718  	// cond: uint32(c) >= 32
  2719  	// result: (MOVWconst [0])
  2720  	for {
  2721  		_ = v.Args[1]
  2722  		v_1 := v.Args[1]
  2723  		if v_1.Op != OpConst64 {
  2724  			break
  2725  		}
  2726  		c := v_1.AuxInt
  2727  		if !(uint32(c) >= 32) {
  2728  			break
  2729  		}
  2730  		v.reset(OpMIPSMOVWconst)
  2731  		v.AuxInt = 0
  2732  		return true
  2733  	}
  2734  	return false
  2735  }
  2736  func rewriteValueMIPS_OpLsh32x8_0(v *Value) bool {
  2737  	b := v.Block
  2738  	_ = b
  2739  	typ := &b.Func.Config.Types
  2740  	_ = typ
  2741  	// match: (Lsh32x8 <t> x y)
  2742  	// cond:
  2743  	// result: (CMOVZ (SLL <t> x (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
  2744  	for {
  2745  		t := v.Type
  2746  		_ = v.Args[1]
  2747  		x := v.Args[0]
  2748  		y := v.Args[1]
  2749  		v.reset(OpMIPSCMOVZ)
  2750  		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2751  		v0.AddArg(x)
  2752  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2753  		v1.AddArg(y)
  2754  		v0.AddArg(v1)
  2755  		v.AddArg(v0)
  2756  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  2757  		v2.AuxInt = 0
  2758  		v.AddArg(v2)
  2759  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  2760  		v3.AuxInt = 32
  2761  		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2762  		v4.AddArg(y)
  2763  		v3.AddArg(v4)
  2764  		v.AddArg(v3)
  2765  		return true
  2766  	}
  2767  }
  2768  func rewriteValueMIPS_OpLsh8x16_0(v *Value) bool {
  2769  	b := v.Block
  2770  	_ = b
  2771  	typ := &b.Func.Config.Types
  2772  	_ = typ
  2773  	// match: (Lsh8x16 <t> x y)
  2774  	// cond:
  2775  	// result: (CMOVZ (SLL <t> x (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
  2776  	for {
  2777  		t := v.Type
  2778  		_ = v.Args[1]
  2779  		x := v.Args[0]
  2780  		y := v.Args[1]
  2781  		v.reset(OpMIPSCMOVZ)
  2782  		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2783  		v0.AddArg(x)
  2784  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2785  		v1.AddArg(y)
  2786  		v0.AddArg(v1)
  2787  		v.AddArg(v0)
  2788  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  2789  		v2.AuxInt = 0
  2790  		v.AddArg(v2)
  2791  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  2792  		v3.AuxInt = 32
  2793  		v4 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2794  		v4.AddArg(y)
  2795  		v3.AddArg(v4)
  2796  		v.AddArg(v3)
  2797  		return true
  2798  	}
  2799  }
  2800  func rewriteValueMIPS_OpLsh8x32_0(v *Value) bool {
  2801  	b := v.Block
  2802  	_ = b
  2803  	typ := &b.Func.Config.Types
  2804  	_ = typ
  2805  	// match: (Lsh8x32 <t> x y)
  2806  	// cond:
  2807  	// result: (CMOVZ (SLL <t> x y) (MOVWconst [0]) (SGTUconst [32] y))
  2808  	for {
  2809  		t := v.Type
  2810  		_ = v.Args[1]
  2811  		x := v.Args[0]
  2812  		y := v.Args[1]
  2813  		v.reset(OpMIPSCMOVZ)
  2814  		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2815  		v0.AddArg(x)
  2816  		v0.AddArg(y)
  2817  		v.AddArg(v0)
  2818  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  2819  		v1.AuxInt = 0
  2820  		v.AddArg(v1)
  2821  		v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  2822  		v2.AuxInt = 32
  2823  		v2.AddArg(y)
  2824  		v.AddArg(v2)
  2825  		return true
  2826  	}
  2827  }
  2828  func rewriteValueMIPS_OpLsh8x64_0(v *Value) bool {
  2829  	// match: (Lsh8x64 x (Const64 [c]))
  2830  	// cond: uint32(c) < 8
  2831  	// result: (SLLconst x [c])
  2832  	for {
  2833  		_ = v.Args[1]
  2834  		x := v.Args[0]
  2835  		v_1 := v.Args[1]
  2836  		if v_1.Op != OpConst64 {
  2837  			break
  2838  		}
  2839  		c := v_1.AuxInt
  2840  		if !(uint32(c) < 8) {
  2841  			break
  2842  		}
  2843  		v.reset(OpMIPSSLLconst)
  2844  		v.AuxInt = c
  2845  		v.AddArg(x)
  2846  		return true
  2847  	}
  2848  	// match: (Lsh8x64 _ (Const64 [c]))
  2849  	// cond: uint32(c) >= 8
  2850  	// result: (MOVWconst [0])
  2851  	for {
  2852  		_ = v.Args[1]
  2853  		v_1 := v.Args[1]
  2854  		if v_1.Op != OpConst64 {
  2855  			break
  2856  		}
  2857  		c := v_1.AuxInt
  2858  		if !(uint32(c) >= 8) {
  2859  			break
  2860  		}
  2861  		v.reset(OpMIPSMOVWconst)
  2862  		v.AuxInt = 0
  2863  		return true
  2864  	}
  2865  	return false
  2866  }
  2867  func rewriteValueMIPS_OpLsh8x8_0(v *Value) bool {
  2868  	b := v.Block
  2869  	_ = b
  2870  	typ := &b.Func.Config.Types
  2871  	_ = typ
  2872  	// match: (Lsh8x8 <t> x y)
  2873  	// cond:
  2874  	// result: (CMOVZ (SLL <t> x (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
  2875  	for {
  2876  		t := v.Type
  2877  		_ = v.Args[1]
  2878  		x := v.Args[0]
  2879  		y := v.Args[1]
  2880  		v.reset(OpMIPSCMOVZ)
  2881  		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2882  		v0.AddArg(x)
  2883  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2884  		v1.AddArg(y)
  2885  		v0.AddArg(v1)
  2886  		v.AddArg(v0)
  2887  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  2888  		v2.AuxInt = 0
  2889  		v.AddArg(v2)
  2890  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  2891  		v3.AuxInt = 32
  2892  		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2893  		v4.AddArg(y)
  2894  		v3.AddArg(v4)
  2895  		v.AddArg(v3)
  2896  		return true
  2897  	}
  2898  }
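// The rewriteValueMIPS_OpMIPS* functions below optimize already-lowered
// machine ops, mostly by folding constants into immediate forms: ADD with a
// MOVWconst operand becomes ADDconst, and adding a negation becomes SUB.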
  2899  func rewriteValueMIPS_OpMIPSADD_0(v *Value) bool {
  2900  	// match: (ADD x (MOVWconst [c]))
  2901  	// cond:
  2902  	// result: (ADDconst [c] x)
  2903  	for {
  2904  		_ = v.Args[1]
  2905  		x := v.Args[0]
  2906  		v_1 := v.Args[1]
  2907  		if v_1.Op != OpMIPSMOVWconst {
  2908  			break
  2909  		}
  2910  		c := v_1.AuxInt
  2911  		v.reset(OpMIPSADDconst)
  2912  		v.AuxInt = c
  2913  		v.AddArg(x)
  2914  		return true
  2915  	}
  2916  	// match: (ADD (MOVWconst [c]) x)
  2917  	// cond:
  2918  	// result: (ADDconst [c] x)
  2919  	for {
  2920  		_ = v.Args[1]
  2921  		v_0 := v.Args[0]
  2922  		if v_0.Op != OpMIPSMOVWconst {
  2923  			break
  2924  		}
  2925  		c := v_0.AuxInt
  2926  		x := v.Args[1]
  2927  		v.reset(OpMIPSADDconst)
  2928  		v.AuxInt = c
  2929  		v.AddArg(x)
  2930  		return true
  2931  	}
  2932  	// match: (ADD x (NEG y))
  2933  	// cond:
  2934  	// result: (SUB x y)
  2935  	for {
  2936  		_ = v.Args[1]
  2937  		x := v.Args[0]
  2938  		v_1 := v.Args[1]
  2939  		if v_1.Op != OpMIPSNEG {
  2940  			break
  2941  		}
  2942  		y := v_1.Args[0]
  2943  		v.reset(OpMIPSSUB)
  2944  		v.AddArg(x)
  2945  		v.AddArg(y)
  2946  		return true
  2947  	}
  2948  	// match: (ADD (NEG y) x)
  2949  	// cond:
  2950  	// result: (SUB x y)
  2951  	for {
  2952  		_ = v.Args[1]
  2953  		v_0 := v.Args[0]
  2954  		if v_0.Op != OpMIPSNEG {
  2955  			break
  2956  		}
  2957  		y := v_0.Args[0]
  2958  		x := v.Args[1]
  2959  		v.reset(OpMIPSSUB)
  2960  		v.AddArg(x)
  2961  		v.AddArg(y)
  2962  		return true
  2963  	}
  2964  	return false
  2965  }
  2966  func rewriteValueMIPS_OpMIPSADDconst_0(v *Value) bool {
  2967  	// match: (ADDconst [off1] (MOVWaddr [off2] {sym} ptr))
  2968  	// cond:
  2969  	// result: (MOVWaddr [off1+off2] {sym} ptr)
  2970  	for {
  2971  		off1 := v.AuxInt
  2972  		v_0 := v.Args[0]
  2973  		if v_0.Op != OpMIPSMOVWaddr {
  2974  			break
  2975  		}
  2976  		off2 := v_0.AuxInt
  2977  		sym := v_0.Aux
  2978  		ptr := v_0.Args[0]
  2979  		v.reset(OpMIPSMOVWaddr)
  2980  		v.AuxInt = off1 + off2
  2981  		v.Aux = sym
  2982  		v.AddArg(ptr)
  2983  		return true
  2984  	}
  2985  	// match: (ADDconst [0] x)
  2986  	// cond:
  2987  	// result: x
  2988  	for {
  2989  		if v.AuxInt != 0 {
  2990  			break
  2991  		}
  2992  		x := v.Args[0]
  2993  		v.reset(OpCopy)
  2994  		v.Type = x.Type
  2995  		v.AddArg(x)
  2996  		return true
  2997  	}
  2998  	// match: (ADDconst [c] (MOVWconst [d]))
  2999  	// cond:
  3000  	// result: (MOVWconst [int64(int32(c+d))])
  3001  	for {
  3002  		c := v.AuxInt
  3003  		v_0 := v.Args[0]
  3004  		if v_0.Op != OpMIPSMOVWconst {
  3005  			break
  3006  		}
  3007  		d := v_0.AuxInt
  3008  		v.reset(OpMIPSMOVWconst)
  3009  		v.AuxInt = int64(int32(c + d))
  3010  		return true
  3011  	}
  3012  	// match: (ADDconst [c] (ADDconst [d] x))
  3013  	// cond:
  3014  	// result: (ADDconst [int64(int32(c+d))] x)
  3015  	for {
  3016  		c := v.AuxInt
  3017  		v_0 := v.Args[0]
  3018  		if v_0.Op != OpMIPSADDconst {
  3019  			break
  3020  		}
  3021  		d := v_0.AuxInt
  3022  		x := v_0.Args[0]
  3023  		v.reset(OpMIPSADDconst)
  3024  		v.AuxInt = int64(int32(c + d))
  3025  		v.AddArg(x)
  3026  		return true
  3027  	}
  3028  	// match: (ADDconst [c] (SUBconst [d] x))
  3029  	// cond:
  3030  	// result: (ADDconst [int64(int32(c-d))] x)
  3031  	for {
  3032  		c := v.AuxInt
  3033  		v_0 := v.Args[0]
  3034  		if v_0.Op != OpMIPSSUBconst {
  3035  			break
  3036  		}
  3037  		d := v_0.AuxInt
  3038  		x := v_0.Args[0]
  3039  		v.reset(OpMIPSADDconst)
  3040  		v.AuxInt = int64(int32(c - d))
  3041  		v.AddArg(x)
  3042  		return true
  3043  	}
  3044  	return false
  3045  }
  3046  func rewriteValueMIPS_OpMIPSAND_0(v *Value) bool {
  3047  	b := v.Block
  3048  	_ = b
  3049  	// match: (AND x (MOVWconst [c]))
  3050  	// cond:
  3051  	// result: (ANDconst [c] x)
  3052  	for {
  3053  		_ = v.Args[1]
  3054  		x := v.Args[0]
  3055  		v_1 := v.Args[1]
  3056  		if v_1.Op != OpMIPSMOVWconst {
  3057  			break
  3058  		}
  3059  		c := v_1.AuxInt
  3060  		v.reset(OpMIPSANDconst)
  3061  		v.AuxInt = c
  3062  		v.AddArg(x)
  3063  		return true
  3064  	}
  3065  	// match: (AND (MOVWconst [c]) x)
  3066  	// cond:
  3067  	// result: (ANDconst [c] x)
  3068  	for {
  3069  		_ = v.Args[1]
  3070  		v_0 := v.Args[0]
  3071  		if v_0.Op != OpMIPSMOVWconst {
  3072  			break
  3073  		}
  3074  		c := v_0.AuxInt
  3075  		x := v.Args[1]
  3076  		v.reset(OpMIPSANDconst)
  3077  		v.AuxInt = c
  3078  		v.AddArg(x)
  3079  		return true
  3080  	}
  3081  	// match: (AND x x)
  3082  	// cond:
  3083  	// result: x
  3084  	for {
  3085  		_ = v.Args[1]
  3086  		x := v.Args[0]
  3087  		if x != v.Args[1] {
  3088  			break
  3089  		}
  3090  		v.reset(OpCopy)
  3091  		v.Type = x.Type
  3092  		v.AddArg(x)
  3093  		return true
  3094  	}
  3095  	// match: (AND (SGTUconst [1] x) (SGTUconst [1] y))
  3096  	// cond:
  3097  	// result: (SGTUconst [1] (OR <x.Type> x y))
  3098  	for {
  3099  		_ = v.Args[1]
  3100  		v_0 := v.Args[0]
  3101  		if v_0.Op != OpMIPSSGTUconst {
  3102  			break
  3103  		}
  3104  		if v_0.AuxInt != 1 {
  3105  			break
  3106  		}
  3107  		x := v_0.Args[0]
  3108  		v_1 := v.Args[1]
  3109  		if v_1.Op != OpMIPSSGTUconst {
  3110  			break
  3111  		}
  3112  		if v_1.AuxInt != 1 {
  3113  			break
  3114  		}
  3115  		y := v_1.Args[0]
  3116  		v.reset(OpMIPSSGTUconst)
  3117  		v.AuxInt = 1
  3118  		v0 := b.NewValue0(v.Pos, OpMIPSOR, x.Type)
  3119  		v0.AddArg(x)
  3120  		v0.AddArg(y)
  3121  		v.AddArg(v0)
  3122  		return true
  3123  	}
  3124  	// match: (AND (SGTUconst [1] y) (SGTUconst [1] x))
  3125  	// cond:
  3126  	// result: (SGTUconst [1] (OR <x.Type> x y))
  3127  	for {
  3128  		_ = v.Args[1]
  3129  		v_0 := v.Args[0]
  3130  		if v_0.Op != OpMIPSSGTUconst {
  3131  			break
  3132  		}
  3133  		if v_0.AuxInt != 1 {
  3134  			break
  3135  		}
  3136  		y := v_0.Args[0]
  3137  		v_1 := v.Args[1]
  3138  		if v_1.Op != OpMIPSSGTUconst {
  3139  			break
  3140  		}
  3141  		if v_1.AuxInt != 1 {
  3142  			break
  3143  		}
  3144  		x := v_1.Args[0]
  3145  		v.reset(OpMIPSSGTUconst)
  3146  		v.AuxInt = 1
  3147  		v0 := b.NewValue0(v.Pos, OpMIPSOR, x.Type)
  3148  		v0.AddArg(x)
  3149  		v0.AddArg(y)
  3150  		v.AddArg(v0)
  3151  		return true
  3152  	}
  3153  	return false
  3154  }
  3155  func rewriteValueMIPS_OpMIPSANDconst_0(v *Value) bool {
  3156  	// match: (ANDconst [0] _)
  3157  	// cond:
  3158  	// result: (MOVWconst [0])
  3159  	for {
  3160  		if v.AuxInt != 0 {
  3161  			break
  3162  		}
  3163  		v.reset(OpMIPSMOVWconst)
  3164  		v.AuxInt = 0
  3165  		return true
  3166  	}
  3167  	// match: (ANDconst [-1] x)
  3168  	// cond:
  3169  	// result: x
  3170  	for {
  3171  		if v.AuxInt != -1 {
  3172  			break
  3173  		}
  3174  		x := v.Args[0]
  3175  		v.reset(OpCopy)
  3176  		v.Type = x.Type
  3177  		v.AddArg(x)
  3178  		return true
  3179  	}
  3180  	// match: (ANDconst [c] (MOVWconst [d]))
  3181  	// cond:
  3182  	// result: (MOVWconst [c&d])
  3183  	for {
  3184  		c := v.AuxInt
  3185  		v_0 := v.Args[0]
  3186  		if v_0.Op != OpMIPSMOVWconst {
  3187  			break
  3188  		}
  3189  		d := v_0.AuxInt
  3190  		v.reset(OpMIPSMOVWconst)
  3191  		v.AuxInt = c & d
  3192  		return true
  3193  	}
  3194  	// match: (ANDconst [c] (ANDconst [d] x))
  3195  	// cond:
  3196  	// result: (ANDconst [c&d] x)
  3197  	for {
  3198  		c := v.AuxInt
  3199  		v_0 := v.Args[0]
  3200  		if v_0.Op != OpMIPSANDconst {
  3201  			break
  3202  		}
  3203  		d := v_0.AuxInt
  3204  		x := v_0.Args[0]
  3205  		v.reset(OpMIPSANDconst)
  3206  		v.AuxInt = c & d
  3207  		v.AddArg(x)
  3208  		return true
  3209  	}
  3210  	return false
  3211  }
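// CMOVZ a b c selects a when c is nonzero and b when c is zero. A selector
// that is a known constant collapses the move to one of its operands, and a
// constant-zero second operand is rewritten to the cheaper CMOVZzero form.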
  3212  func rewriteValueMIPS_OpMIPSCMOVZ_0(v *Value) bool {
  3213  	b := v.Block
  3214  	_ = b
  3215  	// match: (CMOVZ _ b (MOVWconst [0]))
  3216  	// cond:
  3217  	// result: b
  3218  	for {
  3219  		_ = v.Args[2]
  3220  		b := v.Args[1]
  3221  		v_2 := v.Args[2]
  3222  		if v_2.Op != OpMIPSMOVWconst {
  3223  			break
  3224  		}
  3225  		if v_2.AuxInt != 0 {
  3226  			break
  3227  		}
  3228  		v.reset(OpCopy)
  3229  		v.Type = b.Type
  3230  		v.AddArg(b)
  3231  		return true
  3232  	}
  3233  	// match: (CMOVZ a _ (MOVWconst [c]))
  3234  	// cond: c!=0
  3235  	// result: a
  3236  	for {
  3237  		_ = v.Args[2]
  3238  		a := v.Args[0]
  3239  		v_2 := v.Args[2]
  3240  		if v_2.Op != OpMIPSMOVWconst {
  3241  			break
  3242  		}
  3243  		c := v_2.AuxInt
  3244  		if !(c != 0) {
  3245  			break
  3246  		}
  3247  		v.reset(OpCopy)
  3248  		v.Type = a.Type
  3249  		v.AddArg(a)
  3250  		return true
  3251  	}
  3252  	// match: (CMOVZ a (MOVWconst [0]) c)
  3253  	// cond:
  3254  	// result: (CMOVZzero a c)
  3255  	for {
  3256  		_ = v.Args[2]
  3257  		a := v.Args[0]
  3258  		v_1 := v.Args[1]
  3259  		if v_1.Op != OpMIPSMOVWconst {
  3260  			break
  3261  		}
  3262  		if v_1.AuxInt != 0 {
  3263  			break
  3264  		}
  3265  		c := v.Args[2]
  3266  		v.reset(OpMIPSCMOVZzero)
  3267  		v.AddArg(a)
  3268  		v.AddArg(c)
  3269  		return true
  3270  	}
  3271  	return false
  3272  }
  3273  func rewriteValueMIPS_OpMIPSCMOVZzero_0(v *Value) bool {
  3274  	// match: (CMOVZzero _ (MOVWconst [0]))
  3275  	// cond:
  3276  	// result: (MOVWconst [0])
  3277  	for {
  3278  		_ = v.Args[1]
  3279  		v_1 := v.Args[1]
  3280  		if v_1.Op != OpMIPSMOVWconst {
  3281  			break
  3282  		}
  3283  		if v_1.AuxInt != 0 {
  3284  			break
  3285  		}
  3286  		v.reset(OpMIPSMOVWconst)
  3287  		v.AuxInt = 0
  3288  		return true
  3289  	}
  3290  	// match: (CMOVZzero a (MOVWconst [c]))
  3291  	// cond: c!=0
  3292  	// result: a
  3293  	for {
  3294  		_ = v.Args[1]
  3295  		a := v.Args[0]
  3296  		v_1 := v.Args[1]
  3297  		if v_1.Op != OpMIPSMOVWconst {
  3298  			break
  3299  		}
  3300  		c := v_1.AuxInt
  3301  		if !(c != 0) {
  3302  			break
  3303  		}
  3304  		v.reset(OpCopy)
  3305  		v.Type = a.Type
  3306  		v.AddArg(a)
  3307  		return true
  3308  	}
  3309  	return false
  3310  }
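// Atomic ops with constant operands use the immediate variants: an atomic add
// of a 16-bit constant becomes LoweredAtomicAddconst, and an atomic store of
// zero becomes LoweredAtomicStorezero.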
  3311  func rewriteValueMIPS_OpMIPSLoweredAtomicAdd_0(v *Value) bool {
  3312  	// match: (LoweredAtomicAdd ptr (MOVWconst [c]) mem)
  3313  	// cond: is16Bit(c)
  3314  	// result: (LoweredAtomicAddconst [c] ptr mem)
  3315  	for {
  3316  		_ = v.Args[2]
  3317  		ptr := v.Args[0]
  3318  		v_1 := v.Args[1]
  3319  		if v_1.Op != OpMIPSMOVWconst {
  3320  			break
  3321  		}
  3322  		c := v_1.AuxInt
  3323  		mem := v.Args[2]
  3324  		if !(is16Bit(c)) {
  3325  			break
  3326  		}
  3327  		v.reset(OpMIPSLoweredAtomicAddconst)
  3328  		v.AuxInt = c
  3329  		v.AddArg(ptr)
  3330  		v.AddArg(mem)
  3331  		return true
  3332  	}
  3333  	return false
  3334  }
  3335  func rewriteValueMIPS_OpMIPSLoweredAtomicStore_0(v *Value) bool {
  3336  	// match: (LoweredAtomicStore ptr (MOVWconst [0]) mem)
  3337  	// cond:
  3338  	// result: (LoweredAtomicStorezero ptr mem)
  3339  	for {
  3340  		_ = v.Args[2]
  3341  		ptr := v.Args[0]
  3342  		v_1 := v.Args[1]
  3343  		if v_1.Op != OpMIPSMOVWconst {
  3344  			break
  3345  		}
  3346  		if v_1.AuxInt != 0 {
  3347  			break
  3348  		}
  3349  		mem := v.Args[2]
  3350  		v.reset(OpMIPSLoweredAtomicStorezero)
  3351  		v.AddArg(ptr)
  3352  		v.AddArg(mem)
  3353  		return true
  3354  	}
  3355  	return false
  3356  }
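// The MOV*load/MOV*store rules below fold address arithmetic into the
// instruction's offset: an ADDconst or MOVWaddr base is absorbed when the
// combined offset still fits (or the addend has no other uses), symbols are
// merged, and a load that reads back a value just stored to the same address
// is replaced by the stored value, suitably extended.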
  3357  func rewriteValueMIPS_OpMIPSMOVBUload_0(v *Value) bool {
  3358  	// match: (MOVBUload [off1] {sym} x:(ADDconst [off2] ptr) mem)
  3359  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  3360  	// result: (MOVBUload [off1+off2] {sym} ptr mem)
  3361  	for {
  3362  		off1 := v.AuxInt
  3363  		sym := v.Aux
  3364  		_ = v.Args[1]
  3365  		x := v.Args[0]
  3366  		if x.Op != OpMIPSADDconst {
  3367  			break
  3368  		}
  3369  		off2 := x.AuxInt
  3370  		ptr := x.Args[0]
  3371  		mem := v.Args[1]
  3372  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  3373  			break
  3374  		}
  3375  		v.reset(OpMIPSMOVBUload)
  3376  		v.AuxInt = off1 + off2
  3377  		v.Aux = sym
  3378  		v.AddArg(ptr)
  3379  		v.AddArg(mem)
  3380  		return true
  3381  	}
  3382  	// match: (MOVBUload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  3383  	// cond: canMergeSym(sym1,sym2)
  3384  	// result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  3385  	for {
  3386  		off1 := v.AuxInt
  3387  		sym1 := v.Aux
  3388  		_ = v.Args[1]
  3389  		v_0 := v.Args[0]
  3390  		if v_0.Op != OpMIPSMOVWaddr {
  3391  			break
  3392  		}
  3393  		off2 := v_0.AuxInt
  3394  		sym2 := v_0.Aux
  3395  		ptr := v_0.Args[0]
  3396  		mem := v.Args[1]
  3397  		if !(canMergeSym(sym1, sym2)) {
  3398  			break
  3399  		}
  3400  		v.reset(OpMIPSMOVBUload)
  3401  		v.AuxInt = off1 + off2
  3402  		v.Aux = mergeSym(sym1, sym2)
  3403  		v.AddArg(ptr)
  3404  		v.AddArg(mem)
  3405  		return true
  3406  	}
  3407  	// match: (MOVBUload [off] {sym} ptr (MOVBstore [off2] {sym2} ptr2 x _))
  3408  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  3409  	// result: (MOVBUreg x)
  3410  	for {
  3411  		off := v.AuxInt
  3412  		sym := v.Aux
  3413  		_ = v.Args[1]
  3414  		ptr := v.Args[0]
  3415  		v_1 := v.Args[1]
  3416  		if v_1.Op != OpMIPSMOVBstore {
  3417  			break
  3418  		}
  3419  		off2 := v_1.AuxInt
  3420  		sym2 := v_1.Aux
  3421  		_ = v_1.Args[2]
  3422  		ptr2 := v_1.Args[0]
  3423  		x := v_1.Args[1]
  3424  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  3425  			break
  3426  		}
  3427  		v.reset(OpMIPSMOVBUreg)
  3428  		v.AddArg(x)
  3429  		return true
  3430  	}
  3431  	return false
  3432  }
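// MOVBUreg drops redundant zero-extensions: a value produced by MOVBUload or
// MOVBUreg is already zero-extended, so only a MOVWreg is needed. A single-use
// signed byte load feeding the extension is converted into the unsigned load
// directly, and constant operands are folded.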
  3433  func rewriteValueMIPS_OpMIPSMOVBUreg_0(v *Value) bool {
  3434  	b := v.Block
  3435  	_ = b
  3436  	// match: (MOVBUreg x:(MOVBUload _ _))
  3437  	// cond:
  3438  	// result: (MOVWreg x)
  3439  	for {
  3440  		x := v.Args[0]
  3441  		if x.Op != OpMIPSMOVBUload {
  3442  			break
  3443  		}
  3444  		_ = x.Args[1]
  3445  		v.reset(OpMIPSMOVWreg)
  3446  		v.AddArg(x)
  3447  		return true
  3448  	}
  3449  	// match: (MOVBUreg x:(MOVBUreg _))
  3450  	// cond:
  3451  	// result: (MOVWreg x)
  3452  	for {
  3453  		x := v.Args[0]
  3454  		if x.Op != OpMIPSMOVBUreg {
  3455  			break
  3456  		}
  3457  		v.reset(OpMIPSMOVWreg)
  3458  		v.AddArg(x)
  3459  		return true
  3460  	}
  3461  	// match: (MOVBUreg <t> x:(MOVBload [off] {sym} ptr mem))
  3462  	// cond: x.Uses == 1 && clobber(x)
  3463  	// result: @x.Block (MOVBUload <t> [off] {sym} ptr mem)
  3464  	for {
  3465  		t := v.Type
  3466  		x := v.Args[0]
  3467  		if x.Op != OpMIPSMOVBload {
  3468  			break
  3469  		}
  3470  		off := x.AuxInt
  3471  		sym := x.Aux
  3472  		_ = x.Args[1]
  3473  		ptr := x.Args[0]
  3474  		mem := x.Args[1]
  3475  		if !(x.Uses == 1 && clobber(x)) {
  3476  			break
  3477  		}
  3478  		b = x.Block
  3479  		v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, t)
  3480  		v.reset(OpCopy)
  3481  		v.AddArg(v0)
  3482  		v0.AuxInt = off
  3483  		v0.Aux = sym
  3484  		v0.AddArg(ptr)
  3485  		v0.AddArg(mem)
  3486  		return true
  3487  	}
  3488  	// match: (MOVBUreg (ANDconst [c] x))
  3489  	// cond:
  3490  	// result: (ANDconst [c&0xff] x)
  3491  	for {
  3492  		v_0 := v.Args[0]
  3493  		if v_0.Op != OpMIPSANDconst {
  3494  			break
  3495  		}
  3496  		c := v_0.AuxInt
  3497  		x := v_0.Args[0]
  3498  		v.reset(OpMIPSANDconst)
  3499  		v.AuxInt = c & 0xff
  3500  		v.AddArg(x)
  3501  		return true
  3502  	}
  3503  	// match: (MOVBUreg (MOVWconst [c]))
  3504  	// cond:
  3505  	// result: (MOVWconst [int64(uint8(c))])
  3506  	for {
  3507  		v_0 := v.Args[0]
  3508  		if v_0.Op != OpMIPSMOVWconst {
  3509  			break
  3510  		}
  3511  		c := v_0.AuxInt
  3512  		v.reset(OpMIPSMOVWconst)
  3513  		v.AuxInt = int64(uint8(c))
  3514  		return true
  3515  	}
  3516  	return false
  3517  }
  3518  func rewriteValueMIPS_OpMIPSMOVBload_0(v *Value) bool {
  3519  	// match: (MOVBload [off1] {sym} x:(ADDconst [off2] ptr) mem)
  3520  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  3521  	// result: (MOVBload [off1+off2] {sym} ptr mem)
  3522  	for {
  3523  		off1 := v.AuxInt
  3524  		sym := v.Aux
  3525  		_ = v.Args[1]
  3526  		x := v.Args[0]
  3527  		if x.Op != OpMIPSADDconst {
  3528  			break
  3529  		}
  3530  		off2 := x.AuxInt
  3531  		ptr := x.Args[0]
  3532  		mem := v.Args[1]
  3533  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  3534  			break
  3535  		}
  3536  		v.reset(OpMIPSMOVBload)
  3537  		v.AuxInt = off1 + off2
  3538  		v.Aux = sym
  3539  		v.AddArg(ptr)
  3540  		v.AddArg(mem)
  3541  		return true
  3542  	}
  3543  	// match: (MOVBload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  3544  	// cond: canMergeSym(sym1,sym2)
  3545  	// result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  3546  	for {
  3547  		off1 := v.AuxInt
  3548  		sym1 := v.Aux
  3549  		_ = v.Args[1]
  3550  		v_0 := v.Args[0]
  3551  		if v_0.Op != OpMIPSMOVWaddr {
  3552  			break
  3553  		}
  3554  		off2 := v_0.AuxInt
  3555  		sym2 := v_0.Aux
  3556  		ptr := v_0.Args[0]
  3557  		mem := v.Args[1]
  3558  		if !(canMergeSym(sym1, sym2)) {
  3559  			break
  3560  		}
  3561  		v.reset(OpMIPSMOVBload)
  3562  		v.AuxInt = off1 + off2
  3563  		v.Aux = mergeSym(sym1, sym2)
  3564  		v.AddArg(ptr)
  3565  		v.AddArg(mem)
  3566  		return true
  3567  	}
  3568  	// match: (MOVBload [off] {sym} ptr (MOVBstore [off2] {sym2} ptr2 x _))
  3569  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  3570  	// result: (MOVBreg x)
  3571  	for {
  3572  		off := v.AuxInt
  3573  		sym := v.Aux
  3574  		_ = v.Args[1]
  3575  		ptr := v.Args[0]
  3576  		v_1 := v.Args[1]
  3577  		if v_1.Op != OpMIPSMOVBstore {
  3578  			break
  3579  		}
  3580  		off2 := v_1.AuxInt
  3581  		sym2 := v_1.Aux
  3582  		_ = v_1.Args[2]
  3583  		ptr2 := v_1.Args[0]
  3584  		x := v_1.Args[1]
  3585  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  3586  			break
  3587  		}
  3588  		v.reset(OpMIPSMOVBreg)
  3589  		v.AddArg(x)
  3590  		return true
  3591  	}
  3592  	return false
  3593  }
  3594  func rewriteValueMIPS_OpMIPSMOVBreg_0(v *Value) bool {
  3595  	b := v.Block
  3596  	_ = b
  3597  	// match: (MOVBreg x:(MOVBload _ _))
  3598  	// cond:
  3599  	// result: (MOVWreg x)
  3600  	for {
  3601  		x := v.Args[0]
  3602  		if x.Op != OpMIPSMOVBload {
  3603  			break
  3604  		}
  3605  		_ = x.Args[1]
  3606  		v.reset(OpMIPSMOVWreg)
  3607  		v.AddArg(x)
  3608  		return true
  3609  	}
  3610  	// match: (MOVBreg x:(MOVBreg _))
  3611  	// cond:
  3612  	// result: (MOVWreg x)
  3613  	for {
  3614  		x := v.Args[0]
  3615  		if x.Op != OpMIPSMOVBreg {
  3616  			break
  3617  		}
  3618  		v.reset(OpMIPSMOVWreg)
  3619  		v.AddArg(x)
  3620  		return true
  3621  	}
  3622  	// match: (MOVBreg <t> x:(MOVBUload [off] {sym} ptr mem))
  3623  	// cond: x.Uses == 1 && clobber(x)
  3624  	// result: @x.Block (MOVBload <t> [off] {sym} ptr mem)
  3625  	for {
  3626  		t := v.Type
  3627  		x := v.Args[0]
  3628  		if x.Op != OpMIPSMOVBUload {
  3629  			break
  3630  		}
  3631  		off := x.AuxInt
  3632  		sym := x.Aux
  3633  		_ = x.Args[1]
  3634  		ptr := x.Args[0]
  3635  		mem := x.Args[1]
  3636  		if !(x.Uses == 1 && clobber(x)) {
  3637  			break
  3638  		}
  3639  		b = x.Block
  3640  		v0 := b.NewValue0(v.Pos, OpMIPSMOVBload, t)
  3641  		v.reset(OpCopy)
  3642  		v.AddArg(v0)
  3643  		v0.AuxInt = off
  3644  		v0.Aux = sym
  3645  		v0.AddArg(ptr)
  3646  		v0.AddArg(mem)
  3647  		return true
  3648  	}
  3649  	// match: (MOVBreg (ANDconst [c] x))
  3650  	// cond: c & 0x80 == 0
  3651  	// result: (ANDconst [c&0x7f] x)
  3652  	for {
  3653  		v_0 := v.Args[0]
  3654  		if v_0.Op != OpMIPSANDconst {
  3655  			break
  3656  		}
  3657  		c := v_0.AuxInt
  3658  		x := v_0.Args[0]
  3659  		if !(c&0x80 == 0) {
  3660  			break
  3661  		}
  3662  		v.reset(OpMIPSANDconst)
  3663  		v.AuxInt = c & 0x7f
  3664  		v.AddArg(x)
  3665  		return true
  3666  	}
  3667  	// match: (MOVBreg (MOVWconst [c]))
  3668  	// cond:
  3669  	// result: (MOVWconst [int64(int8(c))])
  3670  	for {
  3671  		v_0 := v.Args[0]
  3672  		if v_0.Op != OpMIPSMOVWconst {
  3673  			break
  3674  		}
  3675  		c := v_0.AuxInt
  3676  		v.reset(OpMIPSMOVWconst)
  3677  		v.AuxInt = int64(int8(c))
  3678  		return true
  3679  	}
  3680  	return false
  3681  }
  3682  func rewriteValueMIPS_OpMIPSMOVBstore_0(v *Value) bool {
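// Byte stores only write the low 8 bits, so MOVBreg/MOVBUreg/MOVHreg/MOVHUreg/
// MOVWreg extensions of the stored value are dropped, and storing a constant
// zero uses the dedicated MOVBstorezero form.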
  3683  	// match: (MOVBstore [off1] {sym} x:(ADDconst [off2] ptr) val mem)
  3684  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  3685  	// result: (MOVBstore [off1+off2] {sym} ptr val mem)
  3686  	for {
  3687  		off1 := v.AuxInt
  3688  		sym := v.Aux
  3689  		_ = v.Args[2]
  3690  		x := v.Args[0]
  3691  		if x.Op != OpMIPSADDconst {
  3692  			break
  3693  		}
  3694  		off2 := x.AuxInt
  3695  		ptr := x.Args[0]
  3696  		val := v.Args[1]
  3697  		mem := v.Args[2]
  3698  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  3699  			break
  3700  		}
  3701  		v.reset(OpMIPSMOVBstore)
  3702  		v.AuxInt = off1 + off2
  3703  		v.Aux = sym
  3704  		v.AddArg(ptr)
  3705  		v.AddArg(val)
  3706  		v.AddArg(mem)
  3707  		return true
  3708  	}
  3709  	// match: (MOVBstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem)
  3710  	// cond: canMergeSym(sym1,sym2)
  3711  	// result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
  3712  	for {
  3713  		off1 := v.AuxInt
  3714  		sym1 := v.Aux
  3715  		_ = v.Args[2]
  3716  		v_0 := v.Args[0]
  3717  		if v_0.Op != OpMIPSMOVWaddr {
  3718  			break
  3719  		}
  3720  		off2 := v_0.AuxInt
  3721  		sym2 := v_0.Aux
  3722  		ptr := v_0.Args[0]
  3723  		val := v.Args[1]
  3724  		mem := v.Args[2]
  3725  		if !(canMergeSym(sym1, sym2)) {
  3726  			break
  3727  		}
  3728  		v.reset(OpMIPSMOVBstore)
  3729  		v.AuxInt = off1 + off2
  3730  		v.Aux = mergeSym(sym1, sym2)
  3731  		v.AddArg(ptr)
  3732  		v.AddArg(val)
  3733  		v.AddArg(mem)
  3734  		return true
  3735  	}
  3736  	// match: (MOVBstore [off] {sym} ptr (MOVWconst [0]) mem)
  3737  	// cond:
  3738  	// result: (MOVBstorezero [off] {sym} ptr mem)
  3739  	for {
  3740  		off := v.AuxInt
  3741  		sym := v.Aux
  3742  		_ = v.Args[2]
  3743  		ptr := v.Args[0]
  3744  		v_1 := v.Args[1]
  3745  		if v_1.Op != OpMIPSMOVWconst {
  3746  			break
  3747  		}
  3748  		if v_1.AuxInt != 0 {
  3749  			break
  3750  		}
  3751  		mem := v.Args[2]
  3752  		v.reset(OpMIPSMOVBstorezero)
  3753  		v.AuxInt = off
  3754  		v.Aux = sym
  3755  		v.AddArg(ptr)
  3756  		v.AddArg(mem)
  3757  		return true
  3758  	}
  3759  	// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
  3760  	// cond:
  3761  	// result: (MOVBstore [off] {sym} ptr x mem)
  3762  	for {
  3763  		off := v.AuxInt
  3764  		sym := v.Aux
  3765  		_ = v.Args[2]
  3766  		ptr := v.Args[0]
  3767  		v_1 := v.Args[1]
  3768  		if v_1.Op != OpMIPSMOVBreg {
  3769  			break
  3770  		}
  3771  		x := v_1.Args[0]
  3772  		mem := v.Args[2]
  3773  		v.reset(OpMIPSMOVBstore)
  3774  		v.AuxInt = off
  3775  		v.Aux = sym
  3776  		v.AddArg(ptr)
  3777  		v.AddArg(x)
  3778  		v.AddArg(mem)
  3779  		return true
  3780  	}
  3781  	// match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
  3782  	// cond:
  3783  	// result: (MOVBstore [off] {sym} ptr x mem)
  3784  	for {
  3785  		off := v.AuxInt
  3786  		sym := v.Aux
  3787  		_ = v.Args[2]
  3788  		ptr := v.Args[0]
  3789  		v_1 := v.Args[1]
  3790  		if v_1.Op != OpMIPSMOVBUreg {
  3791  			break
  3792  		}
  3793  		x := v_1.Args[0]
  3794  		mem := v.Args[2]
  3795  		v.reset(OpMIPSMOVBstore)
  3796  		v.AuxInt = off
  3797  		v.Aux = sym
  3798  		v.AddArg(ptr)
  3799  		v.AddArg(x)
  3800  		v.AddArg(mem)
  3801  		return true
  3802  	}
  3803  	// match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
  3804  	// cond:
  3805  	// result: (MOVBstore [off] {sym} ptr x mem)
  3806  	for {
  3807  		off := v.AuxInt
  3808  		sym := v.Aux
  3809  		_ = v.Args[2]
  3810  		ptr := v.Args[0]
  3811  		v_1 := v.Args[1]
  3812  		if v_1.Op != OpMIPSMOVHreg {
  3813  			break
  3814  		}
  3815  		x := v_1.Args[0]
  3816  		mem := v.Args[2]
  3817  		v.reset(OpMIPSMOVBstore)
  3818  		v.AuxInt = off
  3819  		v.Aux = sym
  3820  		v.AddArg(ptr)
  3821  		v.AddArg(x)
  3822  		v.AddArg(mem)
  3823  		return true
  3824  	}
  3825  	// match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
  3826  	// cond:
  3827  	// result: (MOVBstore [off] {sym} ptr x mem)
  3828  	for {
  3829  		off := v.AuxInt
  3830  		sym := v.Aux
  3831  		_ = v.Args[2]
  3832  		ptr := v.Args[0]
  3833  		v_1 := v.Args[1]
  3834  		if v_1.Op != OpMIPSMOVHUreg {
  3835  			break
  3836  		}
  3837  		x := v_1.Args[0]
  3838  		mem := v.Args[2]
  3839  		v.reset(OpMIPSMOVBstore)
  3840  		v.AuxInt = off
  3841  		v.Aux = sym
  3842  		v.AddArg(ptr)
  3843  		v.AddArg(x)
  3844  		v.AddArg(mem)
  3845  		return true
  3846  	}
  3847  	// match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
  3848  	// cond:
  3849  	// result: (MOVBstore [off] {sym} ptr x mem)
  3850  	for {
  3851  		off := v.AuxInt
  3852  		sym := v.Aux
  3853  		_ = v.Args[2]
  3854  		ptr := v.Args[0]
  3855  		v_1 := v.Args[1]
  3856  		if v_1.Op != OpMIPSMOVWreg {
  3857  			break
  3858  		}
  3859  		x := v_1.Args[0]
  3860  		mem := v.Args[2]
  3861  		v.reset(OpMIPSMOVBstore)
  3862  		v.AuxInt = off
  3863  		v.Aux = sym
  3864  		v.AddArg(ptr)
  3865  		v.AddArg(x)
  3866  		v.AddArg(mem)
  3867  		return true
  3868  	}
  3869  	return false
  3870  }
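// Note on the MOVBstore rules above (an informal reading, not generated text):
// a byte store writes only the low 8 bits of its value operand, so a value that
// was just sign- or zero-extended (MOVBreg, MOVBUreg, MOVHreg, MOVHUreg,
// MOVWreg) can be stored directly and the extension dropped, e.g.
// (MOVBstore [4] {s} p (MOVBUreg x) m) -> (MOVBstore [4] {s} p x m).
// Storing a constant zero is routed to MOVBstorezero, presumably so the
// zero register can be used instead of materializing the constant.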
  3871  func rewriteValueMIPS_OpMIPSMOVBstorezero_0(v *Value) bool {
  3872  	// match: (MOVBstorezero [off1] {sym} x:(ADDconst [off2] ptr) mem)
  3873  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  3874  	// result: (MOVBstorezero [off1+off2] {sym} ptr mem)
  3875  	for {
  3876  		off1 := v.AuxInt
  3877  		sym := v.Aux
  3878  		_ = v.Args[1]
  3879  		x := v.Args[0]
  3880  		if x.Op != OpMIPSADDconst {
  3881  			break
  3882  		}
  3883  		off2 := x.AuxInt
  3884  		ptr := x.Args[0]
  3885  		mem := v.Args[1]
  3886  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  3887  			break
  3888  		}
  3889  		v.reset(OpMIPSMOVBstorezero)
  3890  		v.AuxInt = off1 + off2
  3891  		v.Aux = sym
  3892  		v.AddArg(ptr)
  3893  		v.AddArg(mem)
  3894  		return true
  3895  	}
  3896  	// match: (MOVBstorezero [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  3897  	// cond: canMergeSym(sym1,sym2)
  3898  	// result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  3899  	for {
  3900  		off1 := v.AuxInt
  3901  		sym1 := v.Aux
  3902  		_ = v.Args[1]
  3903  		v_0 := v.Args[0]
  3904  		if v_0.Op != OpMIPSMOVWaddr {
  3905  			break
  3906  		}
  3907  		off2 := v_0.AuxInt
  3908  		sym2 := v_0.Aux
  3909  		ptr := v_0.Args[0]
  3910  		mem := v.Args[1]
  3911  		if !(canMergeSym(sym1, sym2)) {
  3912  			break
  3913  		}
  3914  		v.reset(OpMIPSMOVBstorezero)
  3915  		v.AuxInt = off1 + off2
  3916  		v.Aux = mergeSym(sym1, sym2)
  3917  		v.AddArg(ptr)
  3918  		v.AddArg(mem)
  3919  		return true
  3920  	}
  3921  	return false
  3922  }
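// Note: the two MOVBstorezero rules above follow the addressing-mode pattern
// shared by every load/store rewrite in this file. An ADDconst feeding the
// address is folded into the instruction's offset when the combined offset
// still fits in a signed 16-bit immediate (is16Bit) or when this is the
// ADDconst's only use, and a MOVWaddr base is folded by merging its symbol
// with the instruction's via canMergeSym/mergeSym. For example,
// (MOVBstorezero [4] {s} (ADDconst [8] p) m) -> (MOVBstorezero [12] {s} p m),
// since 12 fits comfortably in 16 bits.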
  3923  func rewriteValueMIPS_OpMIPSMOVDload_0(v *Value) bool {
  3924  	// match: (MOVDload [off1] {sym} x:(ADDconst [off2] ptr) mem)
  3925  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  3926  	// result: (MOVDload [off1+off2] {sym} ptr mem)
  3927  	for {
  3928  		off1 := v.AuxInt
  3929  		sym := v.Aux
  3930  		_ = v.Args[1]
  3931  		x := v.Args[0]
  3932  		if x.Op != OpMIPSADDconst {
  3933  			break
  3934  		}
  3935  		off2 := x.AuxInt
  3936  		ptr := x.Args[0]
  3937  		mem := v.Args[1]
  3938  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  3939  			break
  3940  		}
  3941  		v.reset(OpMIPSMOVDload)
  3942  		v.AuxInt = off1 + off2
  3943  		v.Aux = sym
  3944  		v.AddArg(ptr)
  3945  		v.AddArg(mem)
  3946  		return true
  3947  	}
  3948  	// match: (MOVDload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  3949  	// cond: canMergeSym(sym1,sym2)
  3950  	// result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  3951  	for {
  3952  		off1 := v.AuxInt
  3953  		sym1 := v.Aux
  3954  		_ = v.Args[1]
  3955  		v_0 := v.Args[0]
  3956  		if v_0.Op != OpMIPSMOVWaddr {
  3957  			break
  3958  		}
  3959  		off2 := v_0.AuxInt
  3960  		sym2 := v_0.Aux
  3961  		ptr := v_0.Args[0]
  3962  		mem := v.Args[1]
  3963  		if !(canMergeSym(sym1, sym2)) {
  3964  			break
  3965  		}
  3966  		v.reset(OpMIPSMOVDload)
  3967  		v.AuxInt = off1 + off2
  3968  		v.Aux = mergeSym(sym1, sym2)
  3969  		v.AddArg(ptr)
  3970  		v.AddArg(mem)
  3971  		return true
  3972  	}
  3973  	// match: (MOVDload [off] {sym} ptr (MOVDstore [off2] {sym2} ptr2 x _))
  3974  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  3975  	// result: x
  3976  	for {
  3977  		off := v.AuxInt
  3978  		sym := v.Aux
  3979  		_ = v.Args[1]
  3980  		ptr := v.Args[0]
  3981  		v_1 := v.Args[1]
  3982  		if v_1.Op != OpMIPSMOVDstore {
  3983  			break
  3984  		}
  3985  		off2 := v_1.AuxInt
  3986  		sym2 := v_1.Aux
  3987  		_ = v_1.Args[2]
  3988  		ptr2 := v_1.Args[0]
  3989  		x := v_1.Args[1]
  3990  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  3991  			break
  3992  		}
  3993  		v.reset(OpCopy)
  3994  		v.Type = x.Type
  3995  		v.AddArg(x)
  3996  		return true
  3997  	}
  3998  	return false
  3999  }
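// Note: the last MOVDload rule above is store-to-load forwarding. A load whose
// memory argument is a store to the same symbol, offset, and pointer
// (isSamePtr) can simply reuse the stored value, so the load is replaced by a
// copy of x. The same shape recurs below for MOVFload, MOVHUload (wrapped in
// MOVHUreg to keep the zero-extension), MOVHload (wrapped in MOVHreg), and
// MOVWload.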
  4000  func rewriteValueMIPS_OpMIPSMOVDstore_0(v *Value) bool {
  4001  	// match: (MOVDstore [off1] {sym} x:(ADDconst [off2] ptr) val mem)
  4002  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4003  	// result: (MOVDstore [off1+off2] {sym} ptr val mem)
  4004  	for {
  4005  		off1 := v.AuxInt
  4006  		sym := v.Aux
  4007  		_ = v.Args[2]
  4008  		x := v.Args[0]
  4009  		if x.Op != OpMIPSADDconst {
  4010  			break
  4011  		}
  4012  		off2 := x.AuxInt
  4013  		ptr := x.Args[0]
  4014  		val := v.Args[1]
  4015  		mem := v.Args[2]
  4016  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4017  			break
  4018  		}
  4019  		v.reset(OpMIPSMOVDstore)
  4020  		v.AuxInt = off1 + off2
  4021  		v.Aux = sym
  4022  		v.AddArg(ptr)
  4023  		v.AddArg(val)
  4024  		v.AddArg(mem)
  4025  		return true
  4026  	}
  4027  	// match: (MOVDstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem)
  4028  	// cond: canMergeSym(sym1,sym2)
  4029  	// result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
  4030  	for {
  4031  		off1 := v.AuxInt
  4032  		sym1 := v.Aux
  4033  		_ = v.Args[2]
  4034  		v_0 := v.Args[0]
  4035  		if v_0.Op != OpMIPSMOVWaddr {
  4036  			break
  4037  		}
  4038  		off2 := v_0.AuxInt
  4039  		sym2 := v_0.Aux
  4040  		ptr := v_0.Args[0]
  4041  		val := v.Args[1]
  4042  		mem := v.Args[2]
  4043  		if !(canMergeSym(sym1, sym2)) {
  4044  			break
  4045  		}
  4046  		v.reset(OpMIPSMOVDstore)
  4047  		v.AuxInt = off1 + off2
  4048  		v.Aux = mergeSym(sym1, sym2)
  4049  		v.AddArg(ptr)
  4050  		v.AddArg(val)
  4051  		v.AddArg(mem)
  4052  		return true
  4053  	}
  4054  	return false
  4055  }
  4056  func rewriteValueMIPS_OpMIPSMOVFload_0(v *Value) bool {
  4057  	// match: (MOVFload [off1] {sym} x:(ADDconst [off2] ptr) mem)
  4058  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4059  	// result: (MOVFload [off1+off2] {sym} ptr mem)
  4060  	for {
  4061  		off1 := v.AuxInt
  4062  		sym := v.Aux
  4063  		_ = v.Args[1]
  4064  		x := v.Args[0]
  4065  		if x.Op != OpMIPSADDconst {
  4066  			break
  4067  		}
  4068  		off2 := x.AuxInt
  4069  		ptr := x.Args[0]
  4070  		mem := v.Args[1]
  4071  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4072  			break
  4073  		}
  4074  		v.reset(OpMIPSMOVFload)
  4075  		v.AuxInt = off1 + off2
  4076  		v.Aux = sym
  4077  		v.AddArg(ptr)
  4078  		v.AddArg(mem)
  4079  		return true
  4080  	}
  4081  	// match: (MOVFload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  4082  	// cond: canMergeSym(sym1,sym2)
  4083  	// result: (MOVFload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4084  	for {
  4085  		off1 := v.AuxInt
  4086  		sym1 := v.Aux
  4087  		_ = v.Args[1]
  4088  		v_0 := v.Args[0]
  4089  		if v_0.Op != OpMIPSMOVWaddr {
  4090  			break
  4091  		}
  4092  		off2 := v_0.AuxInt
  4093  		sym2 := v_0.Aux
  4094  		ptr := v_0.Args[0]
  4095  		mem := v.Args[1]
  4096  		if !(canMergeSym(sym1, sym2)) {
  4097  			break
  4098  		}
  4099  		v.reset(OpMIPSMOVFload)
  4100  		v.AuxInt = off1 + off2
  4101  		v.Aux = mergeSym(sym1, sym2)
  4102  		v.AddArg(ptr)
  4103  		v.AddArg(mem)
  4104  		return true
  4105  	}
  4106  	// match: (MOVFload [off] {sym} ptr (MOVFstore [off2] {sym2} ptr2 x _))
  4107  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  4108  	// result: x
  4109  	for {
  4110  		off := v.AuxInt
  4111  		sym := v.Aux
  4112  		_ = v.Args[1]
  4113  		ptr := v.Args[0]
  4114  		v_1 := v.Args[1]
  4115  		if v_1.Op != OpMIPSMOVFstore {
  4116  			break
  4117  		}
  4118  		off2 := v_1.AuxInt
  4119  		sym2 := v_1.Aux
  4120  		_ = v_1.Args[2]
  4121  		ptr2 := v_1.Args[0]
  4122  		x := v_1.Args[1]
  4123  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  4124  			break
  4125  		}
  4126  		v.reset(OpCopy)
  4127  		v.Type = x.Type
  4128  		v.AddArg(x)
  4129  		return true
  4130  	}
  4131  	return false
  4132  }
  4133  func rewriteValueMIPS_OpMIPSMOVFstore_0(v *Value) bool {
  4134  	// match: (MOVFstore [off1] {sym} x:(ADDconst [off2] ptr) val mem)
  4135  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4136  	// result: (MOVFstore [off1+off2] {sym} ptr val mem)
  4137  	for {
  4138  		off1 := v.AuxInt
  4139  		sym := v.Aux
  4140  		_ = v.Args[2]
  4141  		x := v.Args[0]
  4142  		if x.Op != OpMIPSADDconst {
  4143  			break
  4144  		}
  4145  		off2 := x.AuxInt
  4146  		ptr := x.Args[0]
  4147  		val := v.Args[1]
  4148  		mem := v.Args[2]
  4149  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4150  			break
  4151  		}
  4152  		v.reset(OpMIPSMOVFstore)
  4153  		v.AuxInt = off1 + off2
  4154  		v.Aux = sym
  4155  		v.AddArg(ptr)
  4156  		v.AddArg(val)
  4157  		v.AddArg(mem)
  4158  		return true
  4159  	}
  4160  	// match: (MOVFstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem)
  4161  	// cond: canMergeSym(sym1,sym2)
  4162  	// result: (MOVFstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
  4163  	for {
  4164  		off1 := v.AuxInt
  4165  		sym1 := v.Aux
  4166  		_ = v.Args[2]
  4167  		v_0 := v.Args[0]
  4168  		if v_0.Op != OpMIPSMOVWaddr {
  4169  			break
  4170  		}
  4171  		off2 := v_0.AuxInt
  4172  		sym2 := v_0.Aux
  4173  		ptr := v_0.Args[0]
  4174  		val := v.Args[1]
  4175  		mem := v.Args[2]
  4176  		if !(canMergeSym(sym1, sym2)) {
  4177  			break
  4178  		}
  4179  		v.reset(OpMIPSMOVFstore)
  4180  		v.AuxInt = off1 + off2
  4181  		v.Aux = mergeSym(sym1, sym2)
  4182  		v.AddArg(ptr)
  4183  		v.AddArg(val)
  4184  		v.AddArg(mem)
  4185  		return true
  4186  	}
  4187  	return false
  4188  }
  4189  func rewriteValueMIPS_OpMIPSMOVHUload_0(v *Value) bool {
  4190  	// match: (MOVHUload [off1] {sym} x:(ADDconst [off2] ptr) mem)
  4191  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4192  	// result: (MOVHUload [off1+off2] {sym} ptr mem)
  4193  	for {
  4194  		off1 := v.AuxInt
  4195  		sym := v.Aux
  4196  		_ = v.Args[1]
  4197  		x := v.Args[0]
  4198  		if x.Op != OpMIPSADDconst {
  4199  			break
  4200  		}
  4201  		off2 := x.AuxInt
  4202  		ptr := x.Args[0]
  4203  		mem := v.Args[1]
  4204  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4205  			break
  4206  		}
  4207  		v.reset(OpMIPSMOVHUload)
  4208  		v.AuxInt = off1 + off2
  4209  		v.Aux = sym
  4210  		v.AddArg(ptr)
  4211  		v.AddArg(mem)
  4212  		return true
  4213  	}
  4214  	// match: (MOVHUload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  4215  	// cond: canMergeSym(sym1,sym2)
  4216  	// result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4217  	for {
  4218  		off1 := v.AuxInt
  4219  		sym1 := v.Aux
  4220  		_ = v.Args[1]
  4221  		v_0 := v.Args[0]
  4222  		if v_0.Op != OpMIPSMOVWaddr {
  4223  			break
  4224  		}
  4225  		off2 := v_0.AuxInt
  4226  		sym2 := v_0.Aux
  4227  		ptr := v_0.Args[0]
  4228  		mem := v.Args[1]
  4229  		if !(canMergeSym(sym1, sym2)) {
  4230  			break
  4231  		}
  4232  		v.reset(OpMIPSMOVHUload)
  4233  		v.AuxInt = off1 + off2
  4234  		v.Aux = mergeSym(sym1, sym2)
  4235  		v.AddArg(ptr)
  4236  		v.AddArg(mem)
  4237  		return true
  4238  	}
  4239  	// match: (MOVHUload [off] {sym} ptr (MOVHstore [off2] {sym2} ptr2 x _))
  4240  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  4241  	// result: (MOVHUreg x)
  4242  	for {
  4243  		off := v.AuxInt
  4244  		sym := v.Aux
  4245  		_ = v.Args[1]
  4246  		ptr := v.Args[0]
  4247  		v_1 := v.Args[1]
  4248  		if v_1.Op != OpMIPSMOVHstore {
  4249  			break
  4250  		}
  4251  		off2 := v_1.AuxInt
  4252  		sym2 := v_1.Aux
  4253  		_ = v_1.Args[2]
  4254  		ptr2 := v_1.Args[0]
  4255  		x := v_1.Args[1]
  4256  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  4257  			break
  4258  		}
  4259  		v.reset(OpMIPSMOVHUreg)
  4260  		v.AddArg(x)
  4261  		return true
  4262  	}
  4263  	return false
  4264  }
  4265  func rewriteValueMIPS_OpMIPSMOVHUreg_0(v *Value) bool {
  4266  	b := v.Block
  4267  	_ = b
  4268  	// match: (MOVHUreg x:(MOVBUload _ _))
  4269  	// cond:
  4270  	// result: (MOVWreg x)
  4271  	for {
  4272  		x := v.Args[0]
  4273  		if x.Op != OpMIPSMOVBUload {
  4274  			break
  4275  		}
  4276  		_ = x.Args[1]
  4277  		v.reset(OpMIPSMOVWreg)
  4278  		v.AddArg(x)
  4279  		return true
  4280  	}
  4281  	// match: (MOVHUreg x:(MOVHUload _ _))
  4282  	// cond:
  4283  	// result: (MOVWreg x)
  4284  	for {
  4285  		x := v.Args[0]
  4286  		if x.Op != OpMIPSMOVHUload {
  4287  			break
  4288  		}
  4289  		_ = x.Args[1]
  4290  		v.reset(OpMIPSMOVWreg)
  4291  		v.AddArg(x)
  4292  		return true
  4293  	}
  4294  	// match: (MOVHUreg x:(MOVBUreg _))
  4295  	// cond:
  4296  	// result: (MOVWreg x)
  4297  	for {
  4298  		x := v.Args[0]
  4299  		if x.Op != OpMIPSMOVBUreg {
  4300  			break
  4301  		}
  4302  		v.reset(OpMIPSMOVWreg)
  4303  		v.AddArg(x)
  4304  		return true
  4305  	}
  4306  	// match: (MOVHUreg x:(MOVHUreg _))
  4307  	// cond:
  4308  	// result: (MOVWreg x)
  4309  	for {
  4310  		x := v.Args[0]
  4311  		if x.Op != OpMIPSMOVHUreg {
  4312  			break
  4313  		}
  4314  		v.reset(OpMIPSMOVWreg)
  4315  		v.AddArg(x)
  4316  		return true
  4317  	}
  4318  	// match: (MOVHUreg <t> x:(MOVHload [off] {sym} ptr mem))
  4319  	// cond: x.Uses == 1 && clobber(x)
  4320  	// result: @x.Block (MOVHUload <t> [off] {sym} ptr mem)
  4321  	for {
  4322  		t := v.Type
  4323  		x := v.Args[0]
  4324  		if x.Op != OpMIPSMOVHload {
  4325  			break
  4326  		}
  4327  		off := x.AuxInt
  4328  		sym := x.Aux
  4329  		_ = x.Args[1]
  4330  		ptr := x.Args[0]
  4331  		mem := x.Args[1]
  4332  		if !(x.Uses == 1 && clobber(x)) {
  4333  			break
  4334  		}
  4335  		b = x.Block
  4336  		v0 := b.NewValue0(v.Pos, OpMIPSMOVHUload, t)
  4337  		v.reset(OpCopy)
  4338  		v.AddArg(v0)
  4339  		v0.AuxInt = off
  4340  		v0.Aux = sym
  4341  		v0.AddArg(ptr)
  4342  		v0.AddArg(mem)
  4343  		return true
  4344  	}
  4345  	// match: (MOVHUreg (ANDconst [c] x))
  4346  	// cond:
  4347  	// result: (ANDconst [c&0xffff] x)
  4348  	for {
  4349  		v_0 := v.Args[0]
  4350  		if v_0.Op != OpMIPSANDconst {
  4351  			break
  4352  		}
  4353  		c := v_0.AuxInt
  4354  		x := v_0.Args[0]
  4355  		v.reset(OpMIPSANDconst)
  4356  		v.AuxInt = c & 0xffff
  4357  		v.AddArg(x)
  4358  		return true
  4359  	}
  4360  	// match: (MOVHUreg (MOVWconst [c]))
  4361  	// cond:
  4362  	// result: (MOVWconst [int64(uint16(c))])
  4363  	for {
  4364  		v_0 := v.Args[0]
  4365  		if v_0.Op != OpMIPSMOVWconst {
  4366  			break
  4367  		}
  4368  		c := v_0.AuxInt
  4369  		v.reset(OpMIPSMOVWconst)
  4370  		v.AuxInt = int64(uint16(c))
  4371  		return true
  4372  	}
  4373  	return false
  4374  }
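// Note on the MOVHUreg rules above: when the operand is already known to fit
// in 16 unsigned bits (it came from an unsigned byte/halfword load or a
// previous zero-extension), the extension is a no-op and collapses to a plain
// MOVWreg. A sign-extending MOVHload feeding a single-use MOVHUreg is turned
// into an unsigned MOVHUload emitted in the load's own block. Masks and
// constants fold arithmetically: (MOVHUreg (ANDconst [c] x)) keeps only the
// low 16 bits of the mask, and (MOVHUreg (MOVWconst [c])) becomes
// (MOVWconst [int64(uint16(c))]), e.g. c = 0x12345 folds to 0x2345.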
  4375  func rewriteValueMIPS_OpMIPSMOVHload_0(v *Value) bool {
  4376  	// match: (MOVHload [off1] {sym} x:(ADDconst [off2] ptr) mem)
  4377  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4378  	// result: (MOVHload [off1+off2] {sym} ptr mem)
  4379  	for {
  4380  		off1 := v.AuxInt
  4381  		sym := v.Aux
  4382  		_ = v.Args[1]
  4383  		x := v.Args[0]
  4384  		if x.Op != OpMIPSADDconst {
  4385  			break
  4386  		}
  4387  		off2 := x.AuxInt
  4388  		ptr := x.Args[0]
  4389  		mem := v.Args[1]
  4390  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4391  			break
  4392  		}
  4393  		v.reset(OpMIPSMOVHload)
  4394  		v.AuxInt = off1 + off2
  4395  		v.Aux = sym
  4396  		v.AddArg(ptr)
  4397  		v.AddArg(mem)
  4398  		return true
  4399  	}
  4400  	// match: (MOVHload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  4401  	// cond: canMergeSym(sym1,sym2)
  4402  	// result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4403  	for {
  4404  		off1 := v.AuxInt
  4405  		sym1 := v.Aux
  4406  		_ = v.Args[1]
  4407  		v_0 := v.Args[0]
  4408  		if v_0.Op != OpMIPSMOVWaddr {
  4409  			break
  4410  		}
  4411  		off2 := v_0.AuxInt
  4412  		sym2 := v_0.Aux
  4413  		ptr := v_0.Args[0]
  4414  		mem := v.Args[1]
  4415  		if !(canMergeSym(sym1, sym2)) {
  4416  			break
  4417  		}
  4418  		v.reset(OpMIPSMOVHload)
  4419  		v.AuxInt = off1 + off2
  4420  		v.Aux = mergeSym(sym1, sym2)
  4421  		v.AddArg(ptr)
  4422  		v.AddArg(mem)
  4423  		return true
  4424  	}
  4425  	// match: (MOVHload [off] {sym} ptr (MOVHstore [off2] {sym2} ptr2 x _))
  4426  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  4427  	// result: (MOVHreg x)
  4428  	for {
  4429  		off := v.AuxInt
  4430  		sym := v.Aux
  4431  		_ = v.Args[1]
  4432  		ptr := v.Args[0]
  4433  		v_1 := v.Args[1]
  4434  		if v_1.Op != OpMIPSMOVHstore {
  4435  			break
  4436  		}
  4437  		off2 := v_1.AuxInt
  4438  		sym2 := v_1.Aux
  4439  		_ = v_1.Args[2]
  4440  		ptr2 := v_1.Args[0]
  4441  		x := v_1.Args[1]
  4442  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  4443  			break
  4444  		}
  4445  		v.reset(OpMIPSMOVHreg)
  4446  		v.AddArg(x)
  4447  		return true
  4448  	}
  4449  	return false
  4450  }
  4451  func rewriteValueMIPS_OpMIPSMOVHreg_0(v *Value) bool {
  4452  	b := v.Block
  4453  	_ = b
  4454  	// match: (MOVHreg x:(MOVBload _ _))
  4455  	// cond:
  4456  	// result: (MOVWreg x)
  4457  	for {
  4458  		x := v.Args[0]
  4459  		if x.Op != OpMIPSMOVBload {
  4460  			break
  4461  		}
  4462  		_ = x.Args[1]
  4463  		v.reset(OpMIPSMOVWreg)
  4464  		v.AddArg(x)
  4465  		return true
  4466  	}
  4467  	// match: (MOVHreg x:(MOVBUload _ _))
  4468  	// cond:
  4469  	// result: (MOVWreg x)
  4470  	for {
  4471  		x := v.Args[0]
  4472  		if x.Op != OpMIPSMOVBUload {
  4473  			break
  4474  		}
  4475  		_ = x.Args[1]
  4476  		v.reset(OpMIPSMOVWreg)
  4477  		v.AddArg(x)
  4478  		return true
  4479  	}
  4480  	// match: (MOVHreg x:(MOVHload _ _))
  4481  	// cond:
  4482  	// result: (MOVWreg x)
  4483  	for {
  4484  		x := v.Args[0]
  4485  		if x.Op != OpMIPSMOVHload {
  4486  			break
  4487  		}
  4488  		_ = x.Args[1]
  4489  		v.reset(OpMIPSMOVWreg)
  4490  		v.AddArg(x)
  4491  		return true
  4492  	}
  4493  	// match: (MOVHreg x:(MOVBreg _))
  4494  	// cond:
  4495  	// result: (MOVWreg x)
  4496  	for {
  4497  		x := v.Args[0]
  4498  		if x.Op != OpMIPSMOVBreg {
  4499  			break
  4500  		}
  4501  		v.reset(OpMIPSMOVWreg)
  4502  		v.AddArg(x)
  4503  		return true
  4504  	}
  4505  	// match: (MOVHreg x:(MOVBUreg _))
  4506  	// cond:
  4507  	// result: (MOVWreg x)
  4508  	for {
  4509  		x := v.Args[0]
  4510  		if x.Op != OpMIPSMOVBUreg {
  4511  			break
  4512  		}
  4513  		v.reset(OpMIPSMOVWreg)
  4514  		v.AddArg(x)
  4515  		return true
  4516  	}
  4517  	// match: (MOVHreg x:(MOVHreg _))
  4518  	// cond:
  4519  	// result: (MOVWreg x)
  4520  	for {
  4521  		x := v.Args[0]
  4522  		if x.Op != OpMIPSMOVHreg {
  4523  			break
  4524  		}
  4525  		v.reset(OpMIPSMOVWreg)
  4526  		v.AddArg(x)
  4527  		return true
  4528  	}
  4529  	// match: (MOVHreg <t> x:(MOVHUload [off] {sym} ptr mem))
  4530  	// cond: x.Uses == 1 && clobber(x)
  4531  	// result: @x.Block (MOVHload <t> [off] {sym} ptr mem)
  4532  	for {
  4533  		t := v.Type
  4534  		x := v.Args[0]
  4535  		if x.Op != OpMIPSMOVHUload {
  4536  			break
  4537  		}
  4538  		off := x.AuxInt
  4539  		sym := x.Aux
  4540  		_ = x.Args[1]
  4541  		ptr := x.Args[0]
  4542  		mem := x.Args[1]
  4543  		if !(x.Uses == 1 && clobber(x)) {
  4544  			break
  4545  		}
  4546  		b = x.Block
  4547  		v0 := b.NewValue0(v.Pos, OpMIPSMOVHload, t)
  4548  		v.reset(OpCopy)
  4549  		v.AddArg(v0)
  4550  		v0.AuxInt = off
  4551  		v0.Aux = sym
  4552  		v0.AddArg(ptr)
  4553  		v0.AddArg(mem)
  4554  		return true
  4555  	}
  4556  	// match: (MOVHreg (ANDconst [c] x))
  4557  	// cond: c & 0x8000 == 0
  4558  	// result: (ANDconst [c&0x7fff] x)
  4559  	for {
  4560  		v_0 := v.Args[0]
  4561  		if v_0.Op != OpMIPSANDconst {
  4562  			break
  4563  		}
  4564  		c := v_0.AuxInt
  4565  		x := v_0.Args[0]
  4566  		if !(c&0x8000 == 0) {
  4567  			break
  4568  		}
  4569  		v.reset(OpMIPSANDconst)
  4570  		v.AuxInt = c & 0x7fff
  4571  		v.AddArg(x)
  4572  		return true
  4573  	}
  4574  	// match: (MOVHreg (MOVWconst [c]))
  4575  	// cond:
  4576  	// result: (MOVWconst [int64(int16(c))])
  4577  	for {
  4578  		v_0 := v.Args[0]
  4579  		if v_0.Op != OpMIPSMOVWconst {
  4580  			break
  4581  		}
  4582  		c := v_0.AuxInt
  4583  		v.reset(OpMIPSMOVWconst)
  4584  		v.AuxInt = int64(int16(c))
  4585  		return true
  4586  	}
  4587  	return false
  4588  }
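// Note: the MOVHreg rules above mirror the MOVHUreg rules for sign extension.
// The ANDconst case requires bit 15 of the mask to be clear (c&0x8000 == 0),
// so the low halfword is known non-negative and the extension cannot change
// it; the mask is also trimmed to c&0x7fff. Constants fold as
// (MOVWconst [int64(int16(c))]), which sign-extends the low halfword,
// e.g. c = 0xFFFF folds to -1.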
  4589  func rewriteValueMIPS_OpMIPSMOVHstore_0(v *Value) bool {
  4590  	// match: (MOVHstore [off1] {sym} x:(ADDconst [off2] ptr) val mem)
  4591  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4592  	// result: (MOVHstore [off1+off2] {sym} ptr val mem)
  4593  	for {
  4594  		off1 := v.AuxInt
  4595  		sym := v.Aux
  4596  		_ = v.Args[2]
  4597  		x := v.Args[0]
  4598  		if x.Op != OpMIPSADDconst {
  4599  			break
  4600  		}
  4601  		off2 := x.AuxInt
  4602  		ptr := x.Args[0]
  4603  		val := v.Args[1]
  4604  		mem := v.Args[2]
  4605  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4606  			break
  4607  		}
  4608  		v.reset(OpMIPSMOVHstore)
  4609  		v.AuxInt = off1 + off2
  4610  		v.Aux = sym
  4611  		v.AddArg(ptr)
  4612  		v.AddArg(val)
  4613  		v.AddArg(mem)
  4614  		return true
  4615  	}
  4616  	// match: (MOVHstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem)
  4617  	// cond: canMergeSym(sym1,sym2)
  4618  	// result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
  4619  	for {
  4620  		off1 := v.AuxInt
  4621  		sym1 := v.Aux
  4622  		_ = v.Args[2]
  4623  		v_0 := v.Args[0]
  4624  		if v_0.Op != OpMIPSMOVWaddr {
  4625  			break
  4626  		}
  4627  		off2 := v_0.AuxInt
  4628  		sym2 := v_0.Aux
  4629  		ptr := v_0.Args[0]
  4630  		val := v.Args[1]
  4631  		mem := v.Args[2]
  4632  		if !(canMergeSym(sym1, sym2)) {
  4633  			break
  4634  		}
  4635  		v.reset(OpMIPSMOVHstore)
  4636  		v.AuxInt = off1 + off2
  4637  		v.Aux = mergeSym(sym1, sym2)
  4638  		v.AddArg(ptr)
  4639  		v.AddArg(val)
  4640  		v.AddArg(mem)
  4641  		return true
  4642  	}
  4643  	// match: (MOVHstore [off] {sym} ptr (MOVWconst [0]) mem)
  4644  	// cond:
  4645  	// result: (MOVHstorezero [off] {sym} ptr mem)
  4646  	for {
  4647  		off := v.AuxInt
  4648  		sym := v.Aux
  4649  		_ = v.Args[2]
  4650  		ptr := v.Args[0]
  4651  		v_1 := v.Args[1]
  4652  		if v_1.Op != OpMIPSMOVWconst {
  4653  			break
  4654  		}
  4655  		if v_1.AuxInt != 0 {
  4656  			break
  4657  		}
  4658  		mem := v.Args[2]
  4659  		v.reset(OpMIPSMOVHstorezero)
  4660  		v.AuxInt = off
  4661  		v.Aux = sym
  4662  		v.AddArg(ptr)
  4663  		v.AddArg(mem)
  4664  		return true
  4665  	}
  4666  	// match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
  4667  	// cond:
  4668  	// result: (MOVHstore [off] {sym} ptr x mem)
  4669  	for {
  4670  		off := v.AuxInt
  4671  		sym := v.Aux
  4672  		_ = v.Args[2]
  4673  		ptr := v.Args[0]
  4674  		v_1 := v.Args[1]
  4675  		if v_1.Op != OpMIPSMOVHreg {
  4676  			break
  4677  		}
  4678  		x := v_1.Args[0]
  4679  		mem := v.Args[2]
  4680  		v.reset(OpMIPSMOVHstore)
  4681  		v.AuxInt = off
  4682  		v.Aux = sym
  4683  		v.AddArg(ptr)
  4684  		v.AddArg(x)
  4685  		v.AddArg(mem)
  4686  		return true
  4687  	}
  4688  	// match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
  4689  	// cond:
  4690  	// result: (MOVHstore [off] {sym} ptr x mem)
  4691  	for {
  4692  		off := v.AuxInt
  4693  		sym := v.Aux
  4694  		_ = v.Args[2]
  4695  		ptr := v.Args[0]
  4696  		v_1 := v.Args[1]
  4697  		if v_1.Op != OpMIPSMOVHUreg {
  4698  			break
  4699  		}
  4700  		x := v_1.Args[0]
  4701  		mem := v.Args[2]
  4702  		v.reset(OpMIPSMOVHstore)
  4703  		v.AuxInt = off
  4704  		v.Aux = sym
  4705  		v.AddArg(ptr)
  4706  		v.AddArg(x)
  4707  		v.AddArg(mem)
  4708  		return true
  4709  	}
  4710  	// match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
  4711  	// cond:
  4712  	// result: (MOVHstore [off] {sym} ptr x mem)
  4713  	for {
  4714  		off := v.AuxInt
  4715  		sym := v.Aux
  4716  		_ = v.Args[2]
  4717  		ptr := v.Args[0]
  4718  		v_1 := v.Args[1]
  4719  		if v_1.Op != OpMIPSMOVWreg {
  4720  			break
  4721  		}
  4722  		x := v_1.Args[0]
  4723  		mem := v.Args[2]
  4724  		v.reset(OpMIPSMOVHstore)
  4725  		v.AuxInt = off
  4726  		v.Aux = sym
  4727  		v.AddArg(ptr)
  4728  		v.AddArg(x)
  4729  		v.AddArg(mem)
  4730  		return true
  4731  	}
  4732  	return false
  4733  }
  4734  func rewriteValueMIPS_OpMIPSMOVHstorezero_0(v *Value) bool {
  4735  	// match: (MOVHstorezero [off1] {sym} x:(ADDconst [off2] ptr) mem)
  4736  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4737  	// result: (MOVHstorezero [off1+off2] {sym} ptr mem)
  4738  	for {
  4739  		off1 := v.AuxInt
  4740  		sym := v.Aux
  4741  		_ = v.Args[1]
  4742  		x := v.Args[0]
  4743  		if x.Op != OpMIPSADDconst {
  4744  			break
  4745  		}
  4746  		off2 := x.AuxInt
  4747  		ptr := x.Args[0]
  4748  		mem := v.Args[1]
  4749  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4750  			break
  4751  		}
  4752  		v.reset(OpMIPSMOVHstorezero)
  4753  		v.AuxInt = off1 + off2
  4754  		v.Aux = sym
  4755  		v.AddArg(ptr)
  4756  		v.AddArg(mem)
  4757  		return true
  4758  	}
  4759  	// match: (MOVHstorezero [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  4760  	// cond: canMergeSym(sym1,sym2)
  4761  	// result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4762  	for {
  4763  		off1 := v.AuxInt
  4764  		sym1 := v.Aux
  4765  		_ = v.Args[1]
  4766  		v_0 := v.Args[0]
  4767  		if v_0.Op != OpMIPSMOVWaddr {
  4768  			break
  4769  		}
  4770  		off2 := v_0.AuxInt
  4771  		sym2 := v_0.Aux
  4772  		ptr := v_0.Args[0]
  4773  		mem := v.Args[1]
  4774  		if !(canMergeSym(sym1, sym2)) {
  4775  			break
  4776  		}
  4777  		v.reset(OpMIPSMOVHstorezero)
  4778  		v.AuxInt = off1 + off2
  4779  		v.Aux = mergeSym(sym1, sym2)
  4780  		v.AddArg(ptr)
  4781  		v.AddArg(mem)
  4782  		return true
  4783  	}
  4784  	return false
  4785  }
  4786  func rewriteValueMIPS_OpMIPSMOVWload_0(v *Value) bool {
  4787  	// match: (MOVWload [off1] {sym} x:(ADDconst [off2] ptr) mem)
  4788  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4789  	// result: (MOVWload [off1+off2] {sym} ptr mem)
  4790  	for {
  4791  		off1 := v.AuxInt
  4792  		sym := v.Aux
  4793  		_ = v.Args[1]
  4794  		x := v.Args[0]
  4795  		if x.Op != OpMIPSADDconst {
  4796  			break
  4797  		}
  4798  		off2 := x.AuxInt
  4799  		ptr := x.Args[0]
  4800  		mem := v.Args[1]
  4801  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4802  			break
  4803  		}
  4804  		v.reset(OpMIPSMOVWload)
  4805  		v.AuxInt = off1 + off2
  4806  		v.Aux = sym
  4807  		v.AddArg(ptr)
  4808  		v.AddArg(mem)
  4809  		return true
  4810  	}
  4811  	// match: (MOVWload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  4812  	// cond: canMergeSym(sym1,sym2)
  4813  	// result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4814  	for {
  4815  		off1 := v.AuxInt
  4816  		sym1 := v.Aux
  4817  		_ = v.Args[1]
  4818  		v_0 := v.Args[0]
  4819  		if v_0.Op != OpMIPSMOVWaddr {
  4820  			break
  4821  		}
  4822  		off2 := v_0.AuxInt
  4823  		sym2 := v_0.Aux
  4824  		ptr := v_0.Args[0]
  4825  		mem := v.Args[1]
  4826  		if !(canMergeSym(sym1, sym2)) {
  4827  			break
  4828  		}
  4829  		v.reset(OpMIPSMOVWload)
  4830  		v.AuxInt = off1 + off2
  4831  		v.Aux = mergeSym(sym1, sym2)
  4832  		v.AddArg(ptr)
  4833  		v.AddArg(mem)
  4834  		return true
  4835  	}
  4836  	// match: (MOVWload [off] {sym} ptr (MOVWstore [off2] {sym2} ptr2 x _))
  4837  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  4838  	// result: x
  4839  	for {
  4840  		off := v.AuxInt
  4841  		sym := v.Aux
  4842  		_ = v.Args[1]
  4843  		ptr := v.Args[0]
  4844  		v_1 := v.Args[1]
  4845  		if v_1.Op != OpMIPSMOVWstore {
  4846  			break
  4847  		}
  4848  		off2 := v_1.AuxInt
  4849  		sym2 := v_1.Aux
  4850  		_ = v_1.Args[2]
  4851  		ptr2 := v_1.Args[0]
  4852  		x := v_1.Args[1]
  4853  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  4854  			break
  4855  		}
  4856  		v.reset(OpCopy)
  4857  		v.Type = x.Type
  4858  		v.AddArg(x)
  4859  		return true
  4860  	}
  4861  	return false
  4862  }
  4863  func rewriteValueMIPS_OpMIPSMOVWreg_0(v *Value) bool {
  4864  	// match: (MOVWreg x)
  4865  	// cond: x.Uses == 1
  4866  	// result: (MOVWnop x)
  4867  	for {
  4868  		x := v.Args[0]
  4869  		if !(x.Uses == 1) {
  4870  			break
  4871  		}
  4872  		v.reset(OpMIPSMOVWnop)
  4873  		v.AddArg(x)
  4874  		return true
  4875  	}
  4876  	// match: (MOVWreg (MOVWconst [c]))
  4877  	// cond:
  4878  	// result: (MOVWconst [c])
  4879  	for {
  4880  		v_0 := v.Args[0]
  4881  		if v_0.Op != OpMIPSMOVWconst {
  4882  			break
  4883  		}
  4884  		c := v_0.AuxInt
  4885  		v.reset(OpMIPSMOVWconst)
  4886  		v.AuxInt = c
  4887  		return true
  4888  	}
  4889  	return false
  4890  }
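// Note: (MOVWreg x) when x.Uses == 1 is rewritten to MOVWnop, i.e. when the
// moved value has no other uses the move can share x's register; MOVWnop
// appears to exist only to carry the type and should emit no instruction.
// A MOVWreg of a constant is folded to the constant itself.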
  4891  func rewriteValueMIPS_OpMIPSMOVWstore_0(v *Value) bool {
  4892  	// match: (MOVWstore [off1] {sym} x:(ADDconst [off2] ptr) val mem)
  4893  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4894  	// result: (MOVWstore [off1+off2] {sym} ptr val mem)
  4895  	for {
  4896  		off1 := v.AuxInt
  4897  		sym := v.Aux
  4898  		_ = v.Args[2]
  4899  		x := v.Args[0]
  4900  		if x.Op != OpMIPSADDconst {
  4901  			break
  4902  		}
  4903  		off2 := x.AuxInt
  4904  		ptr := x.Args[0]
  4905  		val := v.Args[1]
  4906  		mem := v.Args[2]
  4907  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4908  			break
  4909  		}
  4910  		v.reset(OpMIPSMOVWstore)
  4911  		v.AuxInt = off1 + off2
  4912  		v.Aux = sym
  4913  		v.AddArg(ptr)
  4914  		v.AddArg(val)
  4915  		v.AddArg(mem)
  4916  		return true
  4917  	}
  4918  	// match: (MOVWstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem)
  4919  	// cond: canMergeSym(sym1,sym2)
  4920  	// result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
  4921  	for {
  4922  		off1 := v.AuxInt
  4923  		sym1 := v.Aux
  4924  		_ = v.Args[2]
  4925  		v_0 := v.Args[0]
  4926  		if v_0.Op != OpMIPSMOVWaddr {
  4927  			break
  4928  		}
  4929  		off2 := v_0.AuxInt
  4930  		sym2 := v_0.Aux
  4931  		ptr := v_0.Args[0]
  4932  		val := v.Args[1]
  4933  		mem := v.Args[2]
  4934  		if !(canMergeSym(sym1, sym2)) {
  4935  			break
  4936  		}
  4937  		v.reset(OpMIPSMOVWstore)
  4938  		v.AuxInt = off1 + off2
  4939  		v.Aux = mergeSym(sym1, sym2)
  4940  		v.AddArg(ptr)
  4941  		v.AddArg(val)
  4942  		v.AddArg(mem)
  4943  		return true
  4944  	}
  4945  	// match: (MOVWstore [off] {sym} ptr (MOVWconst [0]) mem)
  4946  	// cond:
  4947  	// result: (MOVWstorezero [off] {sym} ptr mem)
  4948  	for {
  4949  		off := v.AuxInt
  4950  		sym := v.Aux
  4951  		_ = v.Args[2]
  4952  		ptr := v.Args[0]
  4953  		v_1 := v.Args[1]
  4954  		if v_1.Op != OpMIPSMOVWconst {
  4955  			break
  4956  		}
  4957  		if v_1.AuxInt != 0 {
  4958  			break
  4959  		}
  4960  		mem := v.Args[2]
  4961  		v.reset(OpMIPSMOVWstorezero)
  4962  		v.AuxInt = off
  4963  		v.Aux = sym
  4964  		v.AddArg(ptr)
  4965  		v.AddArg(mem)
  4966  		return true
  4967  	}
  4968  	// match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
  4969  	// cond:
  4970  	// result: (MOVWstore [off] {sym} ptr x mem)
  4971  	for {
  4972  		off := v.AuxInt
  4973  		sym := v.Aux
  4974  		_ = v.Args[2]
  4975  		ptr := v.Args[0]
  4976  		v_1 := v.Args[1]
  4977  		if v_1.Op != OpMIPSMOVWreg {
  4978  			break
  4979  		}
  4980  		x := v_1.Args[0]
  4981  		mem := v.Args[2]
  4982  		v.reset(OpMIPSMOVWstore)
  4983  		v.AuxInt = off
  4984  		v.Aux = sym
  4985  		v.AddArg(ptr)
  4986  		v.AddArg(x)
  4987  		v.AddArg(mem)
  4988  		return true
  4989  	}
  4990  	return false
  4991  }
  4992  func rewriteValueMIPS_OpMIPSMOVWstorezero_0(v *Value) bool {
  4993  	// match: (MOVWstorezero [off1] {sym} x:(ADDconst [off2] ptr) mem)
  4994  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4995  	// result: (MOVWstorezero [off1+off2] {sym} ptr mem)
  4996  	for {
  4997  		off1 := v.AuxInt
  4998  		sym := v.Aux
  4999  		_ = v.Args[1]
  5000  		x := v.Args[0]
  5001  		if x.Op != OpMIPSADDconst {
  5002  			break
  5003  		}
  5004  		off2 := x.AuxInt
  5005  		ptr := x.Args[0]
  5006  		mem := v.Args[1]
  5007  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  5008  			break
  5009  		}
  5010  		v.reset(OpMIPSMOVWstorezero)
  5011  		v.AuxInt = off1 + off2
  5012  		v.Aux = sym
  5013  		v.AddArg(ptr)
  5014  		v.AddArg(mem)
  5015  		return true
  5016  	}
  5017  	// match: (MOVWstorezero [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  5018  	// cond: canMergeSym(sym1,sym2)
  5019  	// result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  5020  	for {
  5021  		off1 := v.AuxInt
  5022  		sym1 := v.Aux
  5023  		_ = v.Args[1]
  5024  		v_0 := v.Args[0]
  5025  		if v_0.Op != OpMIPSMOVWaddr {
  5026  			break
  5027  		}
  5028  		off2 := v_0.AuxInt
  5029  		sym2 := v_0.Aux
  5030  		ptr := v_0.Args[0]
  5031  		mem := v.Args[1]
  5032  		if !(canMergeSym(sym1, sym2)) {
  5033  			break
  5034  		}
  5035  		v.reset(OpMIPSMOVWstorezero)
  5036  		v.AuxInt = off1 + off2
  5037  		v.Aux = mergeSym(sym1, sym2)
  5038  		v.AddArg(ptr)
  5039  		v.AddArg(mem)
  5040  		return true
  5041  	}
  5042  	return false
  5043  }
  5044  func rewriteValueMIPS_OpMIPSMUL_0(v *Value) bool {
  5045  	// match: (MUL (MOVWconst [0]) _)
  5046  	// cond:
  5047  	// result: (MOVWconst [0])
  5048  	for {
  5049  		_ = v.Args[1]
  5050  		v_0 := v.Args[0]
  5051  		if v_0.Op != OpMIPSMOVWconst {
  5052  			break
  5053  		}
  5054  		if v_0.AuxInt != 0 {
  5055  			break
  5056  		}
  5057  		v.reset(OpMIPSMOVWconst)
  5058  		v.AuxInt = 0
  5059  		return true
  5060  	}
  5061  	// match: (MUL _ (MOVWconst [0]))
  5062  	// cond:
  5063  	// result: (MOVWconst [0])
  5064  	for {
  5065  		_ = v.Args[1]
  5066  		v_1 := v.Args[1]
  5067  		if v_1.Op != OpMIPSMOVWconst {
  5068  			break
  5069  		}
  5070  		if v_1.AuxInt != 0 {
  5071  			break
  5072  		}
  5073  		v.reset(OpMIPSMOVWconst)
  5074  		v.AuxInt = 0
  5075  		return true
  5076  	}
  5077  	// match: (MUL (MOVWconst [1]) x)
  5078  	// cond:
  5079  	// result: x
  5080  	for {
  5081  		_ = v.Args[1]
  5082  		v_0 := v.Args[0]
  5083  		if v_0.Op != OpMIPSMOVWconst {
  5084  			break
  5085  		}
  5086  		if v_0.AuxInt != 1 {
  5087  			break
  5088  		}
  5089  		x := v.Args[1]
  5090  		v.reset(OpCopy)
  5091  		v.Type = x.Type
  5092  		v.AddArg(x)
  5093  		return true
  5094  	}
  5095  	// match: (MUL x (MOVWconst [1]))
  5096  	// cond:
  5097  	// result: x
  5098  	for {
  5099  		_ = v.Args[1]
  5100  		x := v.Args[0]
  5101  		v_1 := v.Args[1]
  5102  		if v_1.Op != OpMIPSMOVWconst {
  5103  			break
  5104  		}
  5105  		if v_1.AuxInt != 1 {
  5106  			break
  5107  		}
  5108  		v.reset(OpCopy)
  5109  		v.Type = x.Type
  5110  		v.AddArg(x)
  5111  		return true
  5112  	}
  5113  	// match: (MUL (MOVWconst [-1]) x)
  5114  	// cond:
  5115  	// result: (NEG x)
  5116  	for {
  5117  		_ = v.Args[1]
  5118  		v_0 := v.Args[0]
  5119  		if v_0.Op != OpMIPSMOVWconst {
  5120  			break
  5121  		}
  5122  		if v_0.AuxInt != -1 {
  5123  			break
  5124  		}
  5125  		x := v.Args[1]
  5126  		v.reset(OpMIPSNEG)
  5127  		v.AddArg(x)
  5128  		return true
  5129  	}
  5130  	// match: (MUL x (MOVWconst [-1]))
  5131  	// cond:
  5132  	// result: (NEG x)
  5133  	for {
  5134  		_ = v.Args[1]
  5135  		x := v.Args[0]
  5136  		v_1 := v.Args[1]
  5137  		if v_1.Op != OpMIPSMOVWconst {
  5138  			break
  5139  		}
  5140  		if v_1.AuxInt != -1 {
  5141  			break
  5142  		}
  5143  		v.reset(OpMIPSNEG)
  5144  		v.AddArg(x)
  5145  		return true
  5146  	}
  5147  	// match: (MUL (MOVWconst [c]) x)
  5148  	// cond: isPowerOfTwo(int64(uint32(c)))
  5149  	// result: (SLLconst [log2(int64(uint32(c)))] x)
  5150  	for {
  5151  		_ = v.Args[1]
  5152  		v_0 := v.Args[0]
  5153  		if v_0.Op != OpMIPSMOVWconst {
  5154  			break
  5155  		}
  5156  		c := v_0.AuxInt
  5157  		x := v.Args[1]
  5158  		if !(isPowerOfTwo(int64(uint32(c)))) {
  5159  			break
  5160  		}
  5161  		v.reset(OpMIPSSLLconst)
  5162  		v.AuxInt = log2(int64(uint32(c)))
  5163  		v.AddArg(x)
  5164  		return true
  5165  	}
  5166  	// match: (MUL x (MOVWconst [c]))
  5167  	// cond: isPowerOfTwo(int64(uint32(c)))
  5168  	// result: (SLLconst [log2(int64(uint32(c)))] x)
  5169  	for {
  5170  		_ = v.Args[1]
  5171  		x := v.Args[0]
  5172  		v_1 := v.Args[1]
  5173  		if v_1.Op != OpMIPSMOVWconst {
  5174  			break
  5175  		}
  5176  		c := v_1.AuxInt
  5177  		if !(isPowerOfTwo(int64(uint32(c)))) {
  5178  			break
  5179  		}
  5180  		v.reset(OpMIPSSLLconst)
  5181  		v.AuxInt = log2(int64(uint32(c)))
  5182  		v.AddArg(x)
  5183  		return true
  5184  	}
  5185  	// match: (MUL (MOVWconst [c]) (MOVWconst [d]))
  5186  	// cond:
  5187  	// result: (MOVWconst [int64(int32(c)*int32(d))])
  5188  	for {
  5189  		_ = v.Args[1]
  5190  		v_0 := v.Args[0]
  5191  		if v_0.Op != OpMIPSMOVWconst {
  5192  			break
  5193  		}
  5194  		c := v_0.AuxInt
  5195  		v_1 := v.Args[1]
  5196  		if v_1.Op != OpMIPSMOVWconst {
  5197  			break
  5198  		}
  5199  		d := v_1.AuxInt
  5200  		v.reset(OpMIPSMOVWconst)
  5201  		v.AuxInt = int64(int32(c) * int32(d))
  5202  		return true
  5203  	}
  5204  	// match: (MUL (MOVWconst [d]) (MOVWconst [c]))
  5205  	// cond:
  5206  	// result: (MOVWconst [int64(int32(c)*int32(d))])
  5207  	for {
  5208  		_ = v.Args[1]
  5209  		v_0 := v.Args[0]
  5210  		if v_0.Op != OpMIPSMOVWconst {
  5211  			break
  5212  		}
  5213  		d := v_0.AuxInt
  5214  		v_1 := v.Args[1]
  5215  		if v_1.Op != OpMIPSMOVWconst {
  5216  			break
  5217  		}
  5218  		c := v_1.AuxInt
  5219  		v.reset(OpMIPSMOVWconst)
  5220  		v.AuxInt = int64(int32(c) * int32(d))
  5221  		return true
  5222  	}
  5223  	return false
  5224  }
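// Note on the MUL rules above: standard strength reduction and constant
// folding. Multiplying by 0 yields 0, by 1 yields the other operand, by -1
// becomes a negation, and by a power of two (of the 32-bit value) becomes a
// left shift, e.g. (MUL (MOVWconst [8]) x) -> (SLLconst [3] x), since
// 8 = 1<<3 and log2(8) = 3. Two constants fold with 32-bit wraparound via
// int64(int32(c) * int32(d)). Both operand orders are matched by separate
// rule bodies above.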
  5225  func rewriteValueMIPS_OpMIPSNEG_0(v *Value) bool {
  5226  	// match: (NEG (MOVWconst [c]))
  5227  	// cond:
  5228  	// result: (MOVWconst [int64(int32(-c))])
  5229  	for {
  5230  		v_0 := v.Args[0]
  5231  		if v_0.Op != OpMIPSMOVWconst {
  5232  			break
  5233  		}
  5234  		c := v_0.AuxInt
  5235  		v.reset(OpMIPSMOVWconst)
  5236  		v.AuxInt = int64(int32(-c))
  5237  		return true
  5238  	}
  5239  	return false
  5240  }
  5241  func rewriteValueMIPS_OpMIPSNOR_0(v *Value) bool {
  5242  	// match: (NOR x (MOVWconst [c]))
  5243  	// cond:
  5244  	// result: (NORconst [c] x)
  5245  	for {
  5246  		_ = v.Args[1]
  5247  		x := v.Args[0]
  5248  		v_1 := v.Args[1]
  5249  		if v_1.Op != OpMIPSMOVWconst {
  5250  			break
  5251  		}
  5252  		c := v_1.AuxInt
  5253  		v.reset(OpMIPSNORconst)
  5254  		v.AuxInt = c
  5255  		v.AddArg(x)
  5256  		return true
  5257  	}
  5258  	// match: (NOR (MOVWconst [c]) x)
  5259  	// cond:
  5260  	// result: (NORconst [c] x)
  5261  	for {
  5262  		_ = v.Args[1]
  5263  		v_0 := v.Args[0]
  5264  		if v_0.Op != OpMIPSMOVWconst {
  5265  			break
  5266  		}
  5267  		c := v_0.AuxInt
  5268  		x := v.Args[1]
  5269  		v.reset(OpMIPSNORconst)
  5270  		v.AuxInt = c
  5271  		v.AddArg(x)
  5272  		return true
  5273  	}
  5274  	return false
  5275  }
  5276  func rewriteValueMIPS_OpMIPSNORconst_0(v *Value) bool {
  5277  	// match: (NORconst [c] (MOVWconst [d]))
  5278  	// cond:
  5279  	// result: (MOVWconst [^(c|d)])
  5280  	for {
  5281  		c := v.AuxInt
  5282  		v_0 := v.Args[0]
  5283  		if v_0.Op != OpMIPSMOVWconst {
  5284  			break
  5285  		}
  5286  		d := v_0.AuxInt
  5287  		v.reset(OpMIPSMOVWconst)
  5288  		v.AuxInt = ^(c | d)
  5289  		return true
  5290  	}
  5291  	return false
  5292  }
  5293  func rewriteValueMIPS_OpMIPSOR_0(v *Value) bool {
  5294  	b := v.Block
  5295  	_ = b
  5296  	// match: (OR x (MOVWconst [c]))
  5297  	// cond:
  5298  	// result: (ORconst [c] x)
  5299  	for {
  5300  		_ = v.Args[1]
  5301  		x := v.Args[0]
  5302  		v_1 := v.Args[1]
  5303  		if v_1.Op != OpMIPSMOVWconst {
  5304  			break
  5305  		}
  5306  		c := v_1.AuxInt
  5307  		v.reset(OpMIPSORconst)
  5308  		v.AuxInt = c
  5309  		v.AddArg(x)
  5310  		return true
  5311  	}
  5312  	// match: (OR (MOVWconst [c]) x)
  5313  	// cond:
  5314  	// result: (ORconst [c] x)
  5315  	for {
  5316  		_ = v.Args[1]
  5317  		v_0 := v.Args[0]
  5318  		if v_0.Op != OpMIPSMOVWconst {
  5319  			break
  5320  		}
  5321  		c := v_0.AuxInt
  5322  		x := v.Args[1]
  5323  		v.reset(OpMIPSORconst)
  5324  		v.AuxInt = c
  5325  		v.AddArg(x)
  5326  		return true
  5327  	}
  5328  	// match: (OR x x)
  5329  	// cond:
  5330  	// result: x
  5331  	for {
  5332  		_ = v.Args[1]
  5333  		x := v.Args[0]
  5334  		if x != v.Args[1] {
  5335  			break
  5336  		}
  5337  		v.reset(OpCopy)
  5338  		v.Type = x.Type
  5339  		v.AddArg(x)
  5340  		return true
  5341  	}
  5342  	// match: (OR (SGTUzero x) (SGTUzero y))
  5343  	// cond:
  5344  	// result: (SGTUzero (OR <x.Type> x y))
  5345  	for {
  5346  		_ = v.Args[1]
  5347  		v_0 := v.Args[0]
  5348  		if v_0.Op != OpMIPSSGTUzero {
  5349  			break
  5350  		}
  5351  		x := v_0.Args[0]
  5352  		v_1 := v.Args[1]
  5353  		if v_1.Op != OpMIPSSGTUzero {
  5354  			break
  5355  		}
  5356  		y := v_1.Args[0]
  5357  		v.reset(OpMIPSSGTUzero)
  5358  		v0 := b.NewValue0(v.Pos, OpMIPSOR, x.Type)
  5359  		v0.AddArg(x)
  5360  		v0.AddArg(y)
  5361  		v.AddArg(v0)
  5362  		return true
  5363  	}
  5364  	// match: (OR (SGTUzero y) (SGTUzero x))
  5365  	// cond:
  5366  	// result: (SGTUzero (OR <x.Type> x y))
  5367  	for {
  5368  		_ = v.Args[1]
  5369  		v_0 := v.Args[0]
  5370  		if v_0.Op != OpMIPSSGTUzero {
  5371  			break
  5372  		}
  5373  		y := v_0.Args[0]
  5374  		v_1 := v.Args[1]
  5375  		if v_1.Op != OpMIPSSGTUzero {
  5376  			break
  5377  		}
  5378  		x := v_1.Args[0]
  5379  		v.reset(OpMIPSSGTUzero)
  5380  		v0 := b.NewValue0(v.Pos, OpMIPSOR, x.Type)
  5381  		v0.AddArg(x)
  5382  		v0.AddArg(y)
  5383  		v.AddArg(v0)
  5384  		return true
  5385  	}
  5386  	return false
  5387  }
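// Note: besides constant folding and idempotence (OR x x -> x), the last two
// OR rules above combine zero-tests. SGTUzero y computes "y > 0 unsigned",
// i.e. "y != 0", so (OR (SGTUzero x) (SGTUzero y)) is "x != 0 || y != 0",
// which equals "(x | y) != 0" and is rewritten to (SGTUzero (OR <x.Type> x y)),
// saving one comparison.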
  5388  func rewriteValueMIPS_OpMIPSORconst_0(v *Value) bool {
  5389  	// match: (ORconst [0] x)
  5390  	// cond:
  5391  	// result: x
  5392  	for {
  5393  		if v.AuxInt != 0 {
  5394  			break
  5395  		}
  5396  		x := v.Args[0]
  5397  		v.reset(OpCopy)
  5398  		v.Type = x.Type
  5399  		v.AddArg(x)
  5400  		return true
  5401  	}
  5402  	// match: (ORconst [-1] _)
  5403  	// cond:
  5404  	// result: (MOVWconst [-1])
  5405  	for {
  5406  		if v.AuxInt != -1 {
  5407  			break
  5408  		}
  5409  		v.reset(OpMIPSMOVWconst)
  5410  		v.AuxInt = -1
  5411  		return true
  5412  	}
  5413  	// match: (ORconst [c] (MOVWconst [d]))
  5414  	// cond:
  5415  	// result: (MOVWconst [c|d])
  5416  	for {
  5417  		c := v.AuxInt
  5418  		v_0 := v.Args[0]
  5419  		if v_0.Op != OpMIPSMOVWconst {
  5420  			break
  5421  		}
  5422  		d := v_0.AuxInt
  5423  		v.reset(OpMIPSMOVWconst)
  5424  		v.AuxInt = c | d
  5425  		return true
  5426  	}
  5427  	// match: (ORconst [c] (ORconst [d] x))
  5428  	// cond:
  5429  	// result: (ORconst [c|d] x)
  5430  	for {
  5431  		c := v.AuxInt
  5432  		v_0 := v.Args[0]
  5433  		if v_0.Op != OpMIPSORconst {
  5434  			break
  5435  		}
  5436  		d := v_0.AuxInt
  5437  		x := v_0.Args[0]
  5438  		v.reset(OpMIPSORconst)
  5439  		v.AuxInt = c | d
  5440  		v.AddArg(x)
  5441  		return true
  5442  	}
  5443  	return false
  5444  }
  5445  func rewriteValueMIPS_OpMIPSSGT_0(v *Value) bool {
  5446  	// match: (SGT (MOVWconst [c]) x)
  5447  	// cond:
  5448  	// result: (SGTconst [c] x)
  5449  	for {
  5450  		_ = v.Args[1]
  5451  		v_0 := v.Args[0]
  5452  		if v_0.Op != OpMIPSMOVWconst {
  5453  			break
  5454  		}
  5455  		c := v_0.AuxInt
  5456  		x := v.Args[1]
  5457  		v.reset(OpMIPSSGTconst)
  5458  		v.AuxInt = c
  5459  		v.AddArg(x)
  5460  		return true
  5461  	}
  5462  	// match: (SGT x (MOVWconst [0]))
  5463  	// cond:
  5464  	// result: (SGTzero x)
  5465  	for {
  5466  		_ = v.Args[1]
  5467  		x := v.Args[0]
  5468  		v_1 := v.Args[1]
  5469  		if v_1.Op != OpMIPSMOVWconst {
  5470  			break
  5471  		}
  5472  		if v_1.AuxInt != 0 {
  5473  			break
  5474  		}
  5475  		v.reset(OpMIPSSGTzero)
  5476  		v.AddArg(x)
  5477  		return true
  5478  	}
  5479  	return false
  5480  }
  5481  func rewriteValueMIPS_OpMIPSSGTU_0(v *Value) bool {
  5482  	// match: (SGTU (MOVWconst [c]) x)
  5483  	// cond:
  5484  	// result: (SGTUconst [c] x)
  5485  	for {
  5486  		_ = v.Args[1]
  5487  		v_0 := v.Args[0]
  5488  		if v_0.Op != OpMIPSMOVWconst {
  5489  			break
  5490  		}
  5491  		c := v_0.AuxInt
  5492  		x := v.Args[1]
  5493  		v.reset(OpMIPSSGTUconst)
  5494  		v.AuxInt = c
  5495  		v.AddArg(x)
  5496  		return true
  5497  	}
  5498  	// match: (SGTU x (MOVWconst [0]))
  5499  	// cond:
  5500  	// result: (SGTUzero x)
  5501  	for {
  5502  		_ = v.Args[1]
  5503  		x := v.Args[0]
  5504  		v_1 := v.Args[1]
  5505  		if v_1.Op != OpMIPSMOVWconst {
  5506  			break
  5507  		}
  5508  		if v_1.AuxInt != 0 {
  5509  			break
  5510  		}
  5511  		v.reset(OpMIPSSGTUzero)
  5512  		v.AddArg(x)
  5513  		return true
  5514  	}
  5515  	return false
  5516  }
  5517  func rewriteValueMIPS_OpMIPSSGTUconst_0(v *Value) bool {
  5518  	// match: (SGTUconst [c] (MOVWconst [d]))
  5519  	// cond: uint32(c)>uint32(d)
  5520  	// result: (MOVWconst [1])
  5521  	for {
  5522  		c := v.AuxInt
  5523  		v_0 := v.Args[0]
  5524  		if v_0.Op != OpMIPSMOVWconst {
  5525  			break
  5526  		}
  5527  		d := v_0.AuxInt
  5528  		if !(uint32(c) > uint32(d)) {
  5529  			break
  5530  		}
  5531  		v.reset(OpMIPSMOVWconst)
  5532  		v.AuxInt = 1
  5533  		return true
  5534  	}
  5535  	// match: (SGTUconst [c] (MOVWconst [d]))
  5536  	// cond: uint32(c)<=uint32(d)
  5537  	// result: (MOVWconst [0])
  5538  	for {
  5539  		c := v.AuxInt
  5540  		v_0 := v.Args[0]
  5541  		if v_0.Op != OpMIPSMOVWconst {
  5542  			break
  5543  		}
  5544  		d := v_0.AuxInt
  5545  		if !(uint32(c) <= uint32(d)) {
  5546  			break
  5547  		}
  5548  		v.reset(OpMIPSMOVWconst)
  5549  		v.AuxInt = 0
  5550  		return true
  5551  	}
  5552  	// match: (SGTUconst [c] (MOVBUreg _))
  5553  	// cond: 0xff < uint32(c)
  5554  	// result: (MOVWconst [1])
  5555  	for {
  5556  		c := v.AuxInt
  5557  		v_0 := v.Args[0]
  5558  		if v_0.Op != OpMIPSMOVBUreg {
  5559  			break
  5560  		}
  5561  		if !(0xff < uint32(c)) {
  5562  			break
  5563  		}
  5564  		v.reset(OpMIPSMOVWconst)
  5565  		v.AuxInt = 1
  5566  		return true
  5567  	}
  5568  	// match: (SGTUconst [c] (MOVHUreg _))
  5569  	// cond: 0xffff < uint32(c)
  5570  	// result: (MOVWconst [1])
  5571  	for {
  5572  		c := v.AuxInt
  5573  		v_0 := v.Args[0]
  5574  		if v_0.Op != OpMIPSMOVHUreg {
  5575  			break
  5576  		}
  5577  		if !(0xffff < uint32(c)) {
  5578  			break
  5579  		}
  5580  		v.reset(OpMIPSMOVWconst)
  5581  		v.AuxInt = 1
  5582  		return true
  5583  	}
  5584  	// match: (SGTUconst [c] (ANDconst [m] _))
  5585  	// cond: uint32(m) < uint32(c)
  5586  	// result: (MOVWconst [1])
  5587  	for {
  5588  		c := v.AuxInt
  5589  		v_0 := v.Args[0]
  5590  		if v_0.Op != OpMIPSANDconst {
  5591  			break
  5592  		}
  5593  		m := v_0.AuxInt
  5594  		if !(uint32(m) < uint32(c)) {
  5595  			break
  5596  		}
  5597  		v.reset(OpMIPSMOVWconst)
  5598  		v.AuxInt = 1
  5599  		return true
  5600  	}
  5601  	// match: (SGTUconst [c] (SRLconst _ [d]))
  5602  	// cond: uint32(d) <= 31 && 1<<(32-uint32(d)) <= uint32(c)
  5603  	// result: (MOVWconst [1])
  5604  	for {
  5605  		c := v.AuxInt
  5606  		v_0 := v.Args[0]
  5607  		if v_0.Op != OpMIPSSRLconst {
  5608  			break
  5609  		}
  5610  		d := v_0.AuxInt
  5611  		if !(uint32(d) <= 31 && 1<<(32-uint32(d)) <= uint32(c)) {
  5612  			break
  5613  		}
  5614  		v.reset(OpMIPSMOVWconst)
  5615  		v.AuxInt = 1
  5616  		return true
  5617  	}
  5618  	return false
  5619  }
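// Note on the SGTUconst rules above: SGTUconst [c] x computes "c > x" as an
// unsigned 32-bit comparison. Besides folding two constants, the rules use
// value-range facts about the operand: a MOVBUreg result is at most 0xff, a
// MOVHUreg result at most 0xffff, an ANDconst [m] result at most m, and an
// SRLconst [d] result is below 1<<(32-d). Whenever that bound is below c,
// the comparison is known to be 1.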
  5620  func rewriteValueMIPS_OpMIPSSGTUzero_0(v *Value) bool {
  5621  	// match: (SGTUzero (MOVWconst [d]))
  5622  	// cond: uint32(d) != 0
  5623  	// result: (MOVWconst [1])
  5624  	for {
  5625  		v_0 := v.Args[0]
  5626  		if v_0.Op != OpMIPSMOVWconst {
  5627  			break
  5628  		}
  5629  		d := v_0.AuxInt
  5630  		if !(uint32(d) != 0) {
  5631  			break
  5632  		}
  5633  		v.reset(OpMIPSMOVWconst)
  5634  		v.AuxInt = 1
  5635  		return true
  5636  	}
  5637  	// match: (SGTUzero (MOVWconst [d]))
  5638  	// cond: uint32(d) == 0
  5639  	// result: (MOVWconst [0])
  5640  	for {
  5641  		v_0 := v.Args[0]
  5642  		if v_0.Op != OpMIPSMOVWconst {
  5643  			break
  5644  		}
  5645  		d := v_0.AuxInt
  5646  		if !(uint32(d) == 0) {
  5647  			break
  5648  		}
  5649  		v.reset(OpMIPSMOVWconst)
  5650  		v.AuxInt = 0
  5651  		return true
  5652  	}
  5653  	return false
  5654  }
  5655  func rewriteValueMIPS_OpMIPSSGTconst_0(v *Value) bool {
  5656  	// match: (SGTconst [c] (MOVWconst [d]))
  5657  	// cond: int32(c) > int32(d)
  5658  	// result: (MOVWconst [1])
  5659  	for {
  5660  		c := v.AuxInt
  5661  		v_0 := v.Args[0]
  5662  		if v_0.Op != OpMIPSMOVWconst {
  5663  			break
  5664  		}
  5665  		d := v_0.AuxInt
  5666  		if !(int32(c) > int32(d)) {
  5667  			break
  5668  		}
  5669  		v.reset(OpMIPSMOVWconst)
  5670  		v.AuxInt = 1
  5671  		return true
  5672  	}
  5673  	// match: (SGTconst [c] (MOVWconst [d]))
  5674  	// cond: int32(c) <= int32(d)
  5675  	// result: (MOVWconst [0])
  5676  	for {
  5677  		c := v.AuxInt
  5678  		v_0 := v.Args[0]
  5679  		if v_0.Op != OpMIPSMOVWconst {
  5680  			break
  5681  		}
  5682  		d := v_0.AuxInt
  5683  		if !(int32(c) <= int32(d)) {
  5684  			break
  5685  		}
  5686  		v.reset(OpMIPSMOVWconst)
  5687  		v.AuxInt = 0
  5688  		return true
  5689  	}
  5690  	// match: (SGTconst [c] (MOVBreg _))
  5691  	// cond: 0x7f < int32(c)
  5692  	// result: (MOVWconst [1])
  5693  	for {
  5694  		c := v.AuxInt
  5695  		v_0 := v.Args[0]
  5696  		if v_0.Op != OpMIPSMOVBreg {
  5697  			break
  5698  		}
  5699  		if !(0x7f < int32(c)) {
  5700  			break
  5701  		}
  5702  		v.reset(OpMIPSMOVWconst)
  5703  		v.AuxInt = 1
  5704  		return true
  5705  	}
  5706  	// match: (SGTconst [c] (MOVBreg _))
  5707  	// cond: int32(c) <= -0x80
  5708  	// result: (MOVWconst [0])
  5709  	for {
  5710  		c := v.AuxInt
  5711  		v_0 := v.Args[0]
  5712  		if v_0.Op != OpMIPSMOVBreg {
  5713  			break
  5714  		}
  5715  		if !(int32(c) <= -0x80) {
  5716  			break
  5717  		}
  5718  		v.reset(OpMIPSMOVWconst)
  5719  		v.AuxInt = 0
  5720  		return true
  5721  	}
  5722  	// match: (SGTconst [c] (MOVBUreg _))
  5723  	// cond: 0xff < int32(c)
  5724  	// result: (MOVWconst [1])
  5725  	for {
  5726  		c := v.AuxInt
  5727  		v_0 := v.Args[0]
  5728  		if v_0.Op != OpMIPSMOVBUreg {
  5729  			break
  5730  		}
  5731  		if !(0xff < int32(c)) {
  5732  			break
  5733  		}
  5734  		v.reset(OpMIPSMOVWconst)
  5735  		v.AuxInt = 1
  5736  		return true
  5737  	}
  5738  	// match: (SGTconst [c] (MOVBUreg _))
  5739  	// cond: int32(c) < 0
  5740  	// result: (MOVWconst [0])
  5741  	for {
  5742  		c := v.AuxInt
  5743  		v_0 := v.Args[0]
  5744  		if v_0.Op != OpMIPSMOVBUreg {
  5745  			break
  5746  		}
  5747  		if !(int32(c) < 0) {
  5748  			break
  5749  		}
  5750  		v.reset(OpMIPSMOVWconst)
  5751  		v.AuxInt = 0
  5752  		return true
  5753  	}
  5754  	// match: (SGTconst [c] (MOVHreg _))
  5755  	// cond: 0x7fff < int32(c)
  5756  	// result: (MOVWconst [1])
  5757  	for {
  5758  		c := v.AuxInt
  5759  		v_0 := v.Args[0]
  5760  		if v_0.Op != OpMIPSMOVHreg {
  5761  			break
  5762  		}
  5763  		if !(0x7fff < int32(c)) {
  5764  			break
  5765  		}
  5766  		v.reset(OpMIPSMOVWconst)
  5767  		v.AuxInt = 1
  5768  		return true
  5769  	}
  5770  	// match: (SGTconst [c] (MOVHreg _))
  5771  	// cond: int32(c) <= -0x8000
  5772  	// result: (MOVWconst [0])
  5773  	for {
  5774  		c := v.AuxInt
  5775  		v_0 := v.Args[0]
  5776  		if v_0.Op != OpMIPSMOVHreg {
  5777  			break
  5778  		}
  5779  		if !(int32(c) <= -0x8000) {
  5780  			break
  5781  		}
  5782  		v.reset(OpMIPSMOVWconst)
  5783  		v.AuxInt = 0
  5784  		return true
  5785  	}
  5786  	// match: (SGTconst [c] (MOVHUreg _))
  5787  	// cond: 0xffff < int32(c)
  5788  	// result: (MOVWconst [1])
  5789  	for {
  5790  		c := v.AuxInt
  5791  		v_0 := v.Args[0]
  5792  		if v_0.Op != OpMIPSMOVHUreg {
  5793  			break
  5794  		}
  5795  		if !(0xffff < int32(c)) {
  5796  			break
  5797  		}
  5798  		v.reset(OpMIPSMOVWconst)
  5799  		v.AuxInt = 1
  5800  		return true
  5801  	}
  5802  	// match: (SGTconst [c] (MOVHUreg _))
  5803  	// cond: int32(c) < 0
  5804  	// result: (MOVWconst [0])
  5805  	for {
  5806  		c := v.AuxInt
  5807  		v_0 := v.Args[0]
  5808  		if v_0.Op != OpMIPSMOVHUreg {
  5809  			break
  5810  		}
  5811  		if !(int32(c) < 0) {
  5812  			break
  5813  		}
  5814  		v.reset(OpMIPSMOVWconst)
  5815  		v.AuxInt = 0
  5816  		return true
  5817  	}
  5818  	return false
  5819  }
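// Note: SGTconst has more than ten rules, so the generator splits them into
// the _0 chunk above and the _10 chunk below; the per-op dispatcher
// (rewriteValueMIPS) presumably chains them, trying _10 only after every rule
// in _0 fails to match. The _10 rules are the signed analog of the
// range-based SGTUconst folds: ANDconst and SRLconst bound the operand from
// above, so a larger non-negative c makes the comparison a constant 1.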
  5820  func rewriteValueMIPS_OpMIPSSGTconst_10(v *Value) bool {
  5821  	// match: (SGTconst [c] (ANDconst [m] _))
  5822  	// cond: 0 <= int32(m) && int32(m) < int32(c)
  5823  	// result: (MOVWconst [1])
  5824  	for {
  5825  		c := v.AuxInt
  5826  		v_0 := v.Args[0]
  5827  		if v_0.Op != OpMIPSANDconst {
  5828  			break
  5829  		}
  5830  		m := v_0.AuxInt
  5831  		if !(0 <= int32(m) && int32(m) < int32(c)) {
  5832  			break
  5833  		}
  5834  		v.reset(OpMIPSMOVWconst)
  5835  		v.AuxInt = 1
  5836  		return true
  5837  	}
  5838  	// match: (SGTconst [c] (SRLconst _ [d]))
  5839  	// cond: 0 <= int32(c) && uint32(d) <= 31 && 1<<(32-uint32(d)) <= int32(c)
  5840  	// result: (MOVWconst [1])
  5841  	for {
  5842  		c := v.AuxInt
  5843  		v_0 := v.Args[0]
  5844  		if v_0.Op != OpMIPSSRLconst {
  5845  			break
  5846  		}
  5847  		d := v_0.AuxInt
  5848  		if !(0 <= int32(c) && uint32(d) <= 31 && 1<<(32-uint32(d)) <= int32(c)) {
  5849  			break
  5850  		}
  5851  		v.reset(OpMIPSMOVWconst)
  5852  		v.AuxInt = 1
  5853  		return true
  5854  	}
  5855  	return false
  5856  }
  5857  func rewriteValueMIPS_OpMIPSSGTzero_0(v *Value) bool {
  5858  	// match: (SGTzero (MOVWconst [d]))
  5859  	// cond: int32(d) > 0
  5860  	// result: (MOVWconst [1])
  5861  	for {
  5862  		v_0 := v.Args[0]
  5863  		if v_0.Op != OpMIPSMOVWconst {
  5864  			break
  5865  		}
  5866  		d := v_0.AuxInt
  5867  		if !(int32(d) > 0) {
  5868  			break
  5869  		}
  5870  		v.reset(OpMIPSMOVWconst)
  5871  		v.AuxInt = 1
  5872  		return true
  5873  	}
  5874  	// match: (SGTzero (MOVWconst [d]))
  5875  	// cond: int32(d) <= 0
  5876  	// result: (MOVWconst [0])
  5877  	for {
  5878  		v_0 := v.Args[0]
  5879  		if v_0.Op != OpMIPSMOVWconst {
  5880  			break
  5881  		}
  5882  		d := v_0.AuxInt
  5883  		if !(int32(d) <= 0) {
  5884  			break
  5885  		}
  5886  		v.reset(OpMIPSMOVWconst)
  5887  		v.AuxInt = 0
  5888  		return true
  5889  	}
  5890  	return false
  5891  }
  5892  func rewriteValueMIPS_OpMIPSSLL_0(v *Value) bool {
  5893  	// match: (SLL _ (MOVWconst [c]))
  5894  	// cond: uint32(c)>=32
  5895  	// result: (MOVWconst [0])
  5896  	for {
  5897  		_ = v.Args[1]
  5898  		v_1 := v.Args[1]
  5899  		if v_1.Op != OpMIPSMOVWconst {
  5900  			break
  5901  		}
  5902  		c := v_1.AuxInt
  5903  		if !(uint32(c) >= 32) {
  5904  			break
  5905  		}
  5906  		v.reset(OpMIPSMOVWconst)
  5907  		v.AuxInt = 0
  5908  		return true
  5909  	}
  5910  	// match: (SLL x (MOVWconst [c]))
  5911  	// cond:
  5912  	// result: (SLLconst x [c])
  5913  	for {
  5914  		_ = v.Args[1]
  5915  		x := v.Args[0]
  5916  		v_1 := v.Args[1]
  5917  		if v_1.Op != OpMIPSMOVWconst {
  5918  			break
  5919  		}
  5920  		c := v_1.AuxInt
  5921  		v.reset(OpMIPSSLLconst)
  5922  		v.AuxInt = c
  5923  		v.AddArg(x)
  5924  		return true
  5925  	}
  5926  	return false
  5927  }
  5928  func rewriteValueMIPS_OpMIPSSLLconst_0(v *Value) bool {
  5929  	// match: (SLLconst [c] (MOVWconst [d]))
  5930  	// cond:
  5931  	// result: (MOVWconst [int64(int32(uint32(d)<<uint32(c)))])
  5932  	for {
  5933  		c := v.AuxInt
  5934  		v_0 := v.Args[0]
  5935  		if v_0.Op != OpMIPSMOVWconst {
  5936  			break
  5937  		}
  5938  		d := v_0.AuxInt
  5939  		v.reset(OpMIPSMOVWconst)
  5940  		v.AuxInt = int64(int32(uint32(d) << uint32(c)))
  5941  		return true
  5942  	}
  5943  	return false
  5944  }
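// Worked example of the SLLconst fold above: (SLLconst [3] (MOVWconst [5]))
// becomes (MOVWconst [40]), since uint32(5)<<3 == 40; the int32 conversion
// keeps the folded constant within 32-bit two's-complement range.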
  5945  func rewriteValueMIPS_OpMIPSSRA_0(v *Value) bool {
  5946  	// match: (SRA x (MOVWconst [c]))
  5947  	// cond: uint32(c)>=32
  5948  	// result: (SRAconst x [31])
  5949  	for {
  5950  		_ = v.Args[1]
  5951  		x := v.Args[0]
  5952  		v_1 := v.Args[1]
  5953  		if v_1.Op != OpMIPSMOVWconst {
  5954  			break
  5955  		}
  5956  		c := v_1.AuxInt
  5957  		if !(uint32(c) >= 32) {
  5958  			break
  5959  		}
  5960  		v.reset(OpMIPSSRAconst)
  5961  		v.AuxInt = 31
  5962  		v.AddArg(x)
  5963  		return true
  5964  	}
  5965  	// match: (SRA x (MOVWconst [c]))
  5966  	// cond:
  5967  	// result: (SRAconst x [c])
  5968  	for {
  5969  		_ = v.Args[1]
  5970  		x := v.Args[0]
  5971  		v_1 := v.Args[1]
  5972  		if v_1.Op != OpMIPSMOVWconst {
  5973  			break
  5974  		}
  5975  		c := v_1.AuxInt
  5976  		v.reset(OpMIPSSRAconst)
  5977  		v.AuxInt = c
  5978  		v.AddArg(x)
  5979  		return true
  5980  	}
  5981  	return false
  5982  }
  5983  func rewriteValueMIPS_OpMIPSSRAconst_0(v *Value) bool {
  5984  	// match: (SRAconst [c] (MOVWconst [d]))
  5985  	// cond:
  5986  	// result: (MOVWconst [int64(int32(d)>>uint32(c))])
  5987  	for {
  5988  		c := v.AuxInt
  5989  		v_0 := v.Args[0]
  5990  		if v_0.Op != OpMIPSMOVWconst {
  5991  			break
  5992  		}
  5993  		d := v_0.AuxInt
  5994  		v.reset(OpMIPSMOVWconst)
  5995  		v.AuxInt = int64(int32(d) >> uint32(c))
  5996  		return true
  5997  	}
  5998  	return false
  5999  }
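// Worked example of the SRAconst fold above: (SRAconst [1] (MOVWconst [-8]))
// becomes (MOVWconst [-4]); the arithmetic shift on int32 preserves the sign.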
  6000  func rewriteValueMIPS_OpMIPSSRL_0(v *Value) bool {
  6001  	// match: (SRL _ (MOVWconst [c]))
  6002  	// cond: uint32(c)>=32
  6003  	// result: (MOVWconst [0])
  6004  	for {
  6005  		_ = v.Args[1]
  6006  		v_1 := v.Args[1]
  6007  		if v_1.Op != OpMIPSMOVWconst {
  6008  			break
  6009  		}
  6010  		c := v_1.AuxInt
  6011  		if !(uint32(c) >= 32) {
  6012  			break
  6013  		}
  6014  		v.reset(OpMIPSMOVWconst)
  6015  		v.AuxInt = 0
  6016  		return true
  6017  	}
  6018  	// match: (SRL x (MOVWconst [c]))
  6019  	// cond:
  6020  	// result: (SRLconst x [c])
  6021  	for {
  6022  		_ = v.Args[1]
  6023  		x := v.Args[0]
  6024  		v_1 := v.Args[1]
  6025  		if v_1.Op != OpMIPSMOVWconst {
  6026  			break
  6027  		}
  6028  		c := v_1.AuxInt
  6029  		v.reset(OpMIPSSRLconst)
  6030  		v.AuxInt = c
  6031  		v.AddArg(x)
  6032  		return true
  6033  	}
  6034  	return false
  6035  }
  6036  func rewriteValueMIPS_OpMIPSSRLconst_0(v *Value) bool {
  6037  	// match: (SRLconst [c] (MOVWconst [d]))
  6038  	// cond:
  6039  	// result: (MOVWconst [int64(uint32(d)>>uint32(c))])
  6040  	for {
  6041  		c := v.AuxInt
  6042  		v_0 := v.Args[0]
  6043  		if v_0.Op != OpMIPSMOVWconst {
  6044  			break
  6045  		}
  6046  		d := v_0.AuxInt
  6047  		v.reset(OpMIPSMOVWconst)
  6048  		v.AuxInt = int64(uint32(d) >> uint32(c))
  6049  		return true
  6050  	}
  6051  	return false
  6052  }
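// Worked example of the SRLconst fold above: (SRLconst [1] (MOVWconst [-8]))
// becomes (MOVWconst [2147483644]), since the logical shift operates on
// uint32(-8) == 0xFFFFFFF8 and fills with zero bits.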
  6053  func rewriteValueMIPS_OpMIPSSUB_0(v *Value) bool {
  6054  	// match: (SUB x (MOVWconst [c]))
  6055  	// cond:
  6056  	// result: (SUBconst [c] x)
  6057  	for {
  6058  		_ = v.Args[1]
  6059  		x := v.Args[0]
  6060  		v_1 := v.Args[1]
  6061  		if v_1.Op != OpMIPSMOVWconst {
  6062  			break
  6063  		}
  6064  		c := v_1.AuxInt
  6065  		v.reset(OpMIPSSUBconst)
  6066  		v.AuxInt = c
  6067  		v.AddArg(x)
  6068  		return true
  6069  	}
  6070  	// match: (SUB x x)
  6071  	// cond:
  6072  	// result: (MOVWconst [0])
  6073  	for {
  6074  		_ = v.Args[1]
  6075  		x := v.Args[0]
  6076  		if x != v.Args[1] {
  6077  			break
  6078  		}
  6079  		v.reset(OpMIPSMOVWconst)
  6080  		v.AuxInt = 0
  6081  		return true
  6082  	}
  6083  	// match: (SUB (MOVWconst [0]) x)
  6084  	// cond:
  6085  	// result: (NEG x)
  6086  	for {
  6087  		_ = v.Args[1]
  6088  		v_0 := v.Args[0]
  6089  		if v_0.Op != OpMIPSMOVWconst {
  6090  			break
  6091  		}
  6092  		if v_0.AuxInt != 0 {
  6093  			break
  6094  		}
  6095  		x := v.Args[1]
  6096  		v.reset(OpMIPSNEG)
  6097  		v.AddArg(x)
  6098  		return true
  6099  	}
  6100  	return false
  6101  }
  6102  func rewriteValueMIPS_OpMIPSSUBconst_0(v *Value) bool {
  6103  	// match: (SUBconst [0] x)
  6104  	// cond:
  6105  	// result: x
  6106  	for {
  6107  		if v.AuxInt != 0 {
  6108  			break
  6109  		}
  6110  		x := v.Args[0]
  6111  		v.reset(OpCopy)
  6112  		v.Type = x.Type
  6113  		v.AddArg(x)
  6114  		return true
  6115  	}
  6116  	// match: (SUBconst [c] (MOVWconst [d]))
  6117  	// cond:
  6118  	// result: (MOVWconst [int64(int32(d-c))])
  6119  	for {
  6120  		c := v.AuxInt
  6121  		v_0 := v.Args[0]
  6122  		if v_0.Op != OpMIPSMOVWconst {
  6123  			break
  6124  		}
  6125  		d := v_0.AuxInt
  6126  		v.reset(OpMIPSMOVWconst)
  6127  		v.AuxInt = int64(int32(d - c))
  6128  		return true
  6129  	}
  6130  	// match: (SUBconst [c] (SUBconst [d] x))
  6131  	// cond:
  6132  	// result: (ADDconst [int64(int32(-c-d))] x)
  6133  	for {
  6134  		c := v.AuxInt
  6135  		v_0 := v.Args[0]
  6136  		if v_0.Op != OpMIPSSUBconst {
  6137  			break
  6138  		}
  6139  		d := v_0.AuxInt
  6140  		x := v_0.Args[0]
  6141  		v.reset(OpMIPSADDconst)
  6142  		v.AuxInt = int64(int32(-c - d))
  6143  		v.AddArg(x)
  6144  		return true
  6145  	}
  6146  	// match: (SUBconst [c] (ADDconst [d] x))
  6147  	// cond:
  6148  	// result: (ADDconst [int64(int32(-c+d))] x)
  6149  	for {
  6150  		c := v.AuxInt
  6151  		v_0 := v.Args[0]
  6152  		if v_0.Op != OpMIPSADDconst {
  6153  			break
  6154  		}
  6155  		d := v_0.AuxInt
  6156  		x := v_0.Args[0]
  6157  		v.reset(OpMIPSADDconst)
  6158  		v.AuxInt = int64(int32(-c + d))
  6159  		v.AddArg(x)
  6160  		return true
  6161  	}
  6162  	return false
  6163  }
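// Worked example of the SUBconst chain fold above:
// (SUBconst [1] (SUBconst [2] x)) becomes (ADDconst [-3] x),
// i.e. (x-2)-1 == x+(-3), with the sum truncated to int32.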
  6164  func rewriteValueMIPS_OpMIPSXOR_0(v *Value) bool {
  6165  	// match: (XOR x (MOVWconst [c]))
  6166  	// cond:
  6167  	// result: (XORconst [c] x)
  6168  	for {
  6169  		_ = v.Args[1]
  6170  		x := v.Args[0]
  6171  		v_1 := v.Args[1]
  6172  		if v_1.Op != OpMIPSMOVWconst {
  6173  			break
  6174  		}
  6175  		c := v_1.AuxInt
  6176  		v.reset(OpMIPSXORconst)
  6177  		v.AuxInt = c
  6178  		v.AddArg(x)
  6179  		return true
  6180  	}
  6181  	// match: (XOR (MOVWconst [c]) x)
  6182  	// cond:
  6183  	// result: (XORconst [c] x)
  6184  	for {
  6185  		_ = v.Args[1]
  6186  		v_0 := v.Args[0]
  6187  		if v_0.Op != OpMIPSMOVWconst {
  6188  			break
  6189  		}
  6190  		c := v_0.AuxInt
  6191  		x := v.Args[1]
  6192  		v.reset(OpMIPSXORconst)
  6193  		v.AuxInt = c
  6194  		v.AddArg(x)
  6195  		return true
  6196  	}
  6197  	// match: (XOR x x)
  6198  	// cond:
  6199  	// result: (MOVWconst [0])
  6200  	for {
  6201  		_ = v.Args[1]
  6202  		x := v.Args[0]
  6203  		if x != v.Args[1] {
  6204  			break
  6205  		}
  6206  		v.reset(OpMIPSMOVWconst)
  6207  		v.AuxInt = 0
  6208  		return true
  6209  	}
  6210  	return false
  6211  }
  6212  func rewriteValueMIPS_OpMIPSXORconst_0(v *Value) bool {
  6213  	// match: (XORconst [0] x)
  6214  	// cond:
  6215  	// result: x
  6216  	for {
  6217  		if v.AuxInt != 0 {
  6218  			break
  6219  		}
  6220  		x := v.Args[0]
  6221  		v.reset(OpCopy)
  6222  		v.Type = x.Type
  6223  		v.AddArg(x)
  6224  		return true
  6225  	}
  6226  	// match: (XORconst [-1] x)
  6227  	// cond:
  6228  	// result: (NORconst [0] x)
  6229  	for {
  6230  		if v.AuxInt != -1 {
  6231  			break
  6232  		}
  6233  		x := v.Args[0]
  6234  		v.reset(OpMIPSNORconst)
  6235  		v.AuxInt = 0
  6236  		v.AddArg(x)
  6237  		return true
  6238  	}
  6239  	// match: (XORconst [c] (MOVWconst [d]))
  6240  	// cond:
  6241  	// result: (MOVWconst [c^d])
  6242  	for {
  6243  		c := v.AuxInt
  6244  		v_0 := v.Args[0]
  6245  		if v_0.Op != OpMIPSMOVWconst {
  6246  			break
  6247  		}
  6248  		d := v_0.AuxInt
  6249  		v.reset(OpMIPSMOVWconst)
  6250  		v.AuxInt = c ^ d
  6251  		return true
  6252  	}
  6253  	// match: (XORconst [c] (XORconst [d] x))
  6254  	// cond:
  6255  	// result: (XORconst [c^d] x)
  6256  	for {
  6257  		c := v.AuxInt
  6258  		v_0 := v.Args[0]
  6259  		if v_0.Op != OpMIPSXORconst {
  6260  			break
  6261  		}
  6262  		d := v_0.AuxInt
  6263  		x := v_0.Args[0]
  6264  		v.reset(OpMIPSXORconst)
  6265  		v.AuxInt = c ^ d
  6266  		v.AddArg(x)
  6267  		return true
  6268  	}
  6269  	return false
  6270  }
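// Note on the XORconst [-1] rule above: x ^ -1 flips every bit, which equals
// NOR(x, 0) == ^(x|0), so the rewrite maps bitwise NOT onto the NOR
// instruction with a zero constant.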
  6271  func rewriteValueMIPS_OpMod16_0(v *Value) bool {
  6272  	b := v.Block
  6273  	_ = b
  6274  	typ := &b.Func.Config.Types
  6275  	_ = typ
  6276  	// match: (Mod16 x y)
  6277  	// cond:
  6278  	// result: (Select0 (DIV (SignExt16to32 x) (SignExt16to32 y)))
  6279  	for {
  6280  		_ = v.Args[1]
  6281  		x := v.Args[0]
  6282  		y := v.Args[1]
  6283  		v.reset(OpSelect0)
  6284  		v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32))
  6285  		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  6286  		v1.AddArg(x)
  6287  		v0.AddArg(v1)
  6288  		v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  6289  		v2.AddArg(y)
  6290  		v0.AddArg(v2)
  6291  		v.AddArg(v0)
  6292  		return true
  6293  	}
  6294  }
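// Note on this and the following Mod lowerings: DIV/DIVU produce a two-value
// tuple and Select0 extracts its first half, the remainder (HI on MIPS); the
// corresponding Div lowerings take the quotient (LO) with Select1.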
  6295  func rewriteValueMIPS_OpMod16u_0(v *Value) bool {
  6296  	b := v.Block
  6297  	_ = b
  6298  	typ := &b.Func.Config.Types
  6299  	_ = typ
  6300  	// match: (Mod16u x y)
  6301  	// cond:
  6302  	// result: (Select0 (DIVU (ZeroExt16to32 x) (ZeroExt16to32 y)))
  6303  	for {
  6304  		_ = v.Args[1]
  6305  		x := v.Args[0]
  6306  		y := v.Args[1]
  6307  		v.reset(OpSelect0)
  6308  		v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32))
  6309  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  6310  		v1.AddArg(x)
  6311  		v0.AddArg(v1)
  6312  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  6313  		v2.AddArg(y)
  6314  		v0.AddArg(v2)
  6315  		v.AddArg(v0)
  6316  		return true
  6317  	}
  6318  }
  6319  func rewriteValueMIPS_OpMod32_0(v *Value) bool {
  6320  	b := v.Block
  6321  	_ = b
  6322  	typ := &b.Func.Config.Types
  6323  	_ = typ
  6324  	// match: (Mod32 x y)
  6325  	// cond:
  6326  	// result: (Select0 (DIV x y))
  6327  	for {
  6328  		_ = v.Args[1]
  6329  		x := v.Args[0]
  6330  		y := v.Args[1]
  6331  		v.reset(OpSelect0)
  6332  		v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32))
  6333  		v0.AddArg(x)
  6334  		v0.AddArg(y)
  6335  		v.AddArg(v0)
  6336  		return true
  6337  	}
  6338  }
  6339  func rewriteValueMIPS_OpMod32u_0(v *Value) bool {
  6340  	b := v.Block
  6341  	_ = b
  6342  	typ := &b.Func.Config.Types
  6343  	_ = typ
  6344  	// match: (Mod32u x y)
  6345  	// cond:
  6346  	// result: (Select0 (DIVU x y))
  6347  	for {
  6348  		_ = v.Args[1]
  6349  		x := v.Args[0]
  6350  		y := v.Args[1]
  6351  		v.reset(OpSelect0)
  6352  		v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32))
  6353  		v0.AddArg(x)
  6354  		v0.AddArg(y)
  6355  		v.AddArg(v0)
  6356  		return true
  6357  	}
  6358  }
  6359  func rewriteValueMIPS_OpMod8_0(v *Value) bool {
  6360  	b := v.Block
  6361  	_ = b
  6362  	typ := &b.Func.Config.Types
  6363  	_ = typ
  6364  	// match: (Mod8 x y)
  6365  	// cond:
  6366  	// result: (Select0 (DIV (SignExt8to32 x) (SignExt8to32 y)))
  6367  	for {
  6368  		_ = v.Args[1]
  6369  		x := v.Args[0]
  6370  		y := v.Args[1]
  6371  		v.reset(OpSelect0)
  6372  		v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32))
  6373  		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  6374  		v1.AddArg(x)
  6375  		v0.AddArg(v1)
  6376  		v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  6377  		v2.AddArg(y)
  6378  		v0.AddArg(v2)
  6379  		v.AddArg(v0)
  6380  		return true
  6381  	}
  6382  }
  6383  func rewriteValueMIPS_OpMod8u_0(v *Value) bool {
  6384  	b := v.Block
  6385  	_ = b
  6386  	typ := &b.Func.Config.Types
  6387  	_ = typ
  6388  	// match: (Mod8u x y)
  6389  	// cond:
  6390  	// result: (Select0 (DIVU (ZeroExt8to32 x) (ZeroExt8to32 y)))
  6391  	for {
  6392  		_ = v.Args[1]
  6393  		x := v.Args[0]
  6394  		y := v.Args[1]
  6395  		v.reset(OpSelect0)
  6396  		v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32))
  6397  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  6398  		v1.AddArg(x)
  6399  		v0.AddArg(v1)
  6400  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  6401  		v2.AddArg(y)
  6402  		v0.AddArg(v2)
  6403  		v.AddArg(v0)
  6404  		return true
  6405  	}
  6406  }
  6407  func rewriteValueMIPS_OpMove_0(v *Value) bool {
  6408  	b := v.Block
  6409  	_ = b
  6410  	typ := &b.Func.Config.Types
  6411  	_ = typ
  6412  	// match: (Move [0] _ _ mem)
  6413  	// cond:
  6414  	// result: mem
  6415  	for {
  6416  		if v.AuxInt != 0 {
  6417  			break
  6418  		}
  6419  		_ = v.Args[2]
  6420  		mem := v.Args[2]
  6421  		v.reset(OpCopy)
  6422  		v.Type = mem.Type
  6423  		v.AddArg(mem)
  6424  		return true
  6425  	}
  6426  	// match: (Move [1] dst src mem)
  6427  	// cond:
  6428  	// result: (MOVBstore dst (MOVBUload src mem) mem)
  6429  	for {
  6430  		if v.AuxInt != 1 {
  6431  			break
  6432  		}
  6433  		_ = v.Args[2]
  6434  		dst := v.Args[0]
  6435  		src := v.Args[1]
  6436  		mem := v.Args[2]
  6437  		v.reset(OpMIPSMOVBstore)
  6438  		v.AddArg(dst)
  6439  		v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
  6440  		v0.AddArg(src)
  6441  		v0.AddArg(mem)
  6442  		v.AddArg(v0)
  6443  		v.AddArg(mem)
  6444  		return true
  6445  	}
  6446  	// match: (Move [2] {t} dst src mem)
  6447  	// cond: t.(*types.Type).Alignment()%2 == 0
  6448  	// result: (MOVHstore dst (MOVHUload src mem) mem)
  6449  	for {
  6450  		if v.AuxInt != 2 {
  6451  			break
  6452  		}
  6453  		t := v.Aux
  6454  		_ = v.Args[2]
  6455  		dst := v.Args[0]
  6456  		src := v.Args[1]
  6457  		mem := v.Args[2]
  6458  		if !(t.(*types.Type).Alignment()%2 == 0) {
  6459  			break
  6460  		}
  6461  		v.reset(OpMIPSMOVHstore)
  6462  		v.AddArg(dst)
  6463  		v0 := b.NewValue0(v.Pos, OpMIPSMOVHUload, typ.UInt16)
  6464  		v0.AddArg(src)
  6465  		v0.AddArg(mem)
  6466  		v.AddArg(v0)
  6467  		v.AddArg(mem)
  6468  		return true
  6469  	}
  6470  	// match: (Move [2] dst src mem)
  6471  	// cond:
  6472  	// result: (MOVBstore [1] dst (MOVBUload [1] src mem) (MOVBstore dst (MOVBUload src mem) mem))
  6473  	for {
  6474  		if v.AuxInt != 2 {
  6475  			break
  6476  		}
  6477  		_ = v.Args[2]
  6478  		dst := v.Args[0]
  6479  		src := v.Args[1]
  6480  		mem := v.Args[2]
  6481  		v.reset(OpMIPSMOVBstore)
  6482  		v.AuxInt = 1
  6483  		v.AddArg(dst)
  6484  		v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
  6485  		v0.AuxInt = 1
  6486  		v0.AddArg(src)
  6487  		v0.AddArg(mem)
  6488  		v.AddArg(v0)
  6489  		v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  6490  		v1.AddArg(dst)
  6491  		v2 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
  6492  		v2.AddArg(src)
  6493  		v2.AddArg(mem)
  6494  		v1.AddArg(v2)
  6495  		v1.AddArg(mem)
  6496  		v.AddArg(v1)
  6497  		return true
  6498  	}
  6499  	// match: (Move [4] {t} dst src mem)
  6500  	// cond: t.(*types.Type).Alignment()%4 == 0
  6501  	// result: (MOVWstore dst (MOVWload src mem) mem)
  6502  	for {
  6503  		if v.AuxInt != 4 {
  6504  			break
  6505  		}
  6506  		t := v.Aux
  6507  		_ = v.Args[2]
  6508  		dst := v.Args[0]
  6509  		src := v.Args[1]
  6510  		mem := v.Args[2]
  6511  		if !(t.(*types.Type).Alignment()%4 == 0) {
  6512  			break
  6513  		}
  6514  		v.reset(OpMIPSMOVWstore)
  6515  		v.AddArg(dst)
  6516  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
  6517  		v0.AddArg(src)
  6518  		v0.AddArg(mem)
  6519  		v.AddArg(v0)
  6520  		v.AddArg(mem)
  6521  		return true
  6522  	}
  6523  	// match: (Move [4] {t} dst src mem)
  6524  	// cond: t.(*types.Type).Alignment()%2 == 0
  6525  	// result: (MOVHstore [2] dst (MOVHUload [2] src mem) (MOVHstore dst (MOVHUload src mem) mem))
  6526  	for {
  6527  		if v.AuxInt != 4 {
  6528  			break
  6529  		}
  6530  		t := v.Aux
  6531  		_ = v.Args[2]
  6532  		dst := v.Args[0]
  6533  		src := v.Args[1]
  6534  		mem := v.Args[2]
  6535  		if !(t.(*types.Type).Alignment()%2 == 0) {
  6536  			break
  6537  		}
  6538  		v.reset(OpMIPSMOVHstore)
  6539  		v.AuxInt = 2
  6540  		v.AddArg(dst)
  6541  		v0 := b.NewValue0(v.Pos, OpMIPSMOVHUload, typ.UInt16)
  6542  		v0.AuxInt = 2
  6543  		v0.AddArg(src)
  6544  		v0.AddArg(mem)
  6545  		v.AddArg(v0)
  6546  		v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
  6547  		v1.AddArg(dst)
  6548  		v2 := b.NewValue0(v.Pos, OpMIPSMOVHUload, typ.UInt16)
  6549  		v2.AddArg(src)
  6550  		v2.AddArg(mem)
  6551  		v1.AddArg(v2)
  6552  		v1.AddArg(mem)
  6553  		v.AddArg(v1)
  6554  		return true
  6555  	}
  6556  	// match: (Move [4] dst src mem)
  6557  	// cond:
  6558  	// result: (MOVBstore [3] dst (MOVBUload [3] src mem) (MOVBstore [2] dst (MOVBUload [2] src mem) (MOVBstore [1] dst (MOVBUload [1] src mem) (MOVBstore dst (MOVBUload src mem) mem))))
  6559  	for {
  6560  		if v.AuxInt != 4 {
  6561  			break
  6562  		}
  6563  		_ = v.Args[2]
  6564  		dst := v.Args[0]
  6565  		src := v.Args[1]
  6566  		mem := v.Args[2]
  6567  		v.reset(OpMIPSMOVBstore)
  6568  		v.AuxInt = 3
  6569  		v.AddArg(dst)
  6570  		v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
  6571  		v0.AuxInt = 3
  6572  		v0.AddArg(src)
  6573  		v0.AddArg(mem)
  6574  		v.AddArg(v0)
  6575  		v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  6576  		v1.AuxInt = 2
  6577  		v1.AddArg(dst)
  6578  		v2 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
  6579  		v2.AuxInt = 2
  6580  		v2.AddArg(src)
  6581  		v2.AddArg(mem)
  6582  		v1.AddArg(v2)
  6583  		v3 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  6584  		v3.AuxInt = 1
  6585  		v3.AddArg(dst)
  6586  		v4 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
  6587  		v4.AuxInt = 1
  6588  		v4.AddArg(src)
  6589  		v4.AddArg(mem)
  6590  		v3.AddArg(v4)
  6591  		v5 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  6592  		v5.AddArg(dst)
  6593  		v6 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
  6594  		v6.AddArg(src)
  6595  		v6.AddArg(mem)
  6596  		v5.AddArg(v6)
  6597  		v5.AddArg(mem)
  6598  		v3.AddArg(v5)
  6599  		v1.AddArg(v3)
  6600  		v.AddArg(v1)
  6601  		return true
  6602  	}
  6603  	// match: (Move [3] dst src mem)
  6604  	// cond:
  6605  	// result: (MOVBstore [2] dst (MOVBUload [2] src mem) (MOVBstore [1] dst (MOVBUload [1] src mem) (MOVBstore dst (MOVBUload src mem) mem)))
  6606  	for {
  6607  		if v.AuxInt != 3 {
  6608  			break
  6609  		}
  6610  		_ = v.Args[2]
  6611  		dst := v.Args[0]
  6612  		src := v.Args[1]
  6613  		mem := v.Args[2]
  6614  		v.reset(OpMIPSMOVBstore)
  6615  		v.AuxInt = 2
  6616  		v.AddArg(dst)
  6617  		v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
  6618  		v0.AuxInt = 2
  6619  		v0.AddArg(src)
  6620  		v0.AddArg(mem)
  6621  		v.AddArg(v0)
  6622  		v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  6623  		v1.AuxInt = 1
  6624  		v1.AddArg(dst)
  6625  		v2 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
  6626  		v2.AuxInt = 1
  6627  		v2.AddArg(src)
  6628  		v2.AddArg(mem)
  6629  		v1.AddArg(v2)
  6630  		v3 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  6631  		v3.AddArg(dst)
  6632  		v4 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
  6633  		v4.AddArg(src)
  6634  		v4.AddArg(mem)
  6635  		v3.AddArg(v4)
  6636  		v3.AddArg(mem)
  6637  		v1.AddArg(v3)
  6638  		v.AddArg(v1)
  6639  		return true
  6640  	}
  6641  	// match: (Move [8] {t} dst src mem)
  6642  	// cond: t.(*types.Type).Alignment()%4 == 0
  6643  	// result: (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))
  6644  	for {
  6645  		if v.AuxInt != 8 {
  6646  			break
  6647  		}
  6648  		t := v.Aux
  6649  		_ = v.Args[2]
  6650  		dst := v.Args[0]
  6651  		src := v.Args[1]
  6652  		mem := v.Args[2]
  6653  		if !(t.(*types.Type).Alignment()%4 == 0) {
  6654  			break
  6655  		}
  6656  		v.reset(OpMIPSMOVWstore)
  6657  		v.AuxInt = 4
  6658  		v.AddArg(dst)
  6659  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
  6660  		v0.AuxInt = 4
  6661  		v0.AddArg(src)
  6662  		v0.AddArg(mem)
  6663  		v.AddArg(v0)
  6664  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  6665  		v1.AddArg(dst)
  6666  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
  6667  		v2.AddArg(src)
  6668  		v2.AddArg(mem)
  6669  		v1.AddArg(v2)
  6670  		v1.AddArg(mem)
  6671  		v.AddArg(v1)
  6672  		return true
  6673  	}
  6674  	// match: (Move [8] {t} dst src mem)
  6675  	// cond: t.(*types.Type).Alignment()%2 == 0
  6676  	// result: (MOVHstore [6] dst (MOVHload [6] src mem) (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))))
  6677  	for {
  6678  		if v.AuxInt != 8 {
  6679  			break
  6680  		}
  6681  		t := v.Aux
  6682  		_ = v.Args[2]
  6683  		dst := v.Args[0]
  6684  		src := v.Args[1]
  6685  		mem := v.Args[2]
  6686  		if !(t.(*types.Type).Alignment()%2 == 0) {
  6687  			break
  6688  		}
  6689  		v.reset(OpMIPSMOVHstore)
  6690  		v.AuxInt = 6
  6691  		v.AddArg(dst)
  6692  		v0 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16)
  6693  		v0.AuxInt = 6
  6694  		v0.AddArg(src)
  6695  		v0.AddArg(mem)
  6696  		v.AddArg(v0)
  6697  		v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
  6698  		v1.AuxInt = 4
  6699  		v1.AddArg(dst)
  6700  		v2 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16)
  6701  		v2.AuxInt = 4
  6702  		v2.AddArg(src)
  6703  		v2.AddArg(mem)
  6704  		v1.AddArg(v2)
  6705  		v3 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
  6706  		v3.AuxInt = 2
  6707  		v3.AddArg(dst)
  6708  		v4 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16)
  6709  		v4.AuxInt = 2
  6710  		v4.AddArg(src)
  6711  		v4.AddArg(mem)
  6712  		v3.AddArg(v4)
  6713  		v5 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
  6714  		v5.AddArg(dst)
  6715  		v6 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16)
  6716  		v6.AddArg(src)
  6717  		v6.AddArg(mem)
  6718  		v5.AddArg(v6)
  6719  		v5.AddArg(mem)
  6720  		v3.AddArg(v5)
  6721  		v1.AddArg(v3)
  6722  		v.AddArg(v1)
  6723  		return true
  6724  	}
  6725  	return false
  6726  }
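// Note on the Move rules above: the {t} alignment guards pick the widest safe
// access, word stores when the type is 4-byte aligned, halfword stores when
// 2-byte aligned, and byte-by-byte copies otherwise, so small fixed-size
// moves inline without unaligned accesses.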
  6727  func rewriteValueMIPS_OpMove_10(v *Value) bool {
  6728  	b := v.Block
  6729  	_ = b
  6730  	config := b.Func.Config
  6731  	_ = config
  6732  	typ := &b.Func.Config.Types
  6733  	_ = typ
  6734  	// match: (Move [6] {t} dst src mem)
  6735  	// cond: t.(*types.Type).Alignment()%2 == 0
  6736  	// result: (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)))
  6737  	for {
  6738  		if v.AuxInt != 6 {
  6739  			break
  6740  		}
  6741  		t := v.Aux
  6742  		_ = v.Args[2]
  6743  		dst := v.Args[0]
  6744  		src := v.Args[1]
  6745  		mem := v.Args[2]
  6746  		if !(t.(*types.Type).Alignment()%2 == 0) {
  6747  			break
  6748  		}
  6749  		v.reset(OpMIPSMOVHstore)
  6750  		v.AuxInt = 4
  6751  		v.AddArg(dst)
  6752  		v0 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16)
  6753  		v0.AuxInt = 4
  6754  		v0.AddArg(src)
  6755  		v0.AddArg(mem)
  6756  		v.AddArg(v0)
  6757  		v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
  6758  		v1.AuxInt = 2
  6759  		v1.AddArg(dst)
  6760  		v2 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16)
  6761  		v2.AuxInt = 2
  6762  		v2.AddArg(src)
  6763  		v2.AddArg(mem)
  6764  		v1.AddArg(v2)
  6765  		v3 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
  6766  		v3.AddArg(dst)
  6767  		v4 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16)
  6768  		v4.AddArg(src)
  6769  		v4.AddArg(mem)
  6770  		v3.AddArg(v4)
  6771  		v3.AddArg(mem)
  6772  		v1.AddArg(v3)
  6773  		v.AddArg(v1)
  6774  		return true
  6775  	}
  6776  	// match: (Move [12] {t} dst src mem)
  6777  	// cond: t.(*types.Type).Alignment()%4 == 0
  6778  	// result: (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem)))
  6779  	for {
  6780  		if v.AuxInt != 12 {
  6781  			break
  6782  		}
  6783  		t := v.Aux
  6784  		_ = v.Args[2]
  6785  		dst := v.Args[0]
  6786  		src := v.Args[1]
  6787  		mem := v.Args[2]
  6788  		if !(t.(*types.Type).Alignment()%4 == 0) {
  6789  			break
  6790  		}
  6791  		v.reset(OpMIPSMOVWstore)
  6792  		v.AuxInt = 8
  6793  		v.AddArg(dst)
  6794  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
  6795  		v0.AuxInt = 8
  6796  		v0.AddArg(src)
  6797  		v0.AddArg(mem)
  6798  		v.AddArg(v0)
  6799  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  6800  		v1.AuxInt = 4
  6801  		v1.AddArg(dst)
  6802  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
  6803  		v2.AuxInt = 4
  6804  		v2.AddArg(src)
  6805  		v2.AddArg(mem)
  6806  		v1.AddArg(v2)
  6807  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  6808  		v3.AddArg(dst)
  6809  		v4 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
  6810  		v4.AddArg(src)
  6811  		v4.AddArg(mem)
  6812  		v3.AddArg(v4)
  6813  		v3.AddArg(mem)
  6814  		v1.AddArg(v3)
  6815  		v.AddArg(v1)
  6816  		return true
  6817  	}
  6818  	// match: (Move [16] {t} dst src mem)
  6819  	// cond: t.(*types.Type).Alignment()%4 == 0
  6820  	// result: (MOVWstore [12] dst (MOVWload [12] src mem) (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))))
  6821  	for {
  6822  		if v.AuxInt != 16 {
  6823  			break
  6824  		}
  6825  		t := v.Aux
  6826  		_ = v.Args[2]
  6827  		dst := v.Args[0]
  6828  		src := v.Args[1]
  6829  		mem := v.Args[2]
  6830  		if !(t.(*types.Type).Alignment()%4 == 0) {
  6831  			break
  6832  		}
  6833  		v.reset(OpMIPSMOVWstore)
  6834  		v.AuxInt = 12
  6835  		v.AddArg(dst)
  6836  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
  6837  		v0.AuxInt = 12
  6838  		v0.AddArg(src)
  6839  		v0.AddArg(mem)
  6840  		v.AddArg(v0)
  6841  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  6842  		v1.AuxInt = 8
  6843  		v1.AddArg(dst)
  6844  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
  6845  		v2.AuxInt = 8
  6846  		v2.AddArg(src)
  6847  		v2.AddArg(mem)
  6848  		v1.AddArg(v2)
  6849  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  6850  		v3.AuxInt = 4
  6851  		v3.AddArg(dst)
  6852  		v4 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
  6853  		v4.AuxInt = 4
  6854  		v4.AddArg(src)
  6855  		v4.AddArg(mem)
  6856  		v3.AddArg(v4)
  6857  		v5 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  6858  		v5.AddArg(dst)
  6859  		v6 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
  6860  		v6.AddArg(src)
  6861  		v6.AddArg(mem)
  6862  		v5.AddArg(v6)
  6863  		v5.AddArg(mem)
  6864  		v3.AddArg(v5)
  6865  		v1.AddArg(v3)
  6866  		v.AddArg(v1)
  6867  		return true
  6868  	}
  6869  	// match: (Move [s] {t} dst src mem)
  6870  	// cond: (s > 16 || t.(*types.Type).Alignment()%4 != 0)
  6871  	// result: (LoweredMove [t.(*types.Type).Alignment()] dst src (ADDconst <src.Type> src [s-moveSize(t.(*types.Type).Alignment(), config)]) mem)
  6872  	for {
  6873  		s := v.AuxInt
  6874  		t := v.Aux
  6875  		_ = v.Args[2]
  6876  		dst := v.Args[0]
  6877  		src := v.Args[1]
  6878  		mem := v.Args[2]
  6879  		if !(s > 16 || t.(*types.Type).Alignment()%4 != 0) {
  6880  			break
  6881  		}
  6882  		v.reset(OpMIPSLoweredMove)
  6883  		v.AuxInt = t.(*types.Type).Alignment()
  6884  		v.AddArg(dst)
  6885  		v.AddArg(src)
  6886  		v0 := b.NewValue0(v.Pos, OpMIPSADDconst, src.Type)
  6887  		v0.AuxInt = s - moveSize(t.(*types.Type).Alignment(), config)
  6888  		v0.AddArg(src)
  6889  		v.AddArg(v0)
  6890  		v.AddArg(mem)
  6891  		return true
  6892  	}
  6893  	return false
  6894  }
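// Note on the final Move rule above: sizes over 16 bytes, or alignments that
// do not permit word copies, fall back to LoweredMove. Its AuxInt carries the
// alignment, and the extra ADDconst argument computes src plus s minus one
// element size, the address of the last element to copy, which the later
// lowering of LoweredMove uses as its loop bound.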
  6895  func rewriteValueMIPS_OpMul16_0(v *Value) bool {
  6896  	// match: (Mul16 x y)
  6897  	// cond:
  6898  	// result: (MUL x y)
  6899  	for {
  6900  		_ = v.Args[1]
  6901  		x := v.Args[0]
  6902  		y := v.Args[1]
  6903  		v.reset(OpMIPSMUL)
  6904  		v.AddArg(x)
  6905  		v.AddArg(y)
  6906  		return true
  6907  	}
  6908  }
  6909  func rewriteValueMIPS_OpMul32_0(v *Value) bool {
  6910  	// match: (Mul32 x y)
  6911  	// cond:
  6912  	// result: (MUL x y)
  6913  	for {
  6914  		_ = v.Args[1]
  6915  		x := v.Args[0]
  6916  		y := v.Args[1]
  6917  		v.reset(OpMIPSMUL)
  6918  		v.AddArg(x)
  6919  		v.AddArg(y)
  6920  		return true
  6921  	}
  6922  }
  6923  func rewriteValueMIPS_OpMul32F_0(v *Value) bool {
  6924  	// match: (Mul32F x y)
  6925  	// cond:
  6926  	// result: (MULF x y)
  6927  	for {
  6928  		_ = v.Args[1]
  6929  		x := v.Args[0]
  6930  		y := v.Args[1]
  6931  		v.reset(OpMIPSMULF)
  6932  		v.AddArg(x)
  6933  		v.AddArg(y)
  6934  		return true
  6935  	}
  6936  }
  6937  func rewriteValueMIPS_OpMul32uhilo_0(v *Value) bool {
  6938  	// match: (Mul32uhilo x y)
  6939  	// cond:
  6940  	// result: (MULTU x y)
  6941  	for {
  6942  		_ = v.Args[1]
  6943  		x := v.Args[0]
  6944  		y := v.Args[1]
  6945  		v.reset(OpMIPSMULTU)
  6946  		v.AddArg(x)
  6947  		v.AddArg(y)
  6948  		return true
  6949  	}
  6950  }
  6951  func rewriteValueMIPS_OpMul64F_0(v *Value) bool {
  6952  	// match: (Mul64F x y)
  6953  	// cond:
  6954  	// result: (MULD x y)
  6955  	for {
  6956  		_ = v.Args[1]
  6957  		x := v.Args[0]
  6958  		y := v.Args[1]
  6959  		v.reset(OpMIPSMULD)
  6960  		v.AddArg(x)
  6961  		v.AddArg(y)
  6962  		return true
  6963  	}
  6964  }
  6965  func rewriteValueMIPS_OpMul8_0(v *Value) bool {
  6966  	// match: (Mul8 x y)
  6967  	// cond:
  6968  	// result: (MUL x y)
  6969  	for {
  6970  		_ = v.Args[1]
  6971  		x := v.Args[0]
  6972  		y := v.Args[1]
  6973  		v.reset(OpMIPSMUL)
  6974  		v.AddArg(x)
  6975  		v.AddArg(y)
  6976  		return true
  6977  	}
  6978  }
  6979  func rewriteValueMIPS_OpNeg16_0(v *Value) bool {
  6980  	// match: (Neg16 x)
  6981  	// cond:
  6982  	// result: (NEG x)
  6983  	for {
  6984  		x := v.Args[0]
  6985  		v.reset(OpMIPSNEG)
  6986  		v.AddArg(x)
  6987  		return true
  6988  	}
  6989  }
  6990  func rewriteValueMIPS_OpNeg32_0(v *Value) bool {
  6991  	// match: (Neg32 x)
  6992  	// cond:
  6993  	// result: (NEG x)
  6994  	for {
  6995  		x := v.Args[0]
  6996  		v.reset(OpMIPSNEG)
  6997  		v.AddArg(x)
  6998  		return true
  6999  	}
  7000  }
  7001  func rewriteValueMIPS_OpNeg32F_0(v *Value) bool {
  7002  	// match: (Neg32F x)
  7003  	// cond:
  7004  	// result: (NEGF x)
  7005  	for {
  7006  		x := v.Args[0]
  7007  		v.reset(OpMIPSNEGF)
  7008  		v.AddArg(x)
  7009  		return true
  7010  	}
  7011  }
  7012  func rewriteValueMIPS_OpNeg64F_0(v *Value) bool {
  7013  	// match: (Neg64F x)
  7014  	// cond:
  7015  	// result: (NEGD x)
  7016  	for {
  7017  		x := v.Args[0]
  7018  		v.reset(OpMIPSNEGD)
  7019  		v.AddArg(x)
  7020  		return true
  7021  	}
  7022  }
  7023  func rewriteValueMIPS_OpNeg8_0(v *Value) bool {
  7024  	// match: (Neg8 x)
  7025  	// cond:
  7026  	// result: (NEG x)
  7027  	for {
  7028  		x := v.Args[0]
  7029  		v.reset(OpMIPSNEG)
  7030  		v.AddArg(x)
  7031  		return true
  7032  	}
  7033  }
  7034  func rewriteValueMIPS_OpNeq16_0(v *Value) bool {
  7035  	b := v.Block
  7036  	_ = b
  7037  	typ := &b.Func.Config.Types
  7038  	_ = typ
  7039  	// match: (Neq16 x y)
  7040  	// cond:
  7041  	// result: (SGTU (XOR (ZeroExt16to32 x) (ZeroExt16to32 y)) (MOVWconst [0]))
  7042  	for {
  7043  		_ = v.Args[1]
  7044  		x := v.Args[0]
  7045  		y := v.Args[1]
  7046  		v.reset(OpMIPSSGTU)
  7047  		v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32)
  7048  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7049  		v1.AddArg(x)
  7050  		v0.AddArg(v1)
  7051  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7052  		v2.AddArg(y)
  7053  		v0.AddArg(v2)
  7054  		v.AddArg(v0)
  7055  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7056  		v3.AuxInt = 0
  7057  		v.AddArg(v3)
  7058  		return true
  7059  	}
  7060  }
  7061  func rewriteValueMIPS_OpNeq32_0(v *Value) bool {
  7062  	b := v.Block
  7063  	_ = b
  7064  	typ := &b.Func.Config.Types
  7065  	_ = typ
  7066  	// match: (Neq32 x y)
  7067  	// cond:
  7068  	// result: (SGTU (XOR x y) (MOVWconst [0]))
  7069  	for {
  7070  		_ = v.Args[1]
  7071  		x := v.Args[0]
  7072  		y := v.Args[1]
  7073  		v.reset(OpMIPSSGTU)
  7074  		v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32)
  7075  		v0.AddArg(x)
  7076  		v0.AddArg(y)
  7077  		v.AddArg(v0)
  7078  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7079  		v1.AuxInt = 0
  7080  		v.AddArg(v1)
  7081  		return true
  7082  	}
  7083  }
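// Note on the Neq lowerings: x != y is computed as (x^y) >u 0, so SGTU of the
// XOR against a zero constant yields 1 exactly when the operands differ.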
  7084  func rewriteValueMIPS_OpNeq32F_0(v *Value) bool {
  7085  	b := v.Block
  7086  	_ = b
  7087  	// match: (Neq32F x y)
  7088  	// cond:
  7089  	// result: (FPFlagFalse (CMPEQF x y))
  7090  	for {
  7091  		_ = v.Args[1]
  7092  		x := v.Args[0]
  7093  		y := v.Args[1]
  7094  		v.reset(OpMIPSFPFlagFalse)
  7095  		v0 := b.NewValue0(v.Pos, OpMIPSCMPEQF, types.TypeFlags)
  7096  		v0.AddArg(x)
  7097  		v0.AddArg(y)
  7098  		v.AddArg(v0)
  7099  		return true
  7100  	}
  7101  }
  7102  func rewriteValueMIPS_OpNeq64F_0(v *Value) bool {
  7103  	b := v.Block
  7104  	_ = b
  7105  	// match: (Neq64F x y)
  7106  	// cond:
  7107  	// result: (FPFlagFalse (CMPEQD x y))
  7108  	for {
  7109  		_ = v.Args[1]
  7110  		x := v.Args[0]
  7111  		y := v.Args[1]
  7112  		v.reset(OpMIPSFPFlagFalse)
  7113  		v0 := b.NewValue0(v.Pos, OpMIPSCMPEQD, types.TypeFlags)
  7114  		v0.AddArg(x)
  7115  		v0.AddArg(y)
  7116  		v.AddArg(v0)
  7117  		return true
  7118  	}
  7119  }
  7120  func rewriteValueMIPS_OpNeq8_0(v *Value) bool {
  7121  	b := v.Block
  7122  	_ = b
  7123  	typ := &b.Func.Config.Types
  7124  	_ = typ
  7125  	// match: (Neq8 x y)
  7126  	// cond:
  7127  	// result: (SGTU (XOR (ZeroExt8to32 x) (ZeroExt8to32 y)) (MOVWconst [0]))
  7128  	for {
  7129  		_ = v.Args[1]
  7130  		x := v.Args[0]
  7131  		y := v.Args[1]
  7132  		v.reset(OpMIPSSGTU)
  7133  		v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32)
  7134  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7135  		v1.AddArg(x)
  7136  		v0.AddArg(v1)
  7137  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7138  		v2.AddArg(y)
  7139  		v0.AddArg(v2)
  7140  		v.AddArg(v0)
  7141  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7142  		v3.AuxInt = 0
  7143  		v.AddArg(v3)
  7144  		return true
  7145  	}
  7146  }
  7147  func rewriteValueMIPS_OpNeqB_0(v *Value) bool {
  7148  	// match: (NeqB x y)
  7149  	// cond:
  7150  	// result: (XOR x y)
  7151  	for {
  7152  		_ = v.Args[1]
  7153  		x := v.Args[0]
  7154  		y := v.Args[1]
  7155  		v.reset(OpMIPSXOR)
  7156  		v.AddArg(x)
  7157  		v.AddArg(y)
  7158  		return true
  7159  	}
  7160  }
  7161  func rewriteValueMIPS_OpNeqPtr_0(v *Value) bool {
  7162  	b := v.Block
  7163  	_ = b
  7164  	typ := &b.Func.Config.Types
  7165  	_ = typ
  7166  	// match: (NeqPtr x y)
  7167  	// cond:
  7168  	// result: (SGTU (XOR x y) (MOVWconst [0]))
  7169  	for {
  7170  		_ = v.Args[1]
  7171  		x := v.Args[0]
  7172  		y := v.Args[1]
  7173  		v.reset(OpMIPSSGTU)
  7174  		v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32)
  7175  		v0.AddArg(x)
  7176  		v0.AddArg(y)
  7177  		v.AddArg(v0)
  7178  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7179  		v1.AuxInt = 0
  7180  		v.AddArg(v1)
  7181  		return true
  7182  	}
  7183  }
  7184  func rewriteValueMIPS_OpNilCheck_0(v *Value) bool {
  7185  	// match: (NilCheck ptr mem)
  7186  	// cond:
  7187  	// result: (LoweredNilCheck ptr mem)
  7188  	for {
  7189  		_ = v.Args[1]
  7190  		ptr := v.Args[0]
  7191  		mem := v.Args[1]
  7192  		v.reset(OpMIPSLoweredNilCheck)
  7193  		v.AddArg(ptr)
  7194  		v.AddArg(mem)
  7195  		return true
  7196  	}
  7197  }
  7198  func rewriteValueMIPS_OpNot_0(v *Value) bool {
  7199  	// match: (Not x)
  7200  	// cond:
  7201  	// result: (XORconst [1] x)
  7202  	for {
  7203  		x := v.Args[0]
  7204  		v.reset(OpMIPSXORconst)
  7205  		v.AuxInt = 1
  7206  		v.AddArg(x)
  7207  		return true
  7208  	}
  7209  }
  7210  func rewriteValueMIPS_OpOffPtr_0(v *Value) bool {
  7211  	// match: (OffPtr [off] ptr:(SP))
  7212  	// cond:
  7213  	// result: (MOVWaddr [off] ptr)
  7214  	for {
  7215  		off := v.AuxInt
  7216  		ptr := v.Args[0]
  7217  		if ptr.Op != OpSP {
  7218  			break
  7219  		}
  7220  		v.reset(OpMIPSMOVWaddr)
  7221  		v.AuxInt = off
  7222  		v.AddArg(ptr)
  7223  		return true
  7224  	}
  7225  	// match: (OffPtr [off] ptr)
  7226  	// cond:
  7227  	// result: (ADDconst [off] ptr)
  7228  	for {
  7229  		off := v.AuxInt
  7230  		ptr := v.Args[0]
  7231  		v.reset(OpMIPSADDconst)
  7232  		v.AuxInt = off
  7233  		v.AddArg(ptr)
  7234  		return true
  7235  	}
  7236  }
  7237  func rewriteValueMIPS_OpOr16_0(v *Value) bool {
  7238  	// match: (Or16 x y)
  7239  	// cond:
  7240  	// result: (OR x y)
  7241  	for {
  7242  		_ = v.Args[1]
  7243  		x := v.Args[0]
  7244  		y := v.Args[1]
  7245  		v.reset(OpMIPSOR)
  7246  		v.AddArg(x)
  7247  		v.AddArg(y)
  7248  		return true
  7249  	}
  7250  }
  7251  func rewriteValueMIPS_OpOr32_0(v *Value) bool {
  7252  	// match: (Or32 x y)
  7253  	// cond:
  7254  	// result: (OR x y)
  7255  	for {
  7256  		_ = v.Args[1]
  7257  		x := v.Args[0]
  7258  		y := v.Args[1]
  7259  		v.reset(OpMIPSOR)
  7260  		v.AddArg(x)
  7261  		v.AddArg(y)
  7262  		return true
  7263  	}
  7264  }
  7265  func rewriteValueMIPS_OpOr8_0(v *Value) bool {
  7266  	// match: (Or8 x y)
  7267  	// cond:
  7268  	// result: (OR x y)
  7269  	for {
  7270  		_ = v.Args[1]
  7271  		x := v.Args[0]
  7272  		y := v.Args[1]
  7273  		v.reset(OpMIPSOR)
  7274  		v.AddArg(x)
  7275  		v.AddArg(y)
  7276  		return true
  7277  	}
  7278  }
  7279  func rewriteValueMIPS_OpOrB_0(v *Value) bool {
  7280  	// match: (OrB x y)
  7281  	// cond:
  7282  	// result: (OR x y)
  7283  	for {
  7284  		_ = v.Args[1]
  7285  		x := v.Args[0]
  7286  		y := v.Args[1]
  7287  		v.reset(OpMIPSOR)
  7288  		v.AddArg(x)
  7289  		v.AddArg(y)
  7290  		return true
  7291  	}
  7292  }
  7293  func rewriteValueMIPS_OpRound32F_0(v *Value) bool {
  7294  	// match: (Round32F x)
  7295  	// cond:
  7296  	// result: x
  7297  	for {
  7298  		x := v.Args[0]
  7299  		v.reset(OpCopy)
  7300  		v.Type = x.Type
  7301  		v.AddArg(x)
  7302  		return true
  7303  	}
  7304  }
  7305  func rewriteValueMIPS_OpRound64F_0(v *Value) bool {
  7306  	// match: (Round64F x)
  7307  	// cond:
  7308  	// result: x
  7309  	for {
  7310  		x := v.Args[0]
  7311  		v.reset(OpCopy)
  7312  		v.Type = x.Type
  7313  		v.AddArg(x)
  7314  		return true
  7315  	}
  7316  }
  7317  func rewriteValueMIPS_OpRsh16Ux16_0(v *Value) bool {
  7318  	b := v.Block
  7319  	_ = b
  7320  	typ := &b.Func.Config.Types
  7321  	_ = typ
  7322  	// match: (Rsh16Ux16 <t> x y)
  7323  	// cond:
  7324  	// result: (CMOVZ (SRL <t> (ZeroExt16to32 x) (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
  7325  	for {
  7326  		t := v.Type
  7327  		_ = v.Args[1]
  7328  		x := v.Args[0]
  7329  		y := v.Args[1]
  7330  		v.reset(OpMIPSCMOVZ)
  7331  		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7332  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7333  		v1.AddArg(x)
  7334  		v0.AddArg(v1)
  7335  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7336  		v2.AddArg(y)
  7337  		v0.AddArg(v2)
  7338  		v.AddArg(v0)
  7339  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7340  		v3.AuxInt = 0
  7341  		v.AddArg(v3)
  7342  		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7343  		v4.AuxInt = 32
  7344  		v5 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7345  		v5.AddArg(y)
  7346  		v4.AddArg(v5)
  7347  		v.AddArg(v4)
  7348  		return true
  7349  	}
  7350  }
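// Note on the variable-count unsigned shifts such as Rsh16Ux16 above: the
// SGTUconst [32] guard is 1 while the (zero-extended) count is below 32, so
// the CMOVZ keeps the SRL result in that case and substitutes the zero
// constant otherwise, matching Go's rule that unsigned shifts by 32 or more
// produce 0.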
  7351  func rewriteValueMIPS_OpRsh16Ux32_0(v *Value) bool {
  7352  	b := v.Block
  7353  	_ = b
  7354  	typ := &b.Func.Config.Types
  7355  	_ = typ
  7356  	// match: (Rsh16Ux32 <t> x y)
  7357  	// cond:
  7358  	// result: (CMOVZ (SRL <t> (ZeroExt16to32 x) y) (MOVWconst [0]) (SGTUconst [32] y))
  7359  	for {
  7360  		t := v.Type
  7361  		_ = v.Args[1]
  7362  		x := v.Args[0]
  7363  		y := v.Args[1]
  7364  		v.reset(OpMIPSCMOVZ)
  7365  		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7366  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7367  		v1.AddArg(x)
  7368  		v0.AddArg(v1)
  7369  		v0.AddArg(y)
  7370  		v.AddArg(v0)
  7371  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7372  		v2.AuxInt = 0
  7373  		v.AddArg(v2)
  7374  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7375  		v3.AuxInt = 32
  7376  		v3.AddArg(y)
  7377  		v.AddArg(v3)
  7378  		return true
  7379  	}
  7380  }
  7381  func rewriteValueMIPS_OpRsh16Ux64_0(v *Value) bool {
  7382  	b := v.Block
  7383  	_ = b
  7384  	typ := &b.Func.Config.Types
  7385  	_ = typ
  7386  	// match: (Rsh16Ux64 x (Const64 [c]))
  7387  	// cond: uint32(c) < 16
  7388  	// result: (SRLconst (SLLconst <typ.UInt32> x [16]) [c+16])
  7389  	for {
  7390  		_ = v.Args[1]
  7391  		x := v.Args[0]
  7392  		v_1 := v.Args[1]
  7393  		if v_1.Op != OpConst64 {
  7394  			break
  7395  		}
  7396  		c := v_1.AuxInt
  7397  		if !(uint32(c) < 16) {
  7398  			break
  7399  		}
  7400  		v.reset(OpMIPSSRLconst)
  7401  		v.AuxInt = c + 16
  7402  		v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
  7403  		v0.AuxInt = 16
  7404  		v0.AddArg(x)
  7405  		v.AddArg(v0)
  7406  		return true
  7407  	}
  7408  	// match: (Rsh16Ux64 _ (Const64 [c]))
  7409  	// cond: uint32(c) >= 16
  7410  	// result: (MOVWconst [0])
  7411  	for {
  7412  		_ = v.Args[1]
  7413  		v_1 := v.Args[1]
  7414  		if v_1.Op != OpConst64 {
  7415  			break
  7416  		}
  7417  		c := v_1.AuxInt
  7418  		if !(uint32(c) >= 16) {
  7419  			break
  7420  		}
  7421  		v.reset(OpMIPSMOVWconst)
  7422  		v.AuxInt = 0
  7423  		return true
  7424  	}
  7425  	return false
  7426  }
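// Note on the constant Rsh16Ux64 rules above: the 16-bit value is first
// shifted left by 16 to discard whatever sits in the upper half of the
// register, then logically shifted right by c+16, which performs the
// requested shift on the 16 meaningful bits; counts of 16 or more fold to 0.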
  7427  func rewriteValueMIPS_OpRsh16Ux8_0(v *Value) bool {
  7428  	b := v.Block
  7429  	_ = b
  7430  	typ := &b.Func.Config.Types
  7431  	_ = typ
  7432  	// match: (Rsh16Ux8 <t> x y)
  7433  	// cond:
  7434  	// result: (CMOVZ (SRL <t> (ZeroExt16to32 x) (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
  7435  	for {
  7436  		t := v.Type
  7437  		_ = v.Args[1]
  7438  		x := v.Args[0]
  7439  		y := v.Args[1]
  7440  		v.reset(OpMIPSCMOVZ)
  7441  		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7442  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7443  		v1.AddArg(x)
  7444  		v0.AddArg(v1)
  7445  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7446  		v2.AddArg(y)
  7447  		v0.AddArg(v2)
  7448  		v.AddArg(v0)
  7449  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7450  		v3.AuxInt = 0
  7451  		v.AddArg(v3)
  7452  		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7453  		v4.AuxInt = 32
  7454  		v5 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7455  		v5.AddArg(y)
  7456  		v4.AddArg(v5)
  7457  		v.AddArg(v4)
  7458  		return true
  7459  	}
  7460  }
  7461  func rewriteValueMIPS_OpRsh16x16_0(v *Value) bool {
  7462  	b := v.Block
  7463  	_ = b
  7464  	typ := &b.Func.Config.Types
  7465  	_ = typ
  7466  	// match: (Rsh16x16 x y)
  7467  	// cond:
  7468  	// result: (SRA (SignExt16to32 x) ( CMOVZ <typ.UInt32> (ZeroExt16to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt16to32 y))))
  7469  	for {
  7470  		_ = v.Args[1]
  7471  		x := v.Args[0]
  7472  		y := v.Args[1]
  7473  		v.reset(OpMIPSSRA)
  7474  		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  7475  		v0.AddArg(x)
  7476  		v.AddArg(v0)
  7477  		v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
  7478  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7479  		v2.AddArg(y)
  7480  		v1.AddArg(v2)
  7481  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7482  		v3.AuxInt = -1
  7483  		v1.AddArg(v3)
  7484  		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7485  		v4.AuxInt = 32
  7486  		v5 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7487  		v5.AddArg(y)
  7488  		v4.AddArg(v5)
  7489  		v1.AddArg(v4)
  7490  		v.AddArg(v1)
  7491  		return true
  7492  	}
  7493  }
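// Note on the variable-count signed shifts such as Rsh16x16 above: the CMOVZ
// passes the count through while it is below 32 and substitutes -1 otherwise;
// since MIPS register shifts use only the low five bits of the count, -1
// behaves as 31, so oversized shifts fill the result with the sign bit, as
// Go's shift semantics require.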
  7494  func rewriteValueMIPS_OpRsh16x32_0(v *Value) bool {
  7495  	b := v.Block
  7496  	_ = b
  7497  	typ := &b.Func.Config.Types
  7498  	_ = typ
  7499  	// match: (Rsh16x32 x y)
  7500  	// cond:
  7501  	// result: (SRA (SignExt16to32 x) ( CMOVZ <typ.UInt32> y (MOVWconst [-1]) (SGTUconst [32] y)))
  7502  	for {
  7503  		_ = v.Args[1]
  7504  		x := v.Args[0]
  7505  		y := v.Args[1]
  7506  		v.reset(OpMIPSSRA)
  7507  		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  7508  		v0.AddArg(x)
  7509  		v.AddArg(v0)
  7510  		v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
  7511  		v1.AddArg(y)
  7512  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7513  		v2.AuxInt = -1
  7514  		v1.AddArg(v2)
  7515  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7516  		v3.AuxInt = 32
  7517  		v3.AddArg(y)
  7518  		v1.AddArg(v3)
  7519  		v.AddArg(v1)
  7520  		return true
  7521  	}
  7522  }
  7523  func rewriteValueMIPS_OpRsh16x64_0(v *Value) bool {
  7524  	b := v.Block
  7525  	_ = b
  7526  	typ := &b.Func.Config.Types
  7527  	_ = typ
  7528  	// match: (Rsh16x64 x (Const64 [c]))
  7529  	// cond: uint32(c) < 16
  7530  	// result: (SRAconst (SLLconst <typ.UInt32> x [16]) [c+16])
  7531  	for {
  7532  		_ = v.Args[1]
  7533  		x := v.Args[0]
  7534  		v_1 := v.Args[1]
  7535  		if v_1.Op != OpConst64 {
  7536  			break
  7537  		}
  7538  		c := v_1.AuxInt
  7539  		if !(uint32(c) < 16) {
  7540  			break
  7541  		}
  7542  		v.reset(OpMIPSSRAconst)
  7543  		v.AuxInt = c + 16
  7544  		v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
  7545  		v0.AuxInt = 16
  7546  		v0.AddArg(x)
  7547  		v.AddArg(v0)
  7548  		return true
  7549  	}
  7550  	// match: (Rsh16x64 x (Const64 [c]))
  7551  	// cond: uint32(c) >= 16
  7552  	// result: (SRAconst (SLLconst <typ.UInt32> x [16]) [31])
  7553  	for {
  7554  		_ = v.Args[1]
  7555  		x := v.Args[0]
  7556  		v_1 := v.Args[1]
  7557  		if v_1.Op != OpConst64 {
  7558  			break
  7559  		}
  7560  		c := v_1.AuxInt
  7561  		if !(uint32(c) >= 16) {
  7562  			break
  7563  		}
  7564  		v.reset(OpMIPSSRAconst)
  7565  		v.AuxInt = 31
  7566  		v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
  7567  		v0.AuxInt = 16
  7568  		v0.AddArg(x)
  7569  		v.AddArg(v0)
  7570  		return true
  7571  	}
  7572  	return false
  7573  }
  7574  func rewriteValueMIPS_OpRsh16x8_0(v *Value) bool {
  7575  	b := v.Block
  7576  	_ = b
  7577  	typ := &b.Func.Config.Types
  7578  	_ = typ
  7579  	// match: (Rsh16x8 x y)
  7580  	// cond:
  7581  	// result: (SRA (SignExt16to32 x) ( CMOVZ <typ.UInt32> (ZeroExt8to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt8to32 y))))
  7582  	for {
  7583  		_ = v.Args[1]
  7584  		x := v.Args[0]
  7585  		y := v.Args[1]
  7586  		v.reset(OpMIPSSRA)
  7587  		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  7588  		v0.AddArg(x)
  7589  		v.AddArg(v0)
  7590  		v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
  7591  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7592  		v2.AddArg(y)
  7593  		v1.AddArg(v2)
  7594  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7595  		v3.AuxInt = -1
  7596  		v1.AddArg(v3)
  7597  		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7598  		v4.AuxInt = 32
  7599  		v5 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7600  		v5.AddArg(y)
  7601  		v4.AddArg(v5)
  7602  		v1.AddArg(v4)
  7603  		v.AddArg(v1)
  7604  		return true
  7605  	}
  7606  }
  7607  func rewriteValueMIPS_OpRsh32Ux16_0(v *Value) bool {
  7608  	b := v.Block
  7609  	_ = b
  7610  	typ := &b.Func.Config.Types
  7611  	_ = typ
  7612  	// match: (Rsh32Ux16 <t> x y)
  7613  	// cond:
  7614  	// result: (CMOVZ (SRL <t> x (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
  7615  	for {
  7616  		t := v.Type
  7617  		_ = v.Args[1]
  7618  		x := v.Args[0]
  7619  		y := v.Args[1]
  7620  		v.reset(OpMIPSCMOVZ)
  7621  		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7622  		v0.AddArg(x)
  7623  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7624  		v1.AddArg(y)
  7625  		v0.AddArg(v1)
  7626  		v.AddArg(v0)
  7627  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7628  		v2.AuxInt = 0
  7629  		v.AddArg(v2)
  7630  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7631  		v3.AuxInt = 32
  7632  		v4 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7633  		v4.AddArg(y)
  7634  		v3.AddArg(v4)
  7635  		v.AddArg(v3)
  7636  		return true
  7637  	}
  7638  }
  7639  func rewriteValueMIPS_OpRsh32Ux32_0(v *Value) bool {
  7640  	b := v.Block
  7641  	_ = b
  7642  	typ := &b.Func.Config.Types
  7643  	_ = typ
  7644  	// match: (Rsh32Ux32 <t> x y)
  7645  	// cond:
  7646  	// result: (CMOVZ (SRL <t> x y) (MOVWconst [0]) (SGTUconst [32] y))
  7647  	for {
  7648  		t := v.Type
  7649  		_ = v.Args[1]
  7650  		x := v.Args[0]
  7651  		y := v.Args[1]
  7652  		v.reset(OpMIPSCMOVZ)
  7653  		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7654  		v0.AddArg(x)
  7655  		v0.AddArg(y)
  7656  		v.AddArg(v0)
  7657  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7658  		v1.AuxInt = 0
  7659  		v.AddArg(v1)
  7660  		v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7661  		v2.AuxInt = 32
  7662  		v2.AddArg(y)
  7663  		v.AddArg(v2)
  7664  		return true
  7665  	}
  7666  }
  7667  func rewriteValueMIPS_OpRsh32Ux64_0(v *Value) bool {
  7668  	// match: (Rsh32Ux64 x (Const64 [c]))
  7669  	// cond: uint32(c) < 32
  7670  	// result: (SRLconst x [c])
  7671  	for {
  7672  		_ = v.Args[1]
  7673  		x := v.Args[0]
  7674  		v_1 := v.Args[1]
  7675  		if v_1.Op != OpConst64 {
  7676  			break
  7677  		}
  7678  		c := v_1.AuxInt
  7679  		if !(uint32(c) < 32) {
  7680  			break
  7681  		}
  7682  		v.reset(OpMIPSSRLconst)
  7683  		v.AuxInt = c
  7684  		v.AddArg(x)
  7685  		return true
  7686  	}
  7687  	// match: (Rsh32Ux64 _ (Const64 [c]))
  7688  	// cond: uint32(c) >= 32
  7689  	// result: (MOVWconst [0])
  7690  	for {
  7691  		_ = v.Args[1]
  7692  		v_1 := v.Args[1]
  7693  		if v_1.Op != OpConst64 {
  7694  			break
  7695  		}
  7696  		c := v_1.AuxInt
  7697  		if !(uint32(c) >= 32) {
  7698  			break
  7699  		}
  7700  		v.reset(OpMIPSMOVWconst)
  7701  		v.AuxInt = 0
  7702  		return true
  7703  	}
  7704  	return false
  7705  }
  7706  func rewriteValueMIPS_OpRsh32Ux8_0(v *Value) bool {
  7707  	b := v.Block
  7708  	_ = b
  7709  	typ := &b.Func.Config.Types
  7710  	_ = typ
  7711  	// match: (Rsh32Ux8 <t> x y)
  7712  	// cond:
  7713  	// result: (CMOVZ (SRL <t> x (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
  7714  	for {
  7715  		t := v.Type
  7716  		_ = v.Args[1]
  7717  		x := v.Args[0]
  7718  		y := v.Args[1]
  7719  		v.reset(OpMIPSCMOVZ)
  7720  		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7721  		v0.AddArg(x)
  7722  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7723  		v1.AddArg(y)
  7724  		v0.AddArg(v1)
  7725  		v.AddArg(v0)
  7726  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7727  		v2.AuxInt = 0
  7728  		v.AddArg(v2)
  7729  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7730  		v3.AuxInt = 32
  7731  		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7732  		v4.AddArg(y)
  7733  		v3.AddArg(v4)
  7734  		v.AddArg(v3)
  7735  		return true
  7736  	}
  7737  }
  7738  func rewriteValueMIPS_OpRsh32x16_0(v *Value) bool {
  7739  	b := v.Block
  7740  	_ = b
  7741  	typ := &b.Func.Config.Types
  7742  	_ = typ
  7743  	// match: (Rsh32x16 x y)
  7744  	// cond:
  7745  	// result: (SRA x ( CMOVZ <typ.UInt32> (ZeroExt16to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt16to32 y))))
  7746  	for {
  7747  		_ = v.Args[1]
  7748  		x := v.Args[0]
  7749  		y := v.Args[1]
  7750  		v.reset(OpMIPSSRA)
  7751  		v.AddArg(x)
  7752  		v0 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
  7753  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7754  		v1.AddArg(y)
  7755  		v0.AddArg(v1)
  7756  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7757  		v2.AuxInt = -1
  7758  		v0.AddArg(v2)
  7759  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7760  		v3.AuxInt = 32
  7761  		v4 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7762  		v4.AddArg(y)
  7763  		v3.AddArg(v4)
  7764  		v0.AddArg(v3)
  7765  		v.AddArg(v0)
  7766  		return true
  7767  	}
  7768  }
  7769  func rewriteValueMIPS_OpRsh32x32_0(v *Value) bool {
  7770  	b := v.Block
  7771  	_ = b
  7772  	typ := &b.Func.Config.Types
  7773  	_ = typ
  7774  	// match: (Rsh32x32 x y)
  7775  	// cond:
  7776  	// result: (SRA x ( CMOVZ <typ.UInt32> y (MOVWconst [-1]) (SGTUconst [32] y)))
  7777  	for {
  7778  		_ = v.Args[1]
  7779  		x := v.Args[0]
  7780  		y := v.Args[1]
  7781  		v.reset(OpMIPSSRA)
  7782  		v.AddArg(x)
  7783  		v0 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
  7784  		v0.AddArg(y)
  7785  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7786  		v1.AuxInt = -1
  7787  		v0.AddArg(v1)
  7788  		v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7789  		v2.AuxInt = 32
  7790  		v2.AddArg(y)
  7791  		v0.AddArg(v2)
  7792  		v.AddArg(v0)
  7793  		return true
  7794  	}
  7795  }
  7796  func rewriteValueMIPS_OpRsh32x64_0(v *Value) bool {
  7797  	// match: (Rsh32x64 x (Const64 [c]))
  7798  	// cond: uint32(c) < 32
  7799  	// result: (SRAconst x [c])
  7800  	for {
  7801  		_ = v.Args[1]
  7802  		x := v.Args[0]
  7803  		v_1 := v.Args[1]
  7804  		if v_1.Op != OpConst64 {
  7805  			break
  7806  		}
  7807  		c := v_1.AuxInt
  7808  		if !(uint32(c) < 32) {
  7809  			break
  7810  		}
  7811  		v.reset(OpMIPSSRAconst)
  7812  		v.AuxInt = c
  7813  		v.AddArg(x)
  7814  		return true
  7815  	}
  7816  	// match: (Rsh32x64 x (Const64 [c]))
  7817  	// cond: uint32(c) >= 32
  7818  	// result: (SRAconst x [31])
  7819  	for {
  7820  		_ = v.Args[1]
  7821  		x := v.Args[0]
  7822  		v_1 := v.Args[1]
  7823  		if v_1.Op != OpConst64 {
  7824  			break
  7825  		}
  7826  		c := v_1.AuxInt
  7827  		if !(uint32(c) >= 32) {
  7828  			break
  7829  		}
  7830  		v.reset(OpMIPSSRAconst)
  7831  		v.AuxInt = 31
  7832  		v.AddArg(x)
  7833  		return true
  7834  	}
  7835  	return false
  7836  }
  7837  func rewriteValueMIPS_OpRsh32x8_0(v *Value) bool {
  7838  	b := v.Block
  7839  	_ = b
  7840  	typ := &b.Func.Config.Types
  7841  	_ = typ
  7842  	// match: (Rsh32x8 x y)
  7843  	// cond:
  7844  	// result: (SRA x ( CMOVZ <typ.UInt32> (ZeroExt8to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt8to32 y))))
  7845  	for {
  7846  		_ = v.Args[1]
  7847  		x := v.Args[0]
  7848  		y := v.Args[1]
  7849  		v.reset(OpMIPSSRA)
  7850  		v.AddArg(x)
  7851  		v0 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
  7852  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7853  		v1.AddArg(y)
  7854  		v0.AddArg(v1)
  7855  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7856  		v2.AuxInt = -1
  7857  		v0.AddArg(v2)
  7858  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7859  		v3.AuxInt = 32
  7860  		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7861  		v4.AddArg(y)
  7862  		v3.AddArg(v4)
  7863  		v0.AddArg(v3)
  7864  		v.AddArg(v0)
  7865  		return true
  7866  	}
  7867  }
  7868  func rewriteValueMIPS_OpRsh8Ux16_0(v *Value) bool {
  7869  	b := v.Block
  7870  	_ = b
  7871  	typ := &b.Func.Config.Types
  7872  	_ = typ
  7873  	// match: (Rsh8Ux16 <t> x y)
  7874  	// cond:
  7875  	// result: (CMOVZ (SRL <t> (ZeroExt8to32 x) (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
  7876  	for {
  7877  		t := v.Type
  7878  		_ = v.Args[1]
  7879  		x := v.Args[0]
  7880  		y := v.Args[1]
  7881  		v.reset(OpMIPSCMOVZ)
  7882  		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7883  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7884  		v1.AddArg(x)
  7885  		v0.AddArg(v1)
  7886  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7887  		v2.AddArg(y)
  7888  		v0.AddArg(v2)
  7889  		v.AddArg(v0)
  7890  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7891  		v3.AuxInt = 0
  7892  		v.AddArg(v3)
  7893  		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7894  		v4.AuxInt = 32
  7895  		v5 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7896  		v5.AddArg(y)
  7897  		v4.AddArg(v5)
  7898  		v.AddArg(v4)
  7899  		return true
  7900  	}
  7901  }
  7902  func rewriteValueMIPS_OpRsh8Ux32_0(v *Value) bool {
  7903  	b := v.Block
  7904  	_ = b
  7905  	typ := &b.Func.Config.Types
  7906  	_ = typ
  7907  	// match: (Rsh8Ux32 <t> x y)
  7908  	// cond:
  7909  	// result: (CMOVZ (SRL <t> (ZeroExt8to32 x) y) (MOVWconst [0]) (SGTUconst [32] y))
  7910  	for {
  7911  		t := v.Type
  7912  		_ = v.Args[1]
  7913  		x := v.Args[0]
  7914  		y := v.Args[1]
  7915  		v.reset(OpMIPSCMOVZ)
  7916  		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7917  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7918  		v1.AddArg(x)
  7919  		v0.AddArg(v1)
  7920  		v0.AddArg(y)
  7921  		v.AddArg(v0)
  7922  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7923  		v2.AuxInt = 0
  7924  		v.AddArg(v2)
  7925  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7926  		v3.AuxInt = 32
  7927  		v3.AddArg(y)
  7928  		v.AddArg(v3)
  7929  		return true
  7930  	}
  7931  }
  7932  func rewriteValueMIPS_OpRsh8Ux64_0(v *Value) bool {
  7933  	b := v.Block
  7934  	_ = b
  7935  	typ := &b.Func.Config.Types
  7936  	_ = typ
  7937  	// match: (Rsh8Ux64 x (Const64 [c]))
  7938  	// cond: uint32(c) < 8
  7939  	// result: (SRLconst (SLLconst <typ.UInt32> x [24]) [c+24])
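        	// Shifting left by 24 parks the byte at the top of the word; the logical right shift
        	// by c+24 then zero-extends it and performs the shift with a single pair of constant shifts.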
  7940  	for {
  7941  		_ = v.Args[1]
  7942  		x := v.Args[0]
  7943  		v_1 := v.Args[1]
  7944  		if v_1.Op != OpConst64 {
  7945  			break
  7946  		}
  7947  		c := v_1.AuxInt
  7948  		if !(uint32(c) < 8) {
  7949  			break
  7950  		}
  7951  		v.reset(OpMIPSSRLconst)
  7952  		v.AuxInt = c + 24
  7953  		v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
  7954  		v0.AuxInt = 24
  7955  		v0.AddArg(x)
  7956  		v.AddArg(v0)
  7957  		return true
  7958  	}
  7959  	// match: (Rsh8Ux64 _ (Const64 [c]))
  7960  	// cond: uint32(c) >= 8
  7961  	// result: (MOVWconst [0])
  7962  	for {
  7963  		_ = v.Args[1]
  7964  		v_1 := v.Args[1]
  7965  		if v_1.Op != OpConst64 {
  7966  			break
  7967  		}
  7968  		c := v_1.AuxInt
  7969  		if !(uint32(c) >= 8) {
  7970  			break
  7971  		}
  7972  		v.reset(OpMIPSMOVWconst)
  7973  		v.AuxInt = 0
  7974  		return true
  7975  	}
  7976  	return false
  7977  }
  7978  func rewriteValueMIPS_OpRsh8Ux8_0(v *Value) bool {
  7979  	b := v.Block
  7980  	_ = b
  7981  	typ := &b.Func.Config.Types
  7982  	_ = typ
  7983  	// match: (Rsh8Ux8 <t> x y)
  7984  	// cond:
  7985  	// result: (CMOVZ (SRL <t> (ZeroExt8to32 x) (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
  7986  	for {
  7987  		t := v.Type
  7988  		_ = v.Args[1]
  7989  		x := v.Args[0]
  7990  		y := v.Args[1]
  7991  		v.reset(OpMIPSCMOVZ)
  7992  		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7993  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7994  		v1.AddArg(x)
  7995  		v0.AddArg(v1)
  7996  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7997  		v2.AddArg(y)
  7998  		v0.AddArg(v2)
  7999  		v.AddArg(v0)
  8000  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  8001  		v3.AuxInt = 0
  8002  		v.AddArg(v3)
  8003  		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  8004  		v4.AuxInt = 32
  8005  		v5 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  8006  		v5.AddArg(y)
  8007  		v4.AddArg(v5)
  8008  		v.AddArg(v4)
  8009  		return true
  8010  	}
  8011  }
  8012  func rewriteValueMIPS_OpRsh8x16_0(v *Value) bool {
  8013  	b := v.Block
  8014  	_ = b
  8015  	typ := &b.Func.Config.Types
  8016  	_ = typ
  8017  	// match: (Rsh8x16 x y)
  8018  	// cond:
  8019  	// result: (SRA (SignExt8to32 x) ( CMOVZ <typ.UInt32> (ZeroExt16to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt16to32 y))))
  8020  	for {
  8021  		_ = v.Args[1]
  8022  		x := v.Args[0]
  8023  		y := v.Args[1]
  8024  		v.reset(OpMIPSSRA)
  8025  		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  8026  		v0.AddArg(x)
  8027  		v.AddArg(v0)
  8028  		v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
  8029  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  8030  		v2.AddArg(y)
  8031  		v1.AddArg(v2)
  8032  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  8033  		v3.AuxInt = -1
  8034  		v1.AddArg(v3)
  8035  		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  8036  		v4.AuxInt = 32
  8037  		v5 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  8038  		v5.AddArg(y)
  8039  		v4.AddArg(v5)
  8040  		v1.AddArg(v4)
  8041  		v.AddArg(v1)
  8042  		return true
  8043  	}
  8044  }
  8045  func rewriteValueMIPS_OpRsh8x32_0(v *Value) bool {
  8046  	b := v.Block
  8047  	_ = b
  8048  	typ := &b.Func.Config.Types
  8049  	_ = typ
  8050  	// match: (Rsh8x32 x y)
  8051  	// cond:
  8052  	// result: (SRA (SignExt8to32 x) ( CMOVZ <typ.UInt32> y (MOVWconst [-1]) (SGTUconst [32] y)))
  8053  	for {
  8054  		_ = v.Args[1]
  8055  		x := v.Args[0]
  8056  		y := v.Args[1]
  8057  		v.reset(OpMIPSSRA)
  8058  		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  8059  		v0.AddArg(x)
  8060  		v.AddArg(v0)
  8061  		v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
  8062  		v1.AddArg(y)
  8063  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  8064  		v2.AuxInt = -1
  8065  		v1.AddArg(v2)
  8066  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  8067  		v3.AuxInt = 32
  8068  		v3.AddArg(y)
  8069  		v1.AddArg(v3)
  8070  		v.AddArg(v1)
  8071  		return true
  8072  	}
  8073  }
  8074  func rewriteValueMIPS_OpRsh8x64_0(v *Value) bool {
  8075  	b := v.Block
  8076  	_ = b
  8077  	typ := &b.Func.Config.Types
  8078  	_ = typ
  8079  	// match: (Rsh8x64 x (Const64 [c]))
  8080  	// cond: uint32(c) < 8
  8081  	// result: (SRAconst (SLLconst <typ.UInt32> x [24]) [c+24])
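        	// Same trick as the unsigned case, but with an arithmetic right shift so the byte's
        	// sign bit (now at bit 31) is propagated; the rule below handles c >= 8 by shifting
        	// all the way down to bit 31.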
  8082  	for {
  8083  		_ = v.Args[1]
  8084  		x := v.Args[0]
  8085  		v_1 := v.Args[1]
  8086  		if v_1.Op != OpConst64 {
  8087  			break
  8088  		}
  8089  		c := v_1.AuxInt
  8090  		if !(uint32(c) < 8) {
  8091  			break
  8092  		}
  8093  		v.reset(OpMIPSSRAconst)
  8094  		v.AuxInt = c + 24
  8095  		v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
  8096  		v0.AuxInt = 24
  8097  		v0.AddArg(x)
  8098  		v.AddArg(v0)
  8099  		return true
  8100  	}
  8101  	// match: (Rsh8x64 x (Const64 [c]))
  8102  	// cond: uint32(c) >= 8
  8103  	// result: (SRAconst (SLLconst <typ.UInt32> x [24]) [31])
  8104  	for {
  8105  		_ = v.Args[1]
  8106  		x := v.Args[0]
  8107  		v_1 := v.Args[1]
  8108  		if v_1.Op != OpConst64 {
  8109  			break
  8110  		}
  8111  		c := v_1.AuxInt
  8112  		if !(uint32(c) >= 8) {
  8113  			break
  8114  		}
  8115  		v.reset(OpMIPSSRAconst)
  8116  		v.AuxInt = 31
  8117  		v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
  8118  		v0.AuxInt = 24
  8119  		v0.AddArg(x)
  8120  		v.AddArg(v0)
  8121  		return true
  8122  	}
  8123  	return false
  8124  }
  8125  func rewriteValueMIPS_OpRsh8x8_0(v *Value) bool {
  8126  	b := v.Block
  8127  	_ = b
  8128  	typ := &b.Func.Config.Types
  8129  	_ = typ
  8130  	// match: (Rsh8x8 x y)
  8131  	// cond:
  8132  	// result: (SRA (SignExt8to32 x) ( CMOVZ <typ.UInt32> (ZeroExt8to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt8to32 y))))
  8133  	for {
  8134  		_ = v.Args[1]
  8135  		x := v.Args[0]
  8136  		y := v.Args[1]
  8137  		v.reset(OpMIPSSRA)
  8138  		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  8139  		v0.AddArg(x)
  8140  		v.AddArg(v0)
  8141  		v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
  8142  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  8143  		v2.AddArg(y)
  8144  		v1.AddArg(v2)
  8145  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  8146  		v3.AuxInt = -1
  8147  		v1.AddArg(v3)
  8148  		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  8149  		v4.AuxInt = 32
  8150  		v5 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  8151  		v5.AddArg(y)
  8152  		v4.AddArg(v5)
  8153  		v1.AddArg(v4)
  8154  		v.AddArg(v1)
  8155  		return true
  8156  	}
  8157  }
  8158  func rewriteValueMIPS_OpSelect0_0(v *Value) bool {
  8159  	b := v.Block
  8160  	_ = b
  8161  	typ := &b.Func.Config.Types
  8162  	_ = typ
  8163  	// match: (Select0 (Add32carry <t> x y))
  8164  	// cond:
  8165  	// result: (ADD <t.FieldType(0)> x y)
  8166  	for {
  8167  		v_0 := v.Args[0]
  8168  		if v_0.Op != OpAdd32carry {
  8169  			break
  8170  		}
  8171  		t := v_0.Type
  8172  		_ = v_0.Args[1]
  8173  		x := v_0.Args[0]
  8174  		y := v_0.Args[1]
  8175  		v.reset(OpMIPSADD)
  8176  		v.Type = t.FieldType(0)
  8177  		v.AddArg(x)
  8178  		v.AddArg(y)
  8179  		return true
  8180  	}
  8181  	// match: (Select0 (Sub32carry <t> x y))
  8182  	// cond:
  8183  	// result: (SUB <t.FieldType(0)> x y)
  8184  	for {
  8185  		v_0 := v.Args[0]
  8186  		if v_0.Op != OpSub32carry {
  8187  			break
  8188  		}
  8189  		t := v_0.Type
  8190  		_ = v_0.Args[1]
  8191  		x := v_0.Args[0]
  8192  		y := v_0.Args[1]
  8193  		v.reset(OpMIPSSUB)
  8194  		v.Type = t.FieldType(0)
  8195  		v.AddArg(x)
  8196  		v.AddArg(y)
  8197  		return true
  8198  	}
  8199  	// match: (Select0 (MULTU (MOVWconst [0]) _))
  8200  	// cond:
  8201  	// result: (MOVWconst [0])
  8202  	for {
  8203  		v_0 := v.Args[0]
  8204  		if v_0.Op != OpMIPSMULTU {
  8205  			break
  8206  		}
  8207  		_ = v_0.Args[1]
  8208  		v_0_0 := v_0.Args[0]
  8209  		if v_0_0.Op != OpMIPSMOVWconst {
  8210  			break
  8211  		}
  8212  		if v_0_0.AuxInt != 0 {
  8213  			break
  8214  		}
  8215  		v.reset(OpMIPSMOVWconst)
  8216  		v.AuxInt = 0
  8217  		return true
  8218  	}
  8219  	// match: (Select0 (MULTU _ (MOVWconst [0])))
  8220  	// cond:
  8221  	// result: (MOVWconst [0])
  8222  	for {
  8223  		v_0 := v.Args[0]
  8224  		if v_0.Op != OpMIPSMULTU {
  8225  			break
  8226  		}
  8227  		_ = v_0.Args[1]
  8228  		v_0_1 := v_0.Args[1]
  8229  		if v_0_1.Op != OpMIPSMOVWconst {
  8230  			break
  8231  		}
  8232  		if v_0_1.AuxInt != 0 {
  8233  			break
  8234  		}
  8235  		v.reset(OpMIPSMOVWconst)
  8236  		v.AuxInt = 0
  8237  		return true
  8238  	}
  8239  	// match: (Select0 (MULTU (MOVWconst [1]) _))
  8240  	// cond:
  8241  	// result: (MOVWconst [0])
  8242  	for {
  8243  		v_0 := v.Args[0]
  8244  		if v_0.Op != OpMIPSMULTU {
  8245  			break
  8246  		}
  8247  		_ = v_0.Args[1]
  8248  		v_0_0 := v_0.Args[0]
  8249  		if v_0_0.Op != OpMIPSMOVWconst {
  8250  			break
  8251  		}
  8252  		if v_0_0.AuxInt != 1 {
  8253  			break
  8254  		}
  8255  		v.reset(OpMIPSMOVWconst)
  8256  		v.AuxInt = 0
  8257  		return true
  8258  	}
  8259  	// match: (Select0 (MULTU _ (MOVWconst [1])))
  8260  	// cond:
  8261  	// result: (MOVWconst [0])
  8262  	for {
  8263  		v_0 := v.Args[0]
  8264  		if v_0.Op != OpMIPSMULTU {
  8265  			break
  8266  		}
  8267  		_ = v_0.Args[1]
  8268  		v_0_1 := v_0.Args[1]
  8269  		if v_0_1.Op != OpMIPSMOVWconst {
  8270  			break
  8271  		}
  8272  		if v_0_1.AuxInt != 1 {
  8273  			break
  8274  		}
  8275  		v.reset(OpMIPSMOVWconst)
  8276  		v.AuxInt = 0
  8277  		return true
  8278  	}
  8279  	// match: (Select0 (MULTU (MOVWconst [-1]) x))
  8280  	// cond:
  8281  	// result: (CMOVZ (ADDconst <x.Type> [-1] x) (MOVWconst [0]) x)
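        	// The high word of (2^32-1)*x is x-1 for any nonzero x and 0 when x is 0, which is
        	// exactly what the CMOVZ keyed on x selects between.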
  8282  	for {
  8283  		v_0 := v.Args[0]
  8284  		if v_0.Op != OpMIPSMULTU {
  8285  			break
  8286  		}
  8287  		_ = v_0.Args[1]
  8288  		v_0_0 := v_0.Args[0]
  8289  		if v_0_0.Op != OpMIPSMOVWconst {
  8290  			break
  8291  		}
  8292  		if v_0_0.AuxInt != -1 {
  8293  			break
  8294  		}
  8295  		x := v_0.Args[1]
  8296  		v.reset(OpMIPSCMOVZ)
  8297  		v0 := b.NewValue0(v.Pos, OpMIPSADDconst, x.Type)
  8298  		v0.AuxInt = -1
  8299  		v0.AddArg(x)
  8300  		v.AddArg(v0)
  8301  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  8302  		v1.AuxInt = 0
  8303  		v.AddArg(v1)
  8304  		v.AddArg(x)
  8305  		return true
  8306  	}
  8307  	// match: (Select0 (MULTU x (MOVWconst [-1])))
  8308  	// cond:
  8309  	// result: (CMOVZ (ADDconst <x.Type> [-1] x) (MOVWconst [0]) x)
  8310  	for {
  8311  		v_0 := v.Args[0]
  8312  		if v_0.Op != OpMIPSMULTU {
  8313  			break
  8314  		}
  8315  		_ = v_0.Args[1]
  8316  		x := v_0.Args[0]
  8317  		v_0_1 := v_0.Args[1]
  8318  		if v_0_1.Op != OpMIPSMOVWconst {
  8319  			break
  8320  		}
  8321  		if v_0_1.AuxInt != -1 {
  8322  			break
  8323  		}
  8324  		v.reset(OpMIPSCMOVZ)
  8325  		v0 := b.NewValue0(v.Pos, OpMIPSADDconst, x.Type)
  8326  		v0.AuxInt = -1
  8327  		v0.AddArg(x)
  8328  		v.AddArg(v0)
  8329  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  8330  		v1.AuxInt = 0
  8331  		v.AddArg(v1)
  8332  		v.AddArg(x)
  8333  		return true
  8334  	}
  8335  	// match: (Select0 (MULTU (MOVWconst [c]) x))
  8336  	// cond: isPowerOfTwo(int64(uint32(c)))
  8337  	// result: (SRLconst [32-log2(int64(uint32(c)))] x)
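        	// Select0 is the high word of the product, and for c == 2^k the high word of x*c is
        	// simply x shifted right by 32-k.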
  8338  	for {
  8339  		v_0 := v.Args[0]
  8340  		if v_0.Op != OpMIPSMULTU {
  8341  			break
  8342  		}
  8343  		_ = v_0.Args[1]
  8344  		v_0_0 := v_0.Args[0]
  8345  		if v_0_0.Op != OpMIPSMOVWconst {
  8346  			break
  8347  		}
  8348  		c := v_0_0.AuxInt
  8349  		x := v_0.Args[1]
  8350  		if !(isPowerOfTwo(int64(uint32(c)))) {
  8351  			break
  8352  		}
  8353  		v.reset(OpMIPSSRLconst)
  8354  		v.AuxInt = 32 - log2(int64(uint32(c)))
  8355  		v.AddArg(x)
  8356  		return true
  8357  	}
  8358  	// match: (Select0 (MULTU x (MOVWconst [c])))
  8359  	// cond: isPowerOfTwo(int64(uint32(c)))
  8360  	// result: (SRLconst [32-log2(int64(uint32(c)))] x)
  8361  	for {
  8362  		v_0 := v.Args[0]
  8363  		if v_0.Op != OpMIPSMULTU {
  8364  			break
  8365  		}
  8366  		_ = v_0.Args[1]
  8367  		x := v_0.Args[0]
  8368  		v_0_1 := v_0.Args[1]
  8369  		if v_0_1.Op != OpMIPSMOVWconst {
  8370  			break
  8371  		}
  8372  		c := v_0_1.AuxInt
  8373  		if !(isPowerOfTwo(int64(uint32(c)))) {
  8374  			break
  8375  		}
  8376  		v.reset(OpMIPSSRLconst)
  8377  		v.AuxInt = 32 - log2(int64(uint32(c)))
  8378  		v.AddArg(x)
  8379  		return true
  8380  	}
  8381  	return false
  8382  }
  8383  func rewriteValueMIPS_OpSelect0_10(v *Value) bool {
  8384  	// match: (Select0 (MULTU (MOVWconst [c]) (MOVWconst [d])))
  8385  	// cond:
  8386  	// result: (MOVWconst [(c*d)>>32])
  8387  	for {
  8388  		v_0 := v.Args[0]
  8389  		if v_0.Op != OpMIPSMULTU {
  8390  			break
  8391  		}
  8392  		_ = v_0.Args[1]
  8393  		v_0_0 := v_0.Args[0]
  8394  		if v_0_0.Op != OpMIPSMOVWconst {
  8395  			break
  8396  		}
  8397  		c := v_0_0.AuxInt
  8398  		v_0_1 := v_0.Args[1]
  8399  		if v_0_1.Op != OpMIPSMOVWconst {
  8400  			break
  8401  		}
  8402  		d := v_0_1.AuxInt
  8403  		v.reset(OpMIPSMOVWconst)
  8404  		v.AuxInt = (c * d) >> 32
  8405  		return true
  8406  	}
  8407  	// match: (Select0 (MULTU (MOVWconst [d]) (MOVWconst [c])))
  8408  	// cond:
  8409  	// result: (MOVWconst [(c*d)>>32])
  8410  	for {
  8411  		v_0 := v.Args[0]
  8412  		if v_0.Op != OpMIPSMULTU {
  8413  			break
  8414  		}
  8415  		_ = v_0.Args[1]
  8416  		v_0_0 := v_0.Args[0]
  8417  		if v_0_0.Op != OpMIPSMOVWconst {
  8418  			break
  8419  		}
  8420  		d := v_0_0.AuxInt
  8421  		v_0_1 := v_0.Args[1]
  8422  		if v_0_1.Op != OpMIPSMOVWconst {
  8423  			break
  8424  		}
  8425  		c := v_0_1.AuxInt
  8426  		v.reset(OpMIPSMOVWconst)
  8427  		v.AuxInt = (c * d) >> 32
  8428  		return true
  8429  	}
  8430  	// match: (Select0 (DIV (MOVWconst [c]) (MOVWconst [d])))
  8431  	// cond:
  8432  	// result: (MOVWconst [int64(int32(c)%int32(d))])
  8433  	for {
  8434  		v_0 := v.Args[0]
  8435  		if v_0.Op != OpMIPSDIV {
  8436  			break
  8437  		}
  8438  		_ = v_0.Args[1]
  8439  		v_0_0 := v_0.Args[0]
  8440  		if v_0_0.Op != OpMIPSMOVWconst {
  8441  			break
  8442  		}
  8443  		c := v_0_0.AuxInt
  8444  		v_0_1 := v_0.Args[1]
  8445  		if v_0_1.Op != OpMIPSMOVWconst {
  8446  			break
  8447  		}
  8448  		d := v_0_1.AuxInt
  8449  		v.reset(OpMIPSMOVWconst)
  8450  		v.AuxInt = int64(int32(c) % int32(d))
  8451  		return true
  8452  	}
  8453  	// match: (Select0 (DIVU (MOVWconst [c]) (MOVWconst [d])))
  8454  	// cond:
  8455  	// result: (MOVWconst [int64(int32(uint32(c)%uint32(d)))])
  8456  	for {
  8457  		v_0 := v.Args[0]
  8458  		if v_0.Op != OpMIPSDIVU {
  8459  			break
  8460  		}
  8461  		_ = v_0.Args[1]
  8462  		v_0_0 := v_0.Args[0]
  8463  		if v_0_0.Op != OpMIPSMOVWconst {
  8464  			break
  8465  		}
  8466  		c := v_0_0.AuxInt
  8467  		v_0_1 := v_0.Args[1]
  8468  		if v_0_1.Op != OpMIPSMOVWconst {
  8469  			break
  8470  		}
  8471  		d := v_0_1.AuxInt
  8472  		v.reset(OpMIPSMOVWconst)
  8473  		v.AuxInt = int64(int32(uint32(c) % uint32(d)))
  8474  		return true
  8475  	}
  8476  	return false
  8477  }
  8478  func rewriteValueMIPS_OpSelect1_0(v *Value) bool {
  8479  	b := v.Block
  8480  	_ = b
  8481  	typ := &b.Func.Config.Types
  8482  	_ = typ
  8483  	// match: (Select1 (Add32carry <t> x y))
  8484  	// cond:
  8485  	// result: (SGTU <typ.Bool> x (ADD <t.FieldType(0)> x y))
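        	// The carry out of x+y is 1 exactly when the 32-bit sum wraps, i.e. when the sum is
        	// unsigned-less-than x, hence the SGTU comparison.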
  8486  	for {
  8487  		v_0 := v.Args[0]
  8488  		if v_0.Op != OpAdd32carry {
  8489  			break
  8490  		}
  8491  		t := v_0.Type
  8492  		_ = v_0.Args[1]
  8493  		x := v_0.Args[0]
  8494  		y := v_0.Args[1]
  8495  		v.reset(OpMIPSSGTU)
  8496  		v.Type = typ.Bool
  8497  		v.AddArg(x)
  8498  		v0 := b.NewValue0(v.Pos, OpMIPSADD, t.FieldType(0))
  8499  		v0.AddArg(x)
  8500  		v0.AddArg(y)
  8501  		v.AddArg(v0)
  8502  		return true
  8503  	}
  8504  	// match: (Select1 (Sub32carry <t> x y))
  8505  	// cond:
  8506  	// result: (SGTU <typ.Bool> (SUB <t.FieldType(0)> x y) x)
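        	// Likewise, the borrow out of x-y is 1 exactly when the difference wraps, i.e. when
        	// x-y is unsigned-greater-than x.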
  8507  	for {
  8508  		v_0 := v.Args[0]
  8509  		if v_0.Op != OpSub32carry {
  8510  			break
  8511  		}
  8512  		t := v_0.Type
  8513  		_ = v_0.Args[1]
  8514  		x := v_0.Args[0]
  8515  		y := v_0.Args[1]
  8516  		v.reset(OpMIPSSGTU)
  8517  		v.Type = typ.Bool
  8518  		v0 := b.NewValue0(v.Pos, OpMIPSSUB, t.FieldType(0))
  8519  		v0.AddArg(x)
  8520  		v0.AddArg(y)
  8521  		v.AddArg(v0)
  8522  		v.AddArg(x)
  8523  		return true
  8524  	}
  8525  	// match: (Select1 (MULTU (MOVWconst [0]) _))
  8526  	// cond:
  8527  	// result: (MOVWconst [0])
  8528  	for {
  8529  		v_0 := v.Args[0]
  8530  		if v_0.Op != OpMIPSMULTU {
  8531  			break
  8532  		}
  8533  		_ = v_0.Args[1]
  8534  		v_0_0 := v_0.Args[0]
  8535  		if v_0_0.Op != OpMIPSMOVWconst {
  8536  			break
  8537  		}
  8538  		if v_0_0.AuxInt != 0 {
  8539  			break
  8540  		}
  8541  		v.reset(OpMIPSMOVWconst)
  8542  		v.AuxInt = 0
  8543  		return true
  8544  	}
  8545  	// match: (Select1 (MULTU _ (MOVWconst [0])))
  8546  	// cond:
  8547  	// result: (MOVWconst [0])
  8548  	for {
  8549  		v_0 := v.Args[0]
  8550  		if v_0.Op != OpMIPSMULTU {
  8551  			break
  8552  		}
  8553  		_ = v_0.Args[1]
  8554  		v_0_1 := v_0.Args[1]
  8555  		if v_0_1.Op != OpMIPSMOVWconst {
  8556  			break
  8557  		}
  8558  		if v_0_1.AuxInt != 0 {
  8559  			break
  8560  		}
  8561  		v.reset(OpMIPSMOVWconst)
  8562  		v.AuxInt = 0
  8563  		return true
  8564  	}
  8565  	// match: (Select1 (MULTU (MOVWconst [1]) x))
  8566  	// cond:
  8567  	// result: x
  8568  	for {
  8569  		v_0 := v.Args[0]
  8570  		if v_0.Op != OpMIPSMULTU {
  8571  			break
  8572  		}
  8573  		_ = v_0.Args[1]
  8574  		v_0_0 := v_0.Args[0]
  8575  		if v_0_0.Op != OpMIPSMOVWconst {
  8576  			break
  8577  		}
  8578  		if v_0_0.AuxInt != 1 {
  8579  			break
  8580  		}
  8581  		x := v_0.Args[1]
  8582  		v.reset(OpCopy)
  8583  		v.Type = x.Type
  8584  		v.AddArg(x)
  8585  		return true
  8586  	}
  8587  	// match: (Select1 (MULTU x (MOVWconst [1])))
  8588  	// cond:
  8589  	// result: x
  8590  	for {
  8591  		v_0 := v.Args[0]
  8592  		if v_0.Op != OpMIPSMULTU {
  8593  			break
  8594  		}
  8595  		_ = v_0.Args[1]
  8596  		x := v_0.Args[0]
  8597  		v_0_1 := v_0.Args[1]
  8598  		if v_0_1.Op != OpMIPSMOVWconst {
  8599  			break
  8600  		}
  8601  		if v_0_1.AuxInt != 1 {
  8602  			break
  8603  		}
  8604  		v.reset(OpCopy)
  8605  		v.Type = x.Type
  8606  		v.AddArg(x)
  8607  		return true
  8608  	}
  8609  	// match: (Select1 (MULTU (MOVWconst [-1]) x))
  8610  	// cond:
  8611  	// result: (NEG <x.Type> x)
  8612  	for {
  8613  		v_0 := v.Args[0]
  8614  		if v_0.Op != OpMIPSMULTU {
  8615  			break
  8616  		}
  8617  		_ = v_0.Args[1]
  8618  		v_0_0 := v_0.Args[0]
  8619  		if v_0_0.Op != OpMIPSMOVWconst {
  8620  			break
  8621  		}
  8622  		if v_0_0.AuxInt != -1 {
  8623  			break
  8624  		}
  8625  		x := v_0.Args[1]
  8626  		v.reset(OpMIPSNEG)
  8627  		v.Type = x.Type
  8628  		v.AddArg(x)
  8629  		return true
  8630  	}
  8631  	// match: (Select1 (MULTU x (MOVWconst [-1])))
  8632  	// cond:
  8633  	// result: (NEG <x.Type> x)
  8634  	for {
  8635  		v_0 := v.Args[0]
  8636  		if v_0.Op != OpMIPSMULTU {
  8637  			break
  8638  		}
  8639  		_ = v_0.Args[1]
  8640  		x := v_0.Args[0]
  8641  		v_0_1 := v_0.Args[1]
  8642  		if v_0_1.Op != OpMIPSMOVWconst {
  8643  			break
  8644  		}
  8645  		if v_0_1.AuxInt != -1 {
  8646  			break
  8647  		}
  8648  		v.reset(OpMIPSNEG)
  8649  		v.Type = x.Type
  8650  		v.AddArg(x)
  8651  		return true
  8652  	}
  8653  	// match: (Select1 (MULTU (MOVWconst [c]) x))
  8654  	// cond: isPowerOfTwo(int64(uint32(c)))
  8655  	// result: (SLLconst [log2(int64(uint32(c)))] x)
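        	// Select1 is the low word of the product, so multiplying by 2^k is just a left shift by k.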
  8656  	for {
  8657  		v_0 := v.Args[0]
  8658  		if v_0.Op != OpMIPSMULTU {
  8659  			break
  8660  		}
  8661  		_ = v_0.Args[1]
  8662  		v_0_0 := v_0.Args[0]
  8663  		if v_0_0.Op != OpMIPSMOVWconst {
  8664  			break
  8665  		}
  8666  		c := v_0_0.AuxInt
  8667  		x := v_0.Args[1]
  8668  		if !(isPowerOfTwo(int64(uint32(c)))) {
  8669  			break
  8670  		}
  8671  		v.reset(OpMIPSSLLconst)
  8672  		v.AuxInt = log2(int64(uint32(c)))
  8673  		v.AddArg(x)
  8674  		return true
  8675  	}
  8676  	// match: (Select1 (MULTU x (MOVWconst [c])))
  8677  	// cond: isPowerOfTwo(int64(uint32(c)))
  8678  	// result: (SLLconst [log2(int64(uint32(c)))] x)
  8679  	for {
  8680  		v_0 := v.Args[0]
  8681  		if v_0.Op != OpMIPSMULTU {
  8682  			break
  8683  		}
  8684  		_ = v_0.Args[1]
  8685  		x := v_0.Args[0]
  8686  		v_0_1 := v_0.Args[1]
  8687  		if v_0_1.Op != OpMIPSMOVWconst {
  8688  			break
  8689  		}
  8690  		c := v_0_1.AuxInt
  8691  		if !(isPowerOfTwo(int64(uint32(c)))) {
  8692  			break
  8693  		}
  8694  		v.reset(OpMIPSSLLconst)
  8695  		v.AuxInt = log2(int64(uint32(c)))
  8696  		v.AddArg(x)
  8697  		return true
  8698  	}
  8699  	return false
  8700  }
  8701  func rewriteValueMIPS_OpSelect1_10(v *Value) bool {
  8702  	// match: (Select1 (MULTU (MOVWconst [c]) (MOVWconst [d])))
  8703  	// cond:
  8704  	// result: (MOVWconst [int64(int32(uint32(c)*uint32(d)))])
  8705  	for {
  8706  		v_0 := v.Args[0]
  8707  		if v_0.Op != OpMIPSMULTU {
  8708  			break
  8709  		}
  8710  		_ = v_0.Args[1]
  8711  		v_0_0 := v_0.Args[0]
  8712  		if v_0_0.Op != OpMIPSMOVWconst {
  8713  			break
  8714  		}
  8715  		c := v_0_0.AuxInt
  8716  		v_0_1 := v_0.Args[1]
  8717  		if v_0_1.Op != OpMIPSMOVWconst {
  8718  			break
  8719  		}
  8720  		d := v_0_1.AuxInt
  8721  		v.reset(OpMIPSMOVWconst)
  8722  		v.AuxInt = int64(int32(uint32(c) * uint32(d)))
  8723  		return true
  8724  	}
  8725  	// match: (Select1 (MULTU (MOVWconst [d]) (MOVWconst [c])))
  8726  	// cond:
  8727  	// result: (MOVWconst [int64(int32(uint32(c)*uint32(d)))])
  8728  	for {
  8729  		v_0 := v.Args[0]
  8730  		if v_0.Op != OpMIPSMULTU {
  8731  			break
  8732  		}
  8733  		_ = v_0.Args[1]
  8734  		v_0_0 := v_0.Args[0]
  8735  		if v_0_0.Op != OpMIPSMOVWconst {
  8736  			break
  8737  		}
  8738  		d := v_0_0.AuxInt
  8739  		v_0_1 := v_0.Args[1]
  8740  		if v_0_1.Op != OpMIPSMOVWconst {
  8741  			break
  8742  		}
  8743  		c := v_0_1.AuxInt
  8744  		v.reset(OpMIPSMOVWconst)
  8745  		v.AuxInt = int64(int32(uint32(c) * uint32(d)))
  8746  		return true
  8747  	}
  8748  	// match: (Select1 (DIV (MOVWconst [c]) (MOVWconst [d])))
  8749  	// cond:
  8750  	// result: (MOVWconst [int64(int32(c)/int32(d))])
  8751  	for {
  8752  		v_0 := v.Args[0]
  8753  		if v_0.Op != OpMIPSDIV {
  8754  			break
  8755  		}
  8756  		_ = v_0.Args[1]
  8757  		v_0_0 := v_0.Args[0]
  8758  		if v_0_0.Op != OpMIPSMOVWconst {
  8759  			break
  8760  		}
  8761  		c := v_0_0.AuxInt
  8762  		v_0_1 := v_0.Args[1]
  8763  		if v_0_1.Op != OpMIPSMOVWconst {
  8764  			break
  8765  		}
  8766  		d := v_0_1.AuxInt
  8767  		v.reset(OpMIPSMOVWconst)
  8768  		v.AuxInt = int64(int32(c) / int32(d))
  8769  		return true
  8770  	}
  8771  	// match: (Select1 (DIVU (MOVWconst [c]) (MOVWconst [d])))
  8772  	// cond:
  8773  	// result: (MOVWconst [int64(int32(uint32(c)/uint32(d)))])
  8774  	for {
  8775  		v_0 := v.Args[0]
  8776  		if v_0.Op != OpMIPSDIVU {
  8777  			break
  8778  		}
  8779  		_ = v_0.Args[1]
  8780  		v_0_0 := v_0.Args[0]
  8781  		if v_0_0.Op != OpMIPSMOVWconst {
  8782  			break
  8783  		}
  8784  		c := v_0_0.AuxInt
  8785  		v_0_1 := v_0.Args[1]
  8786  		if v_0_1.Op != OpMIPSMOVWconst {
  8787  			break
  8788  		}
  8789  		d := v_0_1.AuxInt
  8790  		v.reset(OpMIPSMOVWconst)
  8791  		v.AuxInt = int64(int32(uint32(c) / uint32(d)))
  8792  		return true
  8793  	}
  8794  	return false
  8795  }
  8796  func rewriteValueMIPS_OpSignExt16to32_0(v *Value) bool {
  8797  	// match: (SignExt16to32 x)
  8798  	// cond:
  8799  	// result: (MOVHreg x)
  8800  	for {
  8801  		x := v.Args[0]
  8802  		v.reset(OpMIPSMOVHreg)
  8803  		v.AddArg(x)
  8804  		return true
  8805  	}
  8806  }
  8807  func rewriteValueMIPS_OpSignExt8to16_0(v *Value) bool {
  8808  	// match: (SignExt8to16 x)
  8809  	// cond:
  8810  	// result: (MOVBreg x)
  8811  	for {
  8812  		x := v.Args[0]
  8813  		v.reset(OpMIPSMOVBreg)
  8814  		v.AddArg(x)
  8815  		return true
  8816  	}
  8817  }
  8818  func rewriteValueMIPS_OpSignExt8to32_0(v *Value) bool {
  8819  	// match: (SignExt8to32 x)
  8820  	// cond:
  8821  	// result: (MOVBreg x)
  8822  	for {
  8823  		x := v.Args[0]
  8824  		v.reset(OpMIPSMOVBreg)
  8825  		v.AddArg(x)
  8826  		return true
  8827  	}
  8828  }
  8829  func rewriteValueMIPS_OpSignmask_0(v *Value) bool {
  8830  	// match: (Signmask x)
  8831  	// cond:
  8832  	// result: (SRAconst x [31])
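        	// An arithmetic right shift by 31 smears the sign bit across the word, yielding 0 for
        	// non-negative x and -1 for negative x.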
  8833  	for {
  8834  		x := v.Args[0]
  8835  		v.reset(OpMIPSSRAconst)
  8836  		v.AuxInt = 31
  8837  		v.AddArg(x)
  8838  		return true
  8839  	}
  8840  }
  8841  func rewriteValueMIPS_OpSlicemask_0(v *Value) bool {
  8842  	b := v.Block
  8843  	_ = b
  8844  	// match: (Slicemask <t> x)
  8845  	// cond:
  8846  	// result: (SRAconst (NEG <t> x) [31])
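        	// For the non-negative lengths this op sees, NEG x is negative exactly when x > 0, so
        	// the arithmetic shift by 31 produces -1 for a nonzero length and 0 otherwise.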
  8847  	for {
  8848  		t := v.Type
  8849  		x := v.Args[0]
  8850  		v.reset(OpMIPSSRAconst)
  8851  		v.AuxInt = 31
  8852  		v0 := b.NewValue0(v.Pos, OpMIPSNEG, t)
  8853  		v0.AddArg(x)
  8854  		v.AddArg(v0)
  8855  		return true
  8856  	}
  8857  }
  8858  func rewriteValueMIPS_OpSqrt_0(v *Value) bool {
  8859  	// match: (Sqrt x)
  8860  	// cond:
  8861  	// result: (SQRTD x)
  8862  	for {
  8863  		x := v.Args[0]
  8864  		v.reset(OpMIPSSQRTD)
  8865  		v.AddArg(x)
  8866  		return true
  8867  	}
  8868  }
  8869  func rewriteValueMIPS_OpStaticCall_0(v *Value) bool {
  8870  	// match: (StaticCall [argwid] {target} mem)
  8871  	// cond:
  8872  	// result: (CALLstatic [argwid] {target} mem)
  8873  	for {
  8874  		argwid := v.AuxInt
  8875  		target := v.Aux
  8876  		mem := v.Args[0]
  8877  		v.reset(OpMIPSCALLstatic)
  8878  		v.AuxInt = argwid
  8879  		v.Aux = target
  8880  		v.AddArg(mem)
  8881  		return true
  8882  	}
  8883  }
  8884  func rewriteValueMIPS_OpStore_0(v *Value) bool {
  8885  	// match: (Store {t} ptr val mem)
  8886  	// cond: t.(*types.Type).Size() == 1
  8887  	// result: (MOVBstore ptr val mem)
  8888  	for {
  8889  		t := v.Aux
  8890  		_ = v.Args[2]
  8891  		ptr := v.Args[0]
  8892  		val := v.Args[1]
  8893  		mem := v.Args[2]
  8894  		if !(t.(*types.Type).Size() == 1) {
  8895  			break
  8896  		}
  8897  		v.reset(OpMIPSMOVBstore)
  8898  		v.AddArg(ptr)
  8899  		v.AddArg(val)
  8900  		v.AddArg(mem)
  8901  		return true
  8902  	}
  8903  	// match: (Store {t} ptr val mem)
  8904  	// cond: t.(*types.Type).Size() == 2
  8905  	// result: (MOVHstore ptr val mem)
  8906  	for {
  8907  		t := v.Aux
  8908  		_ = v.Args[2]
  8909  		ptr := v.Args[0]
  8910  		val := v.Args[1]
  8911  		mem := v.Args[2]
  8912  		if !(t.(*types.Type).Size() == 2) {
  8913  			break
  8914  		}
  8915  		v.reset(OpMIPSMOVHstore)
  8916  		v.AddArg(ptr)
  8917  		v.AddArg(val)
  8918  		v.AddArg(mem)
  8919  		return true
  8920  	}
  8921  	// match: (Store {t} ptr val mem)
  8922  	// cond: t.(*types.Type).Size() == 4 && !is32BitFloat(val.Type)
  8923  	// result: (MOVWstore ptr val mem)
  8924  	for {
  8925  		t := v.Aux
  8926  		_ = v.Args[2]
  8927  		ptr := v.Args[0]
  8928  		val := v.Args[1]
  8929  		mem := v.Args[2]
  8930  		if !(t.(*types.Type).Size() == 4 && !is32BitFloat(val.Type)) {
  8931  			break
  8932  		}
  8933  		v.reset(OpMIPSMOVWstore)
  8934  		v.AddArg(ptr)
  8935  		v.AddArg(val)
  8936  		v.AddArg(mem)
  8937  		return true
  8938  	}
  8939  	// match: (Store {t} ptr val mem)
  8940  	// cond: t.(*types.Type).Size() == 4 && is32BitFloat(val.Type)
  8941  	// result: (MOVFstore ptr val mem)
  8942  	for {
  8943  		t := v.Aux
  8944  		_ = v.Args[2]
  8945  		ptr := v.Args[0]
  8946  		val := v.Args[1]
  8947  		mem := v.Args[2]
  8948  		if !(t.(*types.Type).Size() == 4 && is32BitFloat(val.Type)) {
  8949  			break
  8950  		}
  8951  		v.reset(OpMIPSMOVFstore)
  8952  		v.AddArg(ptr)
  8953  		v.AddArg(val)
  8954  		v.AddArg(mem)
  8955  		return true
  8956  	}
  8957  	// match: (Store {t} ptr val mem)
  8958  	// cond: t.(*types.Type).Size() == 8 && is64BitFloat(val.Type)
  8959  	// result: (MOVDstore ptr val mem)
  8960  	for {
  8961  		t := v.Aux
  8962  		_ = v.Args[2]
  8963  		ptr := v.Args[0]
  8964  		val := v.Args[1]
  8965  		mem := v.Args[2]
  8966  		if !(t.(*types.Type).Size() == 8 && is64BitFloat(val.Type)) {
  8967  			break
  8968  		}
  8969  		v.reset(OpMIPSMOVDstore)
  8970  		v.AddArg(ptr)
  8971  		v.AddArg(val)
  8972  		v.AddArg(mem)
  8973  		return true
  8974  	}
  8975  	return false
  8976  }
  8977  func rewriteValueMIPS_OpSub16_0(v *Value) bool {
  8978  	// match: (Sub16 x y)
  8979  	// cond:
  8980  	// result: (SUB x y)
  8981  	for {
  8982  		_ = v.Args[1]
  8983  		x := v.Args[0]
  8984  		y := v.Args[1]
  8985  		v.reset(OpMIPSSUB)
  8986  		v.AddArg(x)
  8987  		v.AddArg(y)
  8988  		return true
  8989  	}
  8990  }
  8991  func rewriteValueMIPS_OpSub32_0(v *Value) bool {
  8992  	// match: (Sub32 x y)
  8993  	// cond:
  8994  	// result: (SUB x y)
  8995  	for {
  8996  		_ = v.Args[1]
  8997  		x := v.Args[0]
  8998  		y := v.Args[1]
  8999  		v.reset(OpMIPSSUB)
  9000  		v.AddArg(x)
  9001  		v.AddArg(y)
  9002  		return true
  9003  	}
  9004  }
  9005  func rewriteValueMIPS_OpSub32F_0(v *Value) bool {
  9006  	// match: (Sub32F x y)
  9007  	// cond:
  9008  	// result: (SUBF x y)
  9009  	for {
  9010  		_ = v.Args[1]
  9011  		x := v.Args[0]
  9012  		y := v.Args[1]
  9013  		v.reset(OpMIPSSUBF)
  9014  		v.AddArg(x)
  9015  		v.AddArg(y)
  9016  		return true
  9017  	}
  9018  }
  9019  func rewriteValueMIPS_OpSub32withcarry_0(v *Value) bool {
  9020  	b := v.Block
  9021  	_ = b
  9022  	// match: (Sub32withcarry <t> x y c)
  9023  	// cond:
  9024  	// result: (SUB (SUB <t> x y) c)
  9025  	for {
  9026  		t := v.Type
  9027  		_ = v.Args[2]
  9028  		x := v.Args[0]
  9029  		y := v.Args[1]
  9030  		c := v.Args[2]
  9031  		v.reset(OpMIPSSUB)
  9032  		v0 := b.NewValue0(v.Pos, OpMIPSSUB, t)
  9033  		v0.AddArg(x)
  9034  		v0.AddArg(y)
  9035  		v.AddArg(v0)
  9036  		v.AddArg(c)
  9037  		return true
  9038  	}
  9039  }
  9040  func rewriteValueMIPS_OpSub64F_0(v *Value) bool {
  9041  	// match: (Sub64F x y)
  9042  	// cond:
  9043  	// result: (SUBD x y)
  9044  	for {
  9045  		_ = v.Args[1]
  9046  		x := v.Args[0]
  9047  		y := v.Args[1]
  9048  		v.reset(OpMIPSSUBD)
  9049  		v.AddArg(x)
  9050  		v.AddArg(y)
  9051  		return true
  9052  	}
  9053  }
  9054  func rewriteValueMIPS_OpSub8_0(v *Value) bool {
  9055  	// match: (Sub8 x y)
  9056  	// cond:
  9057  	// result: (SUB x y)
  9058  	for {
  9059  		_ = v.Args[1]
  9060  		x := v.Args[0]
  9061  		y := v.Args[1]
  9062  		v.reset(OpMIPSSUB)
  9063  		v.AddArg(x)
  9064  		v.AddArg(y)
  9065  		return true
  9066  	}
  9067  }
  9068  func rewriteValueMIPS_OpSubPtr_0(v *Value) bool {
  9069  	// match: (SubPtr x y)
  9070  	// cond:
  9071  	// result: (SUB x y)
  9072  	for {
  9073  		_ = v.Args[1]
  9074  		x := v.Args[0]
  9075  		y := v.Args[1]
  9076  		v.reset(OpMIPSSUB)
  9077  		v.AddArg(x)
  9078  		v.AddArg(y)
  9079  		return true
  9080  	}
  9081  }
  9082  func rewriteValueMIPS_OpTrunc16to8_0(v *Value) bool {
  9083  	// match: (Trunc16to8 x)
  9084  	// cond:
  9085  	// result: x
  9086  	for {
  9087  		x := v.Args[0]
  9088  		v.reset(OpCopy)
  9089  		v.Type = x.Type
  9090  		v.AddArg(x)
  9091  		return true
  9092  	}
  9093  }
  9094  func rewriteValueMIPS_OpTrunc32to16_0(v *Value) bool {
  9095  	// match: (Trunc32to16 x)
  9096  	// cond:
  9097  	// result: x
  9098  	for {
  9099  		x := v.Args[0]
  9100  		v.reset(OpCopy)
  9101  		v.Type = x.Type
  9102  		v.AddArg(x)
  9103  		return true
  9104  	}
  9105  }
  9106  func rewriteValueMIPS_OpTrunc32to8_0(v *Value) bool {
  9107  	// match: (Trunc32to8 x)
  9108  	// cond:
  9109  	// result: x
  9110  	for {
  9111  		x := v.Args[0]
  9112  		v.reset(OpCopy)
  9113  		v.Type = x.Type
  9114  		v.AddArg(x)
  9115  		return true
  9116  	}
  9117  }
  9118  func rewriteValueMIPS_OpWB_0(v *Value) bool {
  9119  	// match: (WB {fn} destptr srcptr mem)
  9120  	// cond:
  9121  	// result: (LoweredWB {fn} destptr srcptr mem)
  9122  	for {
  9123  		fn := v.Aux
  9124  		_ = v.Args[2]
  9125  		destptr := v.Args[0]
  9126  		srcptr := v.Args[1]
  9127  		mem := v.Args[2]
  9128  		v.reset(OpMIPSLoweredWB)
  9129  		v.Aux = fn
  9130  		v.AddArg(destptr)
  9131  		v.AddArg(srcptr)
  9132  		v.AddArg(mem)
  9133  		return true
  9134  	}
  9135  }
  9136  func rewriteValueMIPS_OpXor16_0(v *Value) bool {
  9137  	// match: (Xor16 x y)
  9138  	// cond:
  9139  	// result: (XOR x y)
  9140  	for {
  9141  		_ = v.Args[1]
  9142  		x := v.Args[0]
  9143  		y := v.Args[1]
  9144  		v.reset(OpMIPSXOR)
  9145  		v.AddArg(x)
  9146  		v.AddArg(y)
  9147  		return true
  9148  	}
  9149  }
  9150  func rewriteValueMIPS_OpXor32_0(v *Value) bool {
  9151  	// match: (Xor32 x y)
  9152  	// cond:
  9153  	// result: (XOR x y)
  9154  	for {
  9155  		_ = v.Args[1]
  9156  		x := v.Args[0]
  9157  		y := v.Args[1]
  9158  		v.reset(OpMIPSXOR)
  9159  		v.AddArg(x)
  9160  		v.AddArg(y)
  9161  		return true
  9162  	}
  9163  }
  9164  func rewriteValueMIPS_OpXor8_0(v *Value) bool {
  9165  	// match: (Xor8 x y)
  9166  	// cond:
  9167  	// result: (XOR x y)
  9168  	for {
  9169  		_ = v.Args[1]
  9170  		x := v.Args[0]
  9171  		y := v.Args[1]
  9172  		v.reset(OpMIPSXOR)
  9173  		v.AddArg(x)
  9174  		v.AddArg(y)
  9175  		return true
  9176  	}
  9177  }
  9178  func rewriteValueMIPS_OpZero_0(v *Value) bool {
  9179  	b := v.Block
  9180  	_ = b
  9181  	typ := &b.Func.Config.Types
  9182  	_ = typ
  9183  	// match: (Zero [0] _ mem)
  9184  	// cond:
  9185  	// result: mem
  9186  	for {
  9187  		if v.AuxInt != 0 {
  9188  			break
  9189  		}
  9190  		_ = v.Args[1]
  9191  		mem := v.Args[1]
  9192  		v.reset(OpCopy)
  9193  		v.Type = mem.Type
  9194  		v.AddArg(mem)
  9195  		return true
  9196  	}
  9197  	// match: (Zero [1] ptr mem)
  9198  	// cond:
  9199  	// result: (MOVBstore ptr (MOVWconst [0]) mem)
  9200  	for {
  9201  		if v.AuxInt != 1 {
  9202  			break
  9203  		}
  9204  		_ = v.Args[1]
  9205  		ptr := v.Args[0]
  9206  		mem := v.Args[1]
  9207  		v.reset(OpMIPSMOVBstore)
  9208  		v.AddArg(ptr)
  9209  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9210  		v0.AuxInt = 0
  9211  		v.AddArg(v0)
  9212  		v.AddArg(mem)
  9213  		return true
  9214  	}
  9215  	// match: (Zero [2] {t} ptr mem)
  9216  	// cond: t.(*types.Type).Alignment()%2 == 0
  9217  	// result: (MOVHstore ptr (MOVWconst [0]) mem)
  9218  	for {
  9219  		if v.AuxInt != 2 {
  9220  			break
  9221  		}
  9222  		t := v.Aux
  9223  		_ = v.Args[1]
  9224  		ptr := v.Args[0]
  9225  		mem := v.Args[1]
  9226  		if !(t.(*types.Type).Alignment()%2 == 0) {
  9227  			break
  9228  		}
  9229  		v.reset(OpMIPSMOVHstore)
  9230  		v.AddArg(ptr)
  9231  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9232  		v0.AuxInt = 0
  9233  		v.AddArg(v0)
  9234  		v.AddArg(mem)
  9235  		return true
  9236  	}
  9237  	// match: (Zero [2] ptr mem)
  9238  	// cond:
  9239  	// result: (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem))
  9240  	for {
  9241  		if v.AuxInt != 2 {
  9242  			break
  9243  		}
  9244  		_ = v.Args[1]
  9245  		ptr := v.Args[0]
  9246  		mem := v.Args[1]
  9247  		v.reset(OpMIPSMOVBstore)
  9248  		v.AuxInt = 1
  9249  		v.AddArg(ptr)
  9250  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9251  		v0.AuxInt = 0
  9252  		v.AddArg(v0)
  9253  		v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  9254  		v1.AuxInt = 0
  9255  		v1.AddArg(ptr)
  9256  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9257  		v2.AuxInt = 0
  9258  		v1.AddArg(v2)
  9259  		v1.AddArg(mem)
  9260  		v.AddArg(v1)
  9261  		return true
  9262  	}
  9263  	// match: (Zero [4] {t} ptr mem)
  9264  	// cond: t.(*types.Type).Alignment()%4 == 0
  9265  	// result: (MOVWstore ptr (MOVWconst [0]) mem)
  9266  	for {
  9267  		if v.AuxInt != 4 {
  9268  			break
  9269  		}
  9270  		t := v.Aux
  9271  		_ = v.Args[1]
  9272  		ptr := v.Args[0]
  9273  		mem := v.Args[1]
  9274  		if !(t.(*types.Type).Alignment()%4 == 0) {
  9275  			break
  9276  		}
  9277  		v.reset(OpMIPSMOVWstore)
  9278  		v.AddArg(ptr)
  9279  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9280  		v0.AuxInt = 0
  9281  		v.AddArg(v0)
  9282  		v.AddArg(mem)
  9283  		return true
  9284  	}
  9285  	// match: (Zero [4] {t} ptr mem)
  9286  	// cond: t.(*types.Type).Alignment()%2 == 0
  9287  	// result: (MOVHstore [2] ptr (MOVWconst [0]) (MOVHstore [0] ptr (MOVWconst [0]) mem))
  9288  	for {
  9289  		if v.AuxInt != 4 {
  9290  			break
  9291  		}
  9292  		t := v.Aux
  9293  		_ = v.Args[1]
  9294  		ptr := v.Args[0]
  9295  		mem := v.Args[1]
  9296  		if !(t.(*types.Type).Alignment()%2 == 0) {
  9297  			break
  9298  		}
  9299  		v.reset(OpMIPSMOVHstore)
  9300  		v.AuxInt = 2
  9301  		v.AddArg(ptr)
  9302  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9303  		v0.AuxInt = 0
  9304  		v.AddArg(v0)
  9305  		v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
  9306  		v1.AuxInt = 0
  9307  		v1.AddArg(ptr)
  9308  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9309  		v2.AuxInt = 0
  9310  		v1.AddArg(v2)
  9311  		v1.AddArg(mem)
  9312  		v.AddArg(v1)
  9313  		return true
  9314  	}
  9315  	// match: (Zero [4] ptr mem)
  9316  	// cond:
  9317  	// result: (MOVBstore [3] ptr (MOVWconst [0]) (MOVBstore [2] ptr (MOVWconst [0]) (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem))))
  9318  	for {
  9319  		if v.AuxInt != 4 {
  9320  			break
  9321  		}
  9322  		_ = v.Args[1]
  9323  		ptr := v.Args[0]
  9324  		mem := v.Args[1]
  9325  		v.reset(OpMIPSMOVBstore)
  9326  		v.AuxInt = 3
  9327  		v.AddArg(ptr)
  9328  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9329  		v0.AuxInt = 0
  9330  		v.AddArg(v0)
  9331  		v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  9332  		v1.AuxInt = 2
  9333  		v1.AddArg(ptr)
  9334  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9335  		v2.AuxInt = 0
  9336  		v1.AddArg(v2)
  9337  		v3 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  9338  		v3.AuxInt = 1
  9339  		v3.AddArg(ptr)
  9340  		v4 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9341  		v4.AuxInt = 0
  9342  		v3.AddArg(v4)
  9343  		v5 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  9344  		v5.AuxInt = 0
  9345  		v5.AddArg(ptr)
  9346  		v6 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9347  		v6.AuxInt = 0
  9348  		v5.AddArg(v6)
  9349  		v5.AddArg(mem)
  9350  		v3.AddArg(v5)
  9351  		v1.AddArg(v3)
  9352  		v.AddArg(v1)
  9353  		return true
  9354  	}
  9355  	// match: (Zero [3] ptr mem)
  9356  	// cond:
  9357  	// result: (MOVBstore [2] ptr (MOVWconst [0]) (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem)))
  9358  	for {
  9359  		if v.AuxInt != 3 {
  9360  			break
  9361  		}
  9362  		_ = v.Args[1]
  9363  		ptr := v.Args[0]
  9364  		mem := v.Args[1]
  9365  		v.reset(OpMIPSMOVBstore)
  9366  		v.AuxInt = 2
  9367  		v.AddArg(ptr)
  9368  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9369  		v0.AuxInt = 0
  9370  		v.AddArg(v0)
  9371  		v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  9372  		v1.AuxInt = 1
  9373  		v1.AddArg(ptr)
  9374  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9375  		v2.AuxInt = 0
  9376  		v1.AddArg(v2)
  9377  		v3 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  9378  		v3.AuxInt = 0
  9379  		v3.AddArg(ptr)
  9380  		v4 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9381  		v4.AuxInt = 0
  9382  		v3.AddArg(v4)
  9383  		v3.AddArg(mem)
  9384  		v1.AddArg(v3)
  9385  		v.AddArg(v1)
  9386  		return true
  9387  	}
  9388  	// match: (Zero [6] {t} ptr mem)
  9389  	// cond: t.(*types.Type).Alignment()%2 == 0
  9390  	// result: (MOVHstore [4] ptr (MOVWconst [0]) (MOVHstore [2] ptr (MOVWconst [0]) (MOVHstore [0] ptr (MOVWconst [0]) mem)))
  9391  	for {
  9392  		if v.AuxInt != 6 {
  9393  			break
  9394  		}
  9395  		t := v.Aux
  9396  		_ = v.Args[1]
  9397  		ptr := v.Args[0]
  9398  		mem := v.Args[1]
  9399  		if !(t.(*types.Type).Alignment()%2 == 0) {
  9400  			break
  9401  		}
  9402  		v.reset(OpMIPSMOVHstore)
  9403  		v.AuxInt = 4
  9404  		v.AddArg(ptr)
  9405  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9406  		v0.AuxInt = 0
  9407  		v.AddArg(v0)
  9408  		v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
  9409  		v1.AuxInt = 2
  9410  		v1.AddArg(ptr)
  9411  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9412  		v2.AuxInt = 0
  9413  		v1.AddArg(v2)
  9414  		v3 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
  9415  		v3.AuxInt = 0
  9416  		v3.AddArg(ptr)
  9417  		v4 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9418  		v4.AuxInt = 0
  9419  		v3.AddArg(v4)
  9420  		v3.AddArg(mem)
  9421  		v1.AddArg(v3)
  9422  		v.AddArg(v1)
  9423  		return true
  9424  	}
  9425  	// match: (Zero [8] {t} ptr mem)
  9426  	// cond: t.(*types.Type).Alignment()%4 == 0
  9427  	// result: (MOVWstore [4] ptr (MOVWconst [0]) (MOVWstore [0] ptr (MOVWconst [0]) mem))
  9428  	for {
  9429  		if v.AuxInt != 8 {
  9430  			break
  9431  		}
  9432  		t := v.Aux
  9433  		_ = v.Args[1]
  9434  		ptr := v.Args[0]
  9435  		mem := v.Args[1]
  9436  		if !(t.(*types.Type).Alignment()%4 == 0) {
  9437  			break
  9438  		}
  9439  		v.reset(OpMIPSMOVWstore)
  9440  		v.AuxInt = 4
  9441  		v.AddArg(ptr)
  9442  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9443  		v0.AuxInt = 0
  9444  		v.AddArg(v0)
  9445  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  9446  		v1.AuxInt = 0
  9447  		v1.AddArg(ptr)
  9448  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9449  		v2.AuxInt = 0
  9450  		v1.AddArg(v2)
  9451  		v1.AddArg(mem)
  9452  		v.AddArg(v1)
  9453  		return true
  9454  	}
  9455  	return false
  9456  }
  9457  func rewriteValueMIPS_OpZero_10(v *Value) bool {
  9458  	b := v.Block
  9459  	_ = b
  9460  	config := b.Func.Config
  9461  	_ = config
  9462  	typ := &b.Func.Config.Types
  9463  	_ = typ
  9464  	// match: (Zero [12] {t} ptr mem)
  9465  	// cond: t.(*types.Type).Alignment()%4 == 0
  9466  	// result: (MOVWstore [8] ptr (MOVWconst [0]) (MOVWstore [4] ptr (MOVWconst [0]) (MOVWstore [0] ptr (MOVWconst [0]) mem)))
  9467  	for {
  9468  		if v.AuxInt != 12 {
  9469  			break
  9470  		}
  9471  		t := v.Aux
  9472  		_ = v.Args[1]
  9473  		ptr := v.Args[0]
  9474  		mem := v.Args[1]
  9475  		if !(t.(*types.Type).Alignment()%4 == 0) {
  9476  			break
  9477  		}
  9478  		v.reset(OpMIPSMOVWstore)
  9479  		v.AuxInt = 8
  9480  		v.AddArg(ptr)
  9481  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9482  		v0.AuxInt = 0
  9483  		v.AddArg(v0)
  9484  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  9485  		v1.AuxInt = 4
  9486  		v1.AddArg(ptr)
  9487  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9488  		v2.AuxInt = 0
  9489  		v1.AddArg(v2)
  9490  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  9491  		v3.AuxInt = 0
  9492  		v3.AddArg(ptr)
  9493  		v4 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9494  		v4.AuxInt = 0
  9495  		v3.AddArg(v4)
  9496  		v3.AddArg(mem)
  9497  		v1.AddArg(v3)
  9498  		v.AddArg(v1)
  9499  		return true
  9500  	}
  9501  	// match: (Zero [16] {t} ptr mem)
  9502  	// cond: t.(*types.Type).Alignment()%4 == 0
  9503  	// result: (MOVWstore [12] ptr (MOVWconst [0]) (MOVWstore [8] ptr (MOVWconst [0]) (MOVWstore [4] ptr (MOVWconst [0]) (MOVWstore [0] ptr (MOVWconst [0]) mem))))
  9504  	for {
  9505  		if v.AuxInt != 16 {
  9506  			break
  9507  		}
  9508  		t := v.Aux
  9509  		_ = v.Args[1]
  9510  		ptr := v.Args[0]
  9511  		mem := v.Args[1]
  9512  		if !(t.(*types.Type).Alignment()%4 == 0) {
  9513  			break
  9514  		}
  9515  		v.reset(OpMIPSMOVWstore)
  9516  		v.AuxInt = 12
  9517  		v.AddArg(ptr)
  9518  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9519  		v0.AuxInt = 0
  9520  		v.AddArg(v0)
  9521  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  9522  		v1.AuxInt = 8
  9523  		v1.AddArg(ptr)
  9524  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9525  		v2.AuxInt = 0
  9526  		v1.AddArg(v2)
  9527  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  9528  		v3.AuxInt = 4
  9529  		v3.AddArg(ptr)
  9530  		v4 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9531  		v4.AuxInt = 0
  9532  		v3.AddArg(v4)
  9533  		v5 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  9534  		v5.AuxInt = 0
  9535  		v5.AddArg(ptr)
  9536  		v6 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9537  		v6.AuxInt = 0
  9538  		v5.AddArg(v6)
  9539  		v5.AddArg(mem)
  9540  		v3.AddArg(v5)
  9541  		v1.AddArg(v3)
  9542  		v.AddArg(v1)
  9543  		return true
  9544  	}
  9545  	// match: (Zero [s] {t} ptr mem)
  9546  	// cond: (s > 16 || t.(*types.Type).Alignment()%4 != 0)
  9547  	// result: (LoweredZero [t.(*types.Type).Alignment()] ptr (ADDconst <ptr.Type> ptr [s-moveSize(t.(*types.Type).Alignment(), config)]) mem)
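        	// Fallback: clear the region with a runtime loop. The alignment travels in AuxInt, and
        	// the second argument, ptr+s-moveSize(alignment), points at the last chunk to be zeroed.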
  9548  	for {
  9549  		s := v.AuxInt
  9550  		t := v.Aux
  9551  		_ = v.Args[1]
  9552  		ptr := v.Args[0]
  9553  		mem := v.Args[1]
  9554  		if !(s > 16 || t.(*types.Type).Alignment()%4 != 0) {
  9555  			break
  9556  		}
  9557  		v.reset(OpMIPSLoweredZero)
  9558  		v.AuxInt = t.(*types.Type).Alignment()
  9559  		v.AddArg(ptr)
  9560  		v0 := b.NewValue0(v.Pos, OpMIPSADDconst, ptr.Type)
  9561  		v0.AuxInt = s - moveSize(t.(*types.Type).Alignment(), config)
  9562  		v0.AddArg(ptr)
  9563  		v.AddArg(v0)
  9564  		v.AddArg(mem)
  9565  		return true
  9566  	}
  9567  	return false
  9568  }
  9569  func rewriteValueMIPS_OpZeroExt16to32_0(v *Value) bool {
  9570  	// match: (ZeroExt16to32 x)
  9571  	// cond:
  9572  	// result: (MOVHUreg x)
  9573  	for {
  9574  		x := v.Args[0]
  9575  		v.reset(OpMIPSMOVHUreg)
  9576  		v.AddArg(x)
  9577  		return true
  9578  	}
  9579  }
  9580  func rewriteValueMIPS_OpZeroExt8to16_0(v *Value) bool {
  9581  	// match: (ZeroExt8to16 x)
  9582  	// cond:
  9583  	// result: (MOVBUreg x)
  9584  	for {
  9585  		x := v.Args[0]
  9586  		v.reset(OpMIPSMOVBUreg)
  9587  		v.AddArg(x)
  9588  		return true
  9589  	}
  9590  }
  9591  func rewriteValueMIPS_OpZeroExt8to32_0(v *Value) bool {
  9592  	// match: (ZeroExt8to32 x)
  9593  	// cond:
  9594  	// result: (MOVBUreg x)
  9595  	for {
  9596  		x := v.Args[0]
  9597  		v.reset(OpMIPSMOVBUreg)
  9598  		v.AddArg(x)
  9599  		return true
  9600  	}
  9601  }
  9602  func rewriteValueMIPS_OpZeromask_0(v *Value) bool {
  9603  	b := v.Block
  9604  	_ = b
  9605  	typ := &b.Func.Config.Types
  9606  	_ = typ
  9607  	// match: (Zeromask x)
  9608  	// cond:
  9609  	// result: (NEG (SGTU x (MOVWconst [0])))
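        	// SGTU x 0 is 1 exactly when x is nonzero; negating that 1 gives the all-ones mask,
        	// so the result is 0 for x == 0 and -1 otherwise.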
  9610  	for {
  9611  		x := v.Args[0]
  9612  		v.reset(OpMIPSNEG)
  9613  		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
  9614  		v0.AddArg(x)
  9615  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9616  		v1.AuxInt = 0
  9617  		v0.AddArg(v1)
  9618  		v.AddArg(v0)
  9619  		return true
  9620  	}
  9621  }
  9622  func rewriteBlockMIPS(b *Block) bool {
  9623  	config := b.Func.Config
  9624  	_ = config
  9625  	fe := b.Func.fe
  9626  	_ = fe
  9627  	typ := &config.Types
  9628  	_ = typ
  9629  	switch b.Kind {
  9630  	case BlockMIPSEQ:
  9631  		// match: (EQ (FPFlagTrue cmp) yes no)
  9632  		// cond:
  9633  		// result: (FPF cmp yes no)
  9634  		for {
  9635  			v := b.Control
  9636  			if v.Op != OpMIPSFPFlagTrue {
  9637  				break
  9638  			}
  9639  			cmp := v.Args[0]
  9640  			b.Kind = BlockMIPSFPF
  9641  			b.SetControl(cmp)
  9642  			b.Aux = nil
  9643  			return true
  9644  		}
  9645  		// match: (EQ (FPFlagFalse cmp) yes no)
  9646  		// cond:
  9647  		// result: (FPT cmp yes no)
  9648  		for {
  9649  			v := b.Control
  9650  			if v.Op != OpMIPSFPFlagFalse {
  9651  				break
  9652  			}
  9653  			cmp := v.Args[0]
  9654  			b.Kind = BlockMIPSFPT
  9655  			b.SetControl(cmp)
  9656  			b.Aux = nil
  9657  			return true
  9658  		}
  9659  		// match: (EQ (XORconst [1] cmp:(SGT _ _)) yes no)
  9660  		// cond:
  9661  		// result: (NE cmp yes no)
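        		// XORconst [1] on a 0/1 comparison result is a logical negation, so testing the
        		// negation for zero is the same as testing the original comparison for nonzero.
        		// The following cases repeat this for the other compare ops.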
  9662  		for {
  9663  			v := b.Control
  9664  			if v.Op != OpMIPSXORconst {
  9665  				break
  9666  			}
  9667  			if v.AuxInt != 1 {
  9668  				break
  9669  			}
  9670  			cmp := v.Args[0]
  9671  			if cmp.Op != OpMIPSSGT {
  9672  				break
  9673  			}
  9674  			_ = cmp.Args[1]
  9675  			b.Kind = BlockMIPSNE
  9676  			b.SetControl(cmp)
  9677  			b.Aux = nil
  9678  			return true
  9679  		}
  9680  		// match: (EQ (XORconst [1] cmp:(SGTU _ _)) yes no)
  9681  		// cond:
  9682  		// result: (NE cmp yes no)
  9683  		for {
  9684  			v := b.Control
  9685  			if v.Op != OpMIPSXORconst {
  9686  				break
  9687  			}
  9688  			if v.AuxInt != 1 {
  9689  				break
  9690  			}
  9691  			cmp := v.Args[0]
  9692  			if cmp.Op != OpMIPSSGTU {
  9693  				break
  9694  			}
  9695  			_ = cmp.Args[1]
  9696  			b.Kind = BlockMIPSNE
  9697  			b.SetControl(cmp)
  9698  			b.Aux = nil
  9699  			return true
  9700  		}
  9701  		// match: (EQ (XORconst [1] cmp:(SGTconst _)) yes no)
  9702  		// cond:
  9703  		// result: (NE cmp yes no)
  9704  		for {
  9705  			v := b.Control
  9706  			if v.Op != OpMIPSXORconst {
  9707  				break
  9708  			}
  9709  			if v.AuxInt != 1 {
  9710  				break
  9711  			}
  9712  			cmp := v.Args[0]
  9713  			if cmp.Op != OpMIPSSGTconst {
  9714  				break
  9715  			}
  9716  			b.Kind = BlockMIPSNE
  9717  			b.SetControl(cmp)
  9718  			b.Aux = nil
  9719  			return true
  9720  		}
  9721  		// match: (EQ (XORconst [1] cmp:(SGTUconst _)) yes no)
  9722  		// cond:
  9723  		// result: (NE cmp yes no)
  9724  		for {
  9725  			v := b.Control
  9726  			if v.Op != OpMIPSXORconst {
  9727  				break
  9728  			}
  9729  			if v.AuxInt != 1 {
  9730  				break
  9731  			}
  9732  			cmp := v.Args[0]
  9733  			if cmp.Op != OpMIPSSGTUconst {
  9734  				break
  9735  			}
  9736  			b.Kind = BlockMIPSNE
  9737  			b.SetControl(cmp)
  9738  			b.Aux = nil
  9739  			return true
  9740  		}
  9741  		// match: (EQ (XORconst [1] cmp:(SGTzero _)) yes no)
  9742  		// cond:
  9743  		// result: (NE cmp yes no)
  9744  		for {
  9745  			v := b.Control
  9746  			if v.Op != OpMIPSXORconst {
  9747  				break
  9748  			}
  9749  			if v.AuxInt != 1 {
  9750  				break
  9751  			}
  9752  			cmp := v.Args[0]
  9753  			if cmp.Op != OpMIPSSGTzero {
  9754  				break
  9755  			}
  9756  			b.Kind = BlockMIPSNE
  9757  			b.SetControl(cmp)
  9758  			b.Aux = nil
  9759  			return true
  9760  		}
  9761  		// match: (EQ (XORconst [1] cmp:(SGTUzero _)) yes no)
  9762  		// cond:
  9763  		// result: (NE cmp yes no)
  9764  		for {
  9765  			v := b.Control
  9766  			if v.Op != OpMIPSXORconst {
  9767  				break
  9768  			}
  9769  			if v.AuxInt != 1 {
  9770  				break
  9771  			}
  9772  			cmp := v.Args[0]
  9773  			if cmp.Op != OpMIPSSGTUzero {
  9774  				break
  9775  			}
  9776  			b.Kind = BlockMIPSNE
  9777  			b.SetControl(cmp)
  9778  			b.Aux = nil
  9779  			return true
  9780  		}
  9781  		// match: (EQ (SGTUconst [1] x) yes no)
  9782  		// cond:
  9783  		// result: (NE x yes no)
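        		// SGTUconst [1] x is 1 >u x, which is simply x == 0, so branching when it is zero
        		// is branching when x is nonzero.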
  9784  		for {
  9785  			v := b.Control
  9786  			if v.Op != OpMIPSSGTUconst {
  9787  				break
  9788  			}
  9789  			if v.AuxInt != 1 {
  9790  				break
  9791  			}
  9792  			x := v.Args[0]
  9793  			b.Kind = BlockMIPSNE
  9794  			b.SetControl(x)
  9795  			b.Aux = nil
  9796  			return true
  9797  		}
  9798  		// match: (EQ (SGTUzero x) yes no)
  9799  		// cond:
  9800  		// result: (EQ x yes no)
  9801  		for {
  9802  			v := b.Control
  9803  			if v.Op != OpMIPSSGTUzero {
  9804  				break
  9805  			}
  9806  			x := v.Args[0]
  9807  			b.Kind = BlockMIPSEQ
  9808  			b.SetControl(x)
  9809  			b.Aux = nil
  9810  			return true
  9811  		}
  9812  		// match: (EQ (SGTconst [0] x) yes no)
  9813  		// cond:
  9814  		// result: (GEZ x yes no)
  9815  		for {
  9816  			v := b.Control
  9817  			if v.Op != OpMIPSSGTconst {
  9818  				break
  9819  			}
  9820  			if v.AuxInt != 0 {
  9821  				break
  9822  			}
  9823  			x := v.Args[0]
  9824  			b.Kind = BlockMIPSGEZ
  9825  			b.SetControl(x)
  9826  			b.Aux = nil
  9827  			return true
  9828  		}
  9829  		// match: (EQ (SGTzero x) yes no)
  9830  		// cond:
  9831  		// result: (LEZ x yes no)
  9832  		for {
  9833  			v := b.Control
  9834  			if v.Op != OpMIPSSGTzero {
  9835  				break
  9836  			}
  9837  			x := v.Args[0]
  9838  			b.Kind = BlockMIPSLEZ
  9839  			b.SetControl(x)
  9840  			b.Aux = nil
  9841  			return true
  9842  		}
  9843  		// match: (EQ (MOVWconst [0]) yes no)
  9844  		// cond:
  9845  		// result: (First nil yes no)
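        		// With a constant control the branch is decided at compile time: a First block
        		// always takes its first successor, and the companion rule below swaps the
        		// successors when the constant is nonzero.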
  9846  		for {
  9847  			v := b.Control
  9848  			if v.Op != OpMIPSMOVWconst {
  9849  				break
  9850  			}
  9851  			if v.AuxInt != 0 {
  9852  				break
  9853  			}
  9854  			b.Kind = BlockFirst
  9855  			b.SetControl(nil)
  9856  			b.Aux = nil
  9857  			return true
  9858  		}
  9859  		// match: (EQ (MOVWconst [c]) yes no)
  9860  		// cond: c != 0
  9861  		// result: (First nil no yes)
  9862  		for {
  9863  			v := b.Control
  9864  			if v.Op != OpMIPSMOVWconst {
  9865  				break
  9866  			}
  9867  			c := v.AuxInt
  9868  			if !(c != 0) {
  9869  				break
  9870  			}
  9871  			b.Kind = BlockFirst
  9872  			b.SetControl(nil)
  9873  			b.Aux = nil
  9874  			b.swapSuccessors()
  9875  			return true
  9876  		}
  9877  	case BlockMIPSGEZ:
  9878  		// match: (GEZ (MOVWconst [c]) yes no)
  9879  		// cond: int32(c) >= 0
  9880  		// result: (First nil yes no)
  9881  		for {
  9882  			v := b.Control
  9883  			if v.Op != OpMIPSMOVWconst {
  9884  				break
  9885  			}
  9886  			c := v.AuxInt
  9887  			if !(int32(c) >= 0) {
  9888  				break
  9889  			}
  9890  			b.Kind = BlockFirst
  9891  			b.SetControl(nil)
  9892  			b.Aux = nil
  9893  			return true
  9894  		}
  9895  		// match: (GEZ (MOVWconst [c]) yes no)
  9896  		// cond: int32(c) < 0
  9897  		// result: (First nil no yes)
  9898  		for {
  9899  			v := b.Control
  9900  			if v.Op != OpMIPSMOVWconst {
  9901  				break
  9902  			}
  9903  			c := v.AuxInt
  9904  			if !(int32(c) < 0) {
  9905  				break
  9906  			}
  9907  			b.Kind = BlockFirst
  9908  			b.SetControl(nil)
  9909  			b.Aux = nil
  9910  			b.swapSuccessors()
  9911  			return true
  9912  		}
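		// The GTZ, LEZ and LTZ cases below fold constant controls the same way as
		// GEZ: the signed comparison is evaluated now, and swapSuccessors is used
		// when the condition is statically false.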
  9913  	case BlockMIPSGTZ:
  9914  		// match: (GTZ (MOVWconst [c]) yes no)
  9915  		// cond: int32(c) > 0
  9916  		// result: (First nil yes no)
  9917  		for {
  9918  			v := b.Control
  9919  			if v.Op != OpMIPSMOVWconst {
  9920  				break
  9921  			}
  9922  			c := v.AuxInt
  9923  			if !(int32(c) > 0) {
  9924  				break
  9925  			}
  9926  			b.Kind = BlockFirst
  9927  			b.SetControl(nil)
  9928  			b.Aux = nil
  9929  			return true
  9930  		}
  9931  		// match: (GTZ (MOVWconst [c]) yes no)
  9932  		// cond: int32(c) <= 0
  9933  		// result: (First nil no yes)
  9934  		for {
  9935  			v := b.Control
  9936  			if v.Op != OpMIPSMOVWconst {
  9937  				break
  9938  			}
  9939  			c := v.AuxInt
  9940  			if !(int32(c) <= 0) {
  9941  				break
  9942  			}
  9943  			b.Kind = BlockFirst
  9944  			b.SetControl(nil)
  9945  			b.Aux = nil
  9946  			b.swapSuccessors()
  9947  			return true
  9948  		}
  9949  	case BlockIf:
  9950  		// match: (If cond yes no)
  9951  		// cond:
  9952  		// result: (NE cond yes no)
  9953  		for {
  9954  			v := b.Control
  9955  			_ = v
  9956  			cond := b.Control
  9957  			b.Kind = BlockMIPSNE
  9958  			b.SetControl(cond)
  9959  			b.Aux = nil
  9960  			return true
  9961  		}
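		// Generic If blocks always lower to NE: a boolean is materialized as 0 or 1
		// on MIPS, so "branch if cond" is simply "branch if cond != 0".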
  9962  	case BlockMIPSLEZ:
  9963  		// match: (LEZ (MOVWconst [c]) yes no)
  9964  		// cond: int32(c) <= 0
  9965  		// result: (First nil yes no)
  9966  		for {
  9967  			v := b.Control
  9968  			if v.Op != OpMIPSMOVWconst {
  9969  				break
  9970  			}
  9971  			c := v.AuxInt
  9972  			if !(int32(c) <= 0) {
  9973  				break
  9974  			}
  9975  			b.Kind = BlockFirst
  9976  			b.SetControl(nil)
  9977  			b.Aux = nil
  9978  			return true
  9979  		}
  9980  		// match: (LEZ (MOVWconst [c]) yes no)
  9981  		// cond: int32(c) > 0
  9982  		// result: (First nil no yes)
  9983  		for {
  9984  			v := b.Control
  9985  			if v.Op != OpMIPSMOVWconst {
  9986  				break
  9987  			}
  9988  			c := v.AuxInt
  9989  			if !(int32(c) > 0) {
  9990  				break
  9991  			}
  9992  			b.Kind = BlockFirst
  9993  			b.SetControl(nil)
  9994  			b.Aux = nil
  9995  			b.swapSuccessors()
  9996  			return true
  9997  		}
  9998  	case BlockMIPSLTZ:
  9999  		// match: (LTZ (MOVWconst [c]) yes no)
 10000  		// cond: int32(c) < 0
 10001  		// result: (First nil yes no)
 10002  		for {
 10003  			v := b.Control
 10004  			if v.Op != OpMIPSMOVWconst {
 10005  				break
 10006  			}
 10007  			c := v.AuxInt
 10008  			if !(int32(c) < 0) {
 10009  				break
 10010  			}
 10011  			b.Kind = BlockFirst
 10012  			b.SetControl(nil)
 10013  			b.Aux = nil
 10014  			return true
 10015  		}
 10016  		// match: (LTZ (MOVWconst [c]) yes no)
 10017  		// cond: int32(c) >= 0
 10018  		// result: (First nil no yes)
 10019  		for {
 10020  			v := b.Control
 10021  			if v.Op != OpMIPSMOVWconst {
 10022  				break
 10023  			}
 10024  			c := v.AuxInt
 10025  			if !(int32(c) >= 0) {
 10026  				break
 10027  			}
 10028  			b.Kind = BlockFirst
 10029  			b.SetControl(nil)
 10030  			b.Aux = nil
 10031  			b.swapSuccessors()
 10032  			return true
 10033  		}
 10034  	case BlockMIPSNE:
 10035  		// match: (NE (FPFlagTrue cmp) yes no)
 10036  		// cond:
 10037  		// result: (FPT cmp yes no)
 10038  		for {
 10039  			v := b.Control
 10040  			if v.Op != OpMIPSFPFlagTrue {
 10041  				break
 10042  			}
 10043  			cmp := v.Args[0]
 10044  			b.Kind = BlockMIPSFPT
 10045  			b.SetControl(cmp)
 10046  			b.Aux = nil
 10047  			return true
 10048  		}
 10049  		// match: (NE (FPFlagFalse cmp) yes no)
 10050  		// cond:
 10051  		// result: (FPF cmp yes no)
 10052  		for {
 10053  			v := b.Control
 10054  			if v.Op != OpMIPSFPFlagFalse {
 10055  				break
 10056  			}
 10057  			cmp := v.Args[0]
 10058  			b.Kind = BlockMIPSFPF
 10059  			b.SetControl(cmp)
 10060  			b.Aux = nil
 10061  			return true
 10062  		}
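		// Floating-point comparisons set the FP condition flag; FPFlagTrue and
		// FPFlagFalse only exist to copy that flag into a register. When the sole
		// use is a branch, the copy is skipped and the block branches on the flag
		// directly via FPT or FPF.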
 10063  		// match: (NE (XORconst [1] cmp:(SGT _ _)) yes no)
 10064  		// cond:
 10065  		// result: (EQ cmp yes no)
 10066  		for {
 10067  			v := b.Control
 10068  			if v.Op != OpMIPSXORconst {
 10069  				break
 10070  			}
 10071  			if v.AuxInt != 1 {
 10072  				break
 10073  			}
 10074  			cmp := v.Args[0]
 10075  			if cmp.Op != OpMIPSSGT {
 10076  				break
 10077  			}
 10078  			_ = cmp.Args[1]
 10079  			b.Kind = BlockMIPSEQ
 10080  			b.SetControl(cmp)
 10081  			b.Aux = nil
 10082  			return true
 10083  		}
 10084  		// match: (NE (XORconst [1] cmp:(SGTU _ _)) yes no)
 10085  		// cond:
 10086  		// result: (EQ cmp yes no)
 10087  		for {
 10088  			v := b.Control
 10089  			if v.Op != OpMIPSXORconst {
 10090  				break
 10091  			}
 10092  			if v.AuxInt != 1 {
 10093  				break
 10094  			}
 10095  			cmp := v.Args[0]
 10096  			if cmp.Op != OpMIPSSGTU {
 10097  				break
 10098  			}
 10099  			_ = cmp.Args[1]
 10100  			b.Kind = BlockMIPSEQ
 10101  			b.SetControl(cmp)
 10102  			b.Aux = nil
 10103  			return true
 10104  		}
 10105  		// match: (NE (XORconst [1] cmp:(SGTconst _)) yes no)
 10106  		// cond:
 10107  		// result: (EQ cmp yes no)
 10108  		for {
 10109  			v := b.Control
 10110  			if v.Op != OpMIPSXORconst {
 10111  				break
 10112  			}
 10113  			if v.AuxInt != 1 {
 10114  				break
 10115  			}
 10116  			cmp := v.Args[0]
 10117  			if cmp.Op != OpMIPSSGTconst {
 10118  				break
 10119  			}
 10120  			b.Kind = BlockMIPSEQ
 10121  			b.SetControl(cmp)
 10122  			b.Aux = nil
 10123  			return true
 10124  		}
 10125  		// match: (NE (XORconst [1] cmp:(SGTUconst _)) yes no)
 10126  		// cond:
 10127  		// result: (EQ cmp yes no)
 10128  		for {
 10129  			v := b.Control
 10130  			if v.Op != OpMIPSXORconst {
 10131  				break
 10132  			}
 10133  			if v.AuxInt != 1 {
 10134  				break
 10135  			}
 10136  			cmp := v.Args[0]
 10137  			if cmp.Op != OpMIPSSGTUconst {
 10138  				break
 10139  			}
 10140  			b.Kind = BlockMIPSEQ
 10141  			b.SetControl(cmp)
 10142  			b.Aux = nil
 10143  			return true
 10144  		}
 10145  		// match: (NE (XORconst [1] cmp:(SGTzero _)) yes no)
 10146  		// cond:
 10147  		// result: (EQ cmp yes no)
 10148  		for {
 10149  			v := b.Control
 10150  			if v.Op != OpMIPSXORconst {
 10151  				break
 10152  			}
 10153  			if v.AuxInt != 1 {
 10154  				break
 10155  			}
 10156  			cmp := v.Args[0]
 10157  			if cmp.Op != OpMIPSSGTzero {
 10158  				break
 10159  			}
 10160  			b.Kind = BlockMIPSEQ
 10161  			b.SetControl(cmp)
 10162  			b.Aux = nil
 10163  			return true
 10164  		}
 10165  		// match: (NE (XORconst [1] cmp:(SGTUzero _)) yes no)
 10166  		// cond:
 10167  		// result: (EQ cmp yes no)
 10168  		for {
 10169  			v := b.Control
 10170  			if v.Op != OpMIPSXORconst {
 10171  				break
 10172  			}
 10173  			if v.AuxInt != 1 {
 10174  				break
 10175  			}
 10176  			cmp := v.Args[0]
 10177  			if cmp.Op != OpMIPSSGTUzero {
 10178  				break
 10179  			}
 10180  			b.Kind = BlockMIPSEQ
 10181  			b.SetControl(cmp)
 10182  			b.Aux = nil
 10183  			return true
 10184  		}
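		// Mirror image of the EQ case above: NE on the negation (XORconst [1] cmp)
		// of a 0/1 comparison is the same as EQ on cmp itself.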
 10185  		// match: (NE (SGTUconst [1] x) yes no)
 10186  		// cond:
 10187  		// result: (EQ x yes no)
 10188  		for {
 10189  			v := b.Control
 10190  			if v.Op != OpMIPSSGTUconst {
 10191  				break
 10192  			}
 10193  			if v.AuxInt != 1 {
 10194  				break
 10195  			}
 10196  			x := v.Args[0]
 10197  			b.Kind = BlockMIPSEQ
 10198  			b.SetControl(x)
 10199  			b.Aux = nil
 10200  			return true
 10201  		}
 10202  		// match: (NE (SGTUzero x) yes no)
 10203  		// cond:
 10204  		// result: (NE x yes no)
 10205  		for {
 10206  			v := b.Control
 10207  			if v.Op != OpMIPSSGTUzero {
 10208  				break
 10209  			}
 10210  			x := v.Args[0]
 10211  			b.Kind = BlockMIPSNE
 10212  			b.SetControl(x)
 10213  			b.Aux = nil
 10214  			return true
 10215  		}
 10216  		// match: (NE (SGTconst [0] x) yes no)
 10217  		// cond:
 10218  		// result: (LTZ x yes no)
 10219  		for {
 10220  			v := b.Control
 10221  			if v.Op != OpMIPSSGTconst {
 10222  				break
 10223  			}
 10224  			if v.AuxInt != 0 {
 10225  				break
 10226  			}
 10227  			x := v.Args[0]
 10228  			b.Kind = BlockMIPSLTZ
 10229  			b.SetControl(x)
 10230  			b.Aux = nil
 10231  			return true
 10232  		}
 10233  		// match: (NE (SGTzero x) yes no)
 10234  		// cond:
 10235  		// result: (GTZ x yes no)
 10236  		for {
 10237  			v := b.Control
 10238  			if v.Op != OpMIPSSGTzero {
 10239  				break
 10240  			}
 10241  			x := v.Args[0]
 10242  			b.Kind = BlockMIPSGTZ
 10243  			b.SetControl(x)
 10244  			b.Aux = nil
 10245  			return true
 10246  		}
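		// NE counterparts of the EQ rules: (SGTUconst [1] x) means "x == 0", so NE
		// of it is (EQ x); (SGTUzero x) means "x != 0", so NE of it is (NE x); and
		// (SGTconst [0] x) and (SGTzero x) turn into the direct signed branches
		// LTZ and GTZ.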
 10247  		// match: (NE (MOVWconst [0]) yes no)
 10248  		// cond:
 10249  		// result: (First nil no yes)
 10250  		for {
 10251  			v := b.Control
 10252  			if v.Op != OpMIPSMOVWconst {
 10253  				break
 10254  			}
 10255  			if v.AuxInt != 0 {
 10256  				break
 10257  			}
 10258  			b.Kind = BlockFirst
 10259  			b.SetControl(nil)
 10260  			b.Aux = nil
 10261  			b.swapSuccessors()
 10262  			return true
 10263  		}
 10264  		// match: (NE (MOVWconst [c]) yes no)
 10265  		// cond: c != 0
 10266  		// result: (First nil yes no)
 10267  		for {
 10268  			v := b.Control
 10269  			if v.Op != OpMIPSMOVWconst {
 10270  				break
 10271  			}
 10272  			c := v.AuxInt
 10273  			if !(c != 0) {
 10274  				break
 10275  			}
 10276  			b.Kind = BlockFirst
 10277  			b.SetControl(nil)
 10278  			b.Aux = nil
 10279  			return true
 10280  		}
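		// Constant folding for NE mirrors EQ: a zero control means the branch is
		// never taken (successors swapped), while any non-zero constant means it
		// always is.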
 10281  	}
 10282  	return false
 10283  }