github.com/sanprasirt/go@v0.0.0-20170607001320-a027466e4b6d/src/cmd/compile/internal/ssa/rewriteMIPS.go

     1  // Code generated from gen/MIPS.rules; DO NOT EDIT.
     2  // generated with: cd gen; go run *.go
     3  
     4  package ssa
     5  
     6  import "math"
     7  import "cmd/internal/obj"
     8  import "cmd/internal/objabi"
     9  import "cmd/compile/internal/types"
    10  
    11  var _ = math.MinInt8  // in case not otherwise used
    12  var _ = obj.ANOP      // in case not otherwise used
    13  var _ = objabi.GOROOT // in case not otherwise used
    14  var _ = types.TypeMem // in case not otherwise used
    15  
    16  func rewriteValueMIPS(v *Value) bool {
    17  	switch v.Op {
    18  	case OpAdd16:
    19  		return rewriteValueMIPS_OpAdd16_0(v)
    20  	case OpAdd32:
    21  		return rewriteValueMIPS_OpAdd32_0(v)
    22  	case OpAdd32F:
    23  		return rewriteValueMIPS_OpAdd32F_0(v)
    24  	case OpAdd32withcarry:
    25  		return rewriteValueMIPS_OpAdd32withcarry_0(v)
    26  	case OpAdd64F:
    27  		return rewriteValueMIPS_OpAdd64F_0(v)
    28  	case OpAdd8:
    29  		return rewriteValueMIPS_OpAdd8_0(v)
    30  	case OpAddPtr:
    31  		return rewriteValueMIPS_OpAddPtr_0(v)
    32  	case OpAddr:
    33  		return rewriteValueMIPS_OpAddr_0(v)
    34  	case OpAnd16:
    35  		return rewriteValueMIPS_OpAnd16_0(v)
    36  	case OpAnd32:
    37  		return rewriteValueMIPS_OpAnd32_0(v)
    38  	case OpAnd8:
    39  		return rewriteValueMIPS_OpAnd8_0(v)
    40  	case OpAndB:
    41  		return rewriteValueMIPS_OpAndB_0(v)
    42  	case OpAtomicAdd32:
    43  		return rewriteValueMIPS_OpAtomicAdd32_0(v)
    44  	case OpAtomicAnd8:
    45  		return rewriteValueMIPS_OpAtomicAnd8_0(v)
    46  	case OpAtomicCompareAndSwap32:
    47  		return rewriteValueMIPS_OpAtomicCompareAndSwap32_0(v)
    48  	case OpAtomicExchange32:
    49  		return rewriteValueMIPS_OpAtomicExchange32_0(v)
    50  	case OpAtomicLoad32:
    51  		return rewriteValueMIPS_OpAtomicLoad32_0(v)
    52  	case OpAtomicLoadPtr:
    53  		return rewriteValueMIPS_OpAtomicLoadPtr_0(v)
    54  	case OpAtomicOr8:
    55  		return rewriteValueMIPS_OpAtomicOr8_0(v)
    56  	case OpAtomicStore32:
    57  		return rewriteValueMIPS_OpAtomicStore32_0(v)
    58  	case OpAtomicStorePtrNoWB:
    59  		return rewriteValueMIPS_OpAtomicStorePtrNoWB_0(v)
    60  	case OpAvg32u:
    61  		return rewriteValueMIPS_OpAvg32u_0(v)
    62  	case OpBitLen32:
    63  		return rewriteValueMIPS_OpBitLen32_0(v)
    64  	case OpClosureCall:
    65  		return rewriteValueMIPS_OpClosureCall_0(v)
    66  	case OpCom16:
    67  		return rewriteValueMIPS_OpCom16_0(v)
    68  	case OpCom32:
    69  		return rewriteValueMIPS_OpCom32_0(v)
    70  	case OpCom8:
    71  		return rewriteValueMIPS_OpCom8_0(v)
    72  	case OpConst16:
    73  		return rewriteValueMIPS_OpConst16_0(v)
    74  	case OpConst32:
    75  		return rewriteValueMIPS_OpConst32_0(v)
    76  	case OpConst32F:
    77  		return rewriteValueMIPS_OpConst32F_0(v)
    78  	case OpConst64F:
    79  		return rewriteValueMIPS_OpConst64F_0(v)
    80  	case OpConst8:
    81  		return rewriteValueMIPS_OpConst8_0(v)
    82  	case OpConstBool:
    83  		return rewriteValueMIPS_OpConstBool_0(v)
    84  	case OpConstNil:
    85  		return rewriteValueMIPS_OpConstNil_0(v)
    86  	case OpConvert:
    87  		return rewriteValueMIPS_OpConvert_0(v)
    88  	case OpCtz32:
    89  		return rewriteValueMIPS_OpCtz32_0(v)
    90  	case OpCvt32Fto32:
    91  		return rewriteValueMIPS_OpCvt32Fto32_0(v)
    92  	case OpCvt32Fto64F:
    93  		return rewriteValueMIPS_OpCvt32Fto64F_0(v)
    94  	case OpCvt32to32F:
    95  		return rewriteValueMIPS_OpCvt32to32F_0(v)
    96  	case OpCvt32to64F:
    97  		return rewriteValueMIPS_OpCvt32to64F_0(v)
    98  	case OpCvt64Fto32:
    99  		return rewriteValueMIPS_OpCvt64Fto32_0(v)
   100  	case OpCvt64Fto32F:
   101  		return rewriteValueMIPS_OpCvt64Fto32F_0(v)
   102  	case OpDiv16:
   103  		return rewriteValueMIPS_OpDiv16_0(v)
   104  	case OpDiv16u:
   105  		return rewriteValueMIPS_OpDiv16u_0(v)
   106  	case OpDiv32:
   107  		return rewriteValueMIPS_OpDiv32_0(v)
   108  	case OpDiv32F:
   109  		return rewriteValueMIPS_OpDiv32F_0(v)
   110  	case OpDiv32u:
   111  		return rewriteValueMIPS_OpDiv32u_0(v)
   112  	case OpDiv64F:
   113  		return rewriteValueMIPS_OpDiv64F_0(v)
   114  	case OpDiv8:
   115  		return rewriteValueMIPS_OpDiv8_0(v)
   116  	case OpDiv8u:
   117  		return rewriteValueMIPS_OpDiv8u_0(v)
   118  	case OpEq16:
   119  		return rewriteValueMIPS_OpEq16_0(v)
   120  	case OpEq32:
   121  		return rewriteValueMIPS_OpEq32_0(v)
   122  	case OpEq32F:
   123  		return rewriteValueMIPS_OpEq32F_0(v)
   124  	case OpEq64F:
   125  		return rewriteValueMIPS_OpEq64F_0(v)
   126  	case OpEq8:
   127  		return rewriteValueMIPS_OpEq8_0(v)
   128  	case OpEqB:
   129  		return rewriteValueMIPS_OpEqB_0(v)
   130  	case OpEqPtr:
   131  		return rewriteValueMIPS_OpEqPtr_0(v)
   132  	case OpGeq16:
   133  		return rewriteValueMIPS_OpGeq16_0(v)
   134  	case OpGeq16U:
   135  		return rewriteValueMIPS_OpGeq16U_0(v)
   136  	case OpGeq32:
   137  		return rewriteValueMIPS_OpGeq32_0(v)
   138  	case OpGeq32F:
   139  		return rewriteValueMIPS_OpGeq32F_0(v)
   140  	case OpGeq32U:
   141  		return rewriteValueMIPS_OpGeq32U_0(v)
   142  	case OpGeq64F:
   143  		return rewriteValueMIPS_OpGeq64F_0(v)
   144  	case OpGeq8:
   145  		return rewriteValueMIPS_OpGeq8_0(v)
   146  	case OpGeq8U:
   147  		return rewriteValueMIPS_OpGeq8U_0(v)
   148  	case OpGetClosurePtr:
   149  		return rewriteValueMIPS_OpGetClosurePtr_0(v)
   150  	case OpGreater16:
   151  		return rewriteValueMIPS_OpGreater16_0(v)
   152  	case OpGreater16U:
   153  		return rewriteValueMIPS_OpGreater16U_0(v)
   154  	case OpGreater32:
   155  		return rewriteValueMIPS_OpGreater32_0(v)
   156  	case OpGreater32F:
   157  		return rewriteValueMIPS_OpGreater32F_0(v)
   158  	case OpGreater32U:
   159  		return rewriteValueMIPS_OpGreater32U_0(v)
   160  	case OpGreater64F:
   161  		return rewriteValueMIPS_OpGreater64F_0(v)
   162  	case OpGreater8:
   163  		return rewriteValueMIPS_OpGreater8_0(v)
   164  	case OpGreater8U:
   165  		return rewriteValueMIPS_OpGreater8U_0(v)
   166  	case OpHmul32:
   167  		return rewriteValueMIPS_OpHmul32_0(v)
   168  	case OpHmul32u:
   169  		return rewriteValueMIPS_OpHmul32u_0(v)
   170  	case OpInterCall:
   171  		return rewriteValueMIPS_OpInterCall_0(v)
   172  	case OpIsInBounds:
   173  		return rewriteValueMIPS_OpIsInBounds_0(v)
   174  	case OpIsNonNil:
   175  		return rewriteValueMIPS_OpIsNonNil_0(v)
   176  	case OpIsSliceInBounds:
   177  		return rewriteValueMIPS_OpIsSliceInBounds_0(v)
   178  	case OpLeq16:
   179  		return rewriteValueMIPS_OpLeq16_0(v)
   180  	case OpLeq16U:
   181  		return rewriteValueMIPS_OpLeq16U_0(v)
   182  	case OpLeq32:
   183  		return rewriteValueMIPS_OpLeq32_0(v)
   184  	case OpLeq32F:
   185  		return rewriteValueMIPS_OpLeq32F_0(v)
   186  	case OpLeq32U:
   187  		return rewriteValueMIPS_OpLeq32U_0(v)
   188  	case OpLeq64F:
   189  		return rewriteValueMIPS_OpLeq64F_0(v)
   190  	case OpLeq8:
   191  		return rewriteValueMIPS_OpLeq8_0(v)
   192  	case OpLeq8U:
   193  		return rewriteValueMIPS_OpLeq8U_0(v)
   194  	case OpLess16:
   195  		return rewriteValueMIPS_OpLess16_0(v)
   196  	case OpLess16U:
   197  		return rewriteValueMIPS_OpLess16U_0(v)
   198  	case OpLess32:
   199  		return rewriteValueMIPS_OpLess32_0(v)
   200  	case OpLess32F:
   201  		return rewriteValueMIPS_OpLess32F_0(v)
   202  	case OpLess32U:
   203  		return rewriteValueMIPS_OpLess32U_0(v)
   204  	case OpLess64F:
   205  		return rewriteValueMIPS_OpLess64F_0(v)
   206  	case OpLess8:
   207  		return rewriteValueMIPS_OpLess8_0(v)
   208  	case OpLess8U:
   209  		return rewriteValueMIPS_OpLess8U_0(v)
   210  	case OpLoad:
   211  		return rewriteValueMIPS_OpLoad_0(v)
   212  	case OpLsh16x16:
   213  		return rewriteValueMIPS_OpLsh16x16_0(v)
   214  	case OpLsh16x32:
   215  		return rewriteValueMIPS_OpLsh16x32_0(v)
   216  	case OpLsh16x64:
   217  		return rewriteValueMIPS_OpLsh16x64_0(v)
   218  	case OpLsh16x8:
   219  		return rewriteValueMIPS_OpLsh16x8_0(v)
   220  	case OpLsh32x16:
   221  		return rewriteValueMIPS_OpLsh32x16_0(v)
   222  	case OpLsh32x32:
   223  		return rewriteValueMIPS_OpLsh32x32_0(v)
   224  	case OpLsh32x64:
   225  		return rewriteValueMIPS_OpLsh32x64_0(v)
   226  	case OpLsh32x8:
   227  		return rewriteValueMIPS_OpLsh32x8_0(v)
   228  	case OpLsh8x16:
   229  		return rewriteValueMIPS_OpLsh8x16_0(v)
   230  	case OpLsh8x32:
   231  		return rewriteValueMIPS_OpLsh8x32_0(v)
   232  	case OpLsh8x64:
   233  		return rewriteValueMIPS_OpLsh8x64_0(v)
   234  	case OpLsh8x8:
   235  		return rewriteValueMIPS_OpLsh8x8_0(v)
   236  	case OpMIPSADD:
   237  		return rewriteValueMIPS_OpMIPSADD_0(v)
   238  	case OpMIPSADDconst:
   239  		return rewriteValueMIPS_OpMIPSADDconst_0(v)
   240  	case OpMIPSAND:
   241  		return rewriteValueMIPS_OpMIPSAND_0(v)
   242  	case OpMIPSANDconst:
   243  		return rewriteValueMIPS_OpMIPSANDconst_0(v)
   244  	case OpMIPSCMOVZ:
   245  		return rewriteValueMIPS_OpMIPSCMOVZ_0(v)
   246  	case OpMIPSCMOVZzero:
   247  		return rewriteValueMIPS_OpMIPSCMOVZzero_0(v)
   248  	case OpMIPSLoweredAtomicAdd:
   249  		return rewriteValueMIPS_OpMIPSLoweredAtomicAdd_0(v)
   250  	case OpMIPSLoweredAtomicStore:
   251  		return rewriteValueMIPS_OpMIPSLoweredAtomicStore_0(v)
   252  	case OpMIPSMOVBUload:
   253  		return rewriteValueMIPS_OpMIPSMOVBUload_0(v)
   254  	case OpMIPSMOVBUreg:
   255  		return rewriteValueMIPS_OpMIPSMOVBUreg_0(v)
   256  	case OpMIPSMOVBload:
   257  		return rewriteValueMIPS_OpMIPSMOVBload_0(v)
   258  	case OpMIPSMOVBreg:
   259  		return rewriteValueMIPS_OpMIPSMOVBreg_0(v)
   260  	case OpMIPSMOVBstore:
   261  		return rewriteValueMIPS_OpMIPSMOVBstore_0(v)
   262  	case OpMIPSMOVBstorezero:
   263  		return rewriteValueMIPS_OpMIPSMOVBstorezero_0(v)
   264  	case OpMIPSMOVDload:
   265  		return rewriteValueMIPS_OpMIPSMOVDload_0(v)
   266  	case OpMIPSMOVDstore:
   267  		return rewriteValueMIPS_OpMIPSMOVDstore_0(v)
   268  	case OpMIPSMOVFload:
   269  		return rewriteValueMIPS_OpMIPSMOVFload_0(v)
   270  	case OpMIPSMOVFstore:
   271  		return rewriteValueMIPS_OpMIPSMOVFstore_0(v)
   272  	case OpMIPSMOVHUload:
   273  		return rewriteValueMIPS_OpMIPSMOVHUload_0(v)
   274  	case OpMIPSMOVHUreg:
   275  		return rewriteValueMIPS_OpMIPSMOVHUreg_0(v)
   276  	case OpMIPSMOVHload:
   277  		return rewriteValueMIPS_OpMIPSMOVHload_0(v)
   278  	case OpMIPSMOVHreg:
   279  		return rewriteValueMIPS_OpMIPSMOVHreg_0(v)
   280  	case OpMIPSMOVHstore:
   281  		return rewriteValueMIPS_OpMIPSMOVHstore_0(v)
   282  	case OpMIPSMOVHstorezero:
   283  		return rewriteValueMIPS_OpMIPSMOVHstorezero_0(v)
   284  	case OpMIPSMOVWload:
   285  		return rewriteValueMIPS_OpMIPSMOVWload_0(v)
   286  	case OpMIPSMOVWreg:
   287  		return rewriteValueMIPS_OpMIPSMOVWreg_0(v)
   288  	case OpMIPSMOVWstore:
   289  		return rewriteValueMIPS_OpMIPSMOVWstore_0(v)
   290  	case OpMIPSMOVWstorezero:
   291  		return rewriteValueMIPS_OpMIPSMOVWstorezero_0(v)
   292  	case OpMIPSMUL:
   293  		return rewriteValueMIPS_OpMIPSMUL_0(v)
   294  	case OpMIPSNEG:
   295  		return rewriteValueMIPS_OpMIPSNEG_0(v)
   296  	case OpMIPSNOR:
   297  		return rewriteValueMIPS_OpMIPSNOR_0(v)
   298  	case OpMIPSNORconst:
   299  		return rewriteValueMIPS_OpMIPSNORconst_0(v)
   300  	case OpMIPSOR:
   301  		return rewriteValueMIPS_OpMIPSOR_0(v)
   302  	case OpMIPSORconst:
   303  		return rewriteValueMIPS_OpMIPSORconst_0(v)
   304  	case OpMIPSSGT:
   305  		return rewriteValueMIPS_OpMIPSSGT_0(v)
   306  	case OpMIPSSGTU:
   307  		return rewriteValueMIPS_OpMIPSSGTU_0(v)
   308  	case OpMIPSSGTUconst:
   309  		return rewriteValueMIPS_OpMIPSSGTUconst_0(v)
   310  	case OpMIPSSGTUzero:
   311  		return rewriteValueMIPS_OpMIPSSGTUzero_0(v)
   312  	case OpMIPSSGTconst:
   313  		return rewriteValueMIPS_OpMIPSSGTconst_0(v) || rewriteValueMIPS_OpMIPSSGTconst_10(v)
   314  	case OpMIPSSGTzero:
   315  		return rewriteValueMIPS_OpMIPSSGTzero_0(v)
   316  	case OpMIPSSLL:
   317  		return rewriteValueMIPS_OpMIPSSLL_0(v)
   318  	case OpMIPSSLLconst:
   319  		return rewriteValueMIPS_OpMIPSSLLconst_0(v)
   320  	case OpMIPSSRA:
   321  		return rewriteValueMIPS_OpMIPSSRA_0(v)
   322  	case OpMIPSSRAconst:
   323  		return rewriteValueMIPS_OpMIPSSRAconst_0(v)
   324  	case OpMIPSSRL:
   325  		return rewriteValueMIPS_OpMIPSSRL_0(v)
   326  	case OpMIPSSRLconst:
   327  		return rewriteValueMIPS_OpMIPSSRLconst_0(v)
   328  	case OpMIPSSUB:
   329  		return rewriteValueMIPS_OpMIPSSUB_0(v)
   330  	case OpMIPSSUBconst:
   331  		return rewriteValueMIPS_OpMIPSSUBconst_0(v)
   332  	case OpMIPSXOR:
   333  		return rewriteValueMIPS_OpMIPSXOR_0(v)
   334  	case OpMIPSXORconst:
   335  		return rewriteValueMIPS_OpMIPSXORconst_0(v)
   336  	case OpMod16:
   337  		return rewriteValueMIPS_OpMod16_0(v)
   338  	case OpMod16u:
   339  		return rewriteValueMIPS_OpMod16u_0(v)
   340  	case OpMod32:
   341  		return rewriteValueMIPS_OpMod32_0(v)
   342  	case OpMod32u:
   343  		return rewriteValueMIPS_OpMod32u_0(v)
   344  	case OpMod8:
   345  		return rewriteValueMIPS_OpMod8_0(v)
   346  	case OpMod8u:
   347  		return rewriteValueMIPS_OpMod8u_0(v)
   348  	case OpMove:
   349  		return rewriteValueMIPS_OpMove_0(v) || rewriteValueMIPS_OpMove_10(v)
   350  	case OpMul16:
   351  		return rewriteValueMIPS_OpMul16_0(v)
   352  	case OpMul32:
   353  		return rewriteValueMIPS_OpMul32_0(v)
   354  	case OpMul32F:
   355  		return rewriteValueMIPS_OpMul32F_0(v)
   356  	case OpMul32uhilo:
   357  		return rewriteValueMIPS_OpMul32uhilo_0(v)
   358  	case OpMul64F:
   359  		return rewriteValueMIPS_OpMul64F_0(v)
   360  	case OpMul8:
   361  		return rewriteValueMIPS_OpMul8_0(v)
   362  	case OpNeg16:
   363  		return rewriteValueMIPS_OpNeg16_0(v)
   364  	case OpNeg32:
   365  		return rewriteValueMIPS_OpNeg32_0(v)
   366  	case OpNeg32F:
   367  		return rewriteValueMIPS_OpNeg32F_0(v)
   368  	case OpNeg64F:
   369  		return rewriteValueMIPS_OpNeg64F_0(v)
   370  	case OpNeg8:
   371  		return rewriteValueMIPS_OpNeg8_0(v)
   372  	case OpNeq16:
   373  		return rewriteValueMIPS_OpNeq16_0(v)
   374  	case OpNeq32:
   375  		return rewriteValueMIPS_OpNeq32_0(v)
   376  	case OpNeq32F:
   377  		return rewriteValueMIPS_OpNeq32F_0(v)
   378  	case OpNeq64F:
   379  		return rewriteValueMIPS_OpNeq64F_0(v)
   380  	case OpNeq8:
   381  		return rewriteValueMIPS_OpNeq8_0(v)
   382  	case OpNeqB:
   383  		return rewriteValueMIPS_OpNeqB_0(v)
   384  	case OpNeqPtr:
   385  		return rewriteValueMIPS_OpNeqPtr_0(v)
   386  	case OpNilCheck:
   387  		return rewriteValueMIPS_OpNilCheck_0(v)
   388  	case OpNot:
   389  		return rewriteValueMIPS_OpNot_0(v)
   390  	case OpOffPtr:
   391  		return rewriteValueMIPS_OpOffPtr_0(v)
   392  	case OpOr16:
   393  		return rewriteValueMIPS_OpOr16_0(v)
   394  	case OpOr32:
   395  		return rewriteValueMIPS_OpOr32_0(v)
   396  	case OpOr8:
   397  		return rewriteValueMIPS_OpOr8_0(v)
   398  	case OpOrB:
   399  		return rewriteValueMIPS_OpOrB_0(v)
   400  	case OpRound32F:
   401  		return rewriteValueMIPS_OpRound32F_0(v)
   402  	case OpRound64F:
   403  		return rewriteValueMIPS_OpRound64F_0(v)
   404  	case OpRsh16Ux16:
   405  		return rewriteValueMIPS_OpRsh16Ux16_0(v)
   406  	case OpRsh16Ux32:
   407  		return rewriteValueMIPS_OpRsh16Ux32_0(v)
   408  	case OpRsh16Ux64:
   409  		return rewriteValueMIPS_OpRsh16Ux64_0(v)
   410  	case OpRsh16Ux8:
   411  		return rewriteValueMIPS_OpRsh16Ux8_0(v)
   412  	case OpRsh16x16:
   413  		return rewriteValueMIPS_OpRsh16x16_0(v)
   414  	case OpRsh16x32:
   415  		return rewriteValueMIPS_OpRsh16x32_0(v)
   416  	case OpRsh16x64:
   417  		return rewriteValueMIPS_OpRsh16x64_0(v)
   418  	case OpRsh16x8:
   419  		return rewriteValueMIPS_OpRsh16x8_0(v)
   420  	case OpRsh32Ux16:
   421  		return rewriteValueMIPS_OpRsh32Ux16_0(v)
   422  	case OpRsh32Ux32:
   423  		return rewriteValueMIPS_OpRsh32Ux32_0(v)
   424  	case OpRsh32Ux64:
   425  		return rewriteValueMIPS_OpRsh32Ux64_0(v)
   426  	case OpRsh32Ux8:
   427  		return rewriteValueMIPS_OpRsh32Ux8_0(v)
   428  	case OpRsh32x16:
   429  		return rewriteValueMIPS_OpRsh32x16_0(v)
   430  	case OpRsh32x32:
   431  		return rewriteValueMIPS_OpRsh32x32_0(v)
   432  	case OpRsh32x64:
   433  		return rewriteValueMIPS_OpRsh32x64_0(v)
   434  	case OpRsh32x8:
   435  		return rewriteValueMIPS_OpRsh32x8_0(v)
   436  	case OpRsh8Ux16:
   437  		return rewriteValueMIPS_OpRsh8Ux16_0(v)
   438  	case OpRsh8Ux32:
   439  		return rewriteValueMIPS_OpRsh8Ux32_0(v)
   440  	case OpRsh8Ux64:
   441  		return rewriteValueMIPS_OpRsh8Ux64_0(v)
   442  	case OpRsh8Ux8:
   443  		return rewriteValueMIPS_OpRsh8Ux8_0(v)
   444  	case OpRsh8x16:
   445  		return rewriteValueMIPS_OpRsh8x16_0(v)
   446  	case OpRsh8x32:
   447  		return rewriteValueMIPS_OpRsh8x32_0(v)
   448  	case OpRsh8x64:
   449  		return rewriteValueMIPS_OpRsh8x64_0(v)
   450  	case OpRsh8x8:
   451  		return rewriteValueMIPS_OpRsh8x8_0(v)
   452  	case OpSelect0:
   453  		return rewriteValueMIPS_OpSelect0_0(v) || rewriteValueMIPS_OpSelect0_10(v)
   454  	case OpSelect1:
   455  		return rewriteValueMIPS_OpSelect1_0(v) || rewriteValueMIPS_OpSelect1_10(v)
   456  	case OpSignExt16to32:
   457  		return rewriteValueMIPS_OpSignExt16to32_0(v)
   458  	case OpSignExt8to16:
   459  		return rewriteValueMIPS_OpSignExt8to16_0(v)
   460  	case OpSignExt8to32:
   461  		return rewriteValueMIPS_OpSignExt8to32_0(v)
   462  	case OpSignmask:
   463  		return rewriteValueMIPS_OpSignmask_0(v)
   464  	case OpSlicemask:
   465  		return rewriteValueMIPS_OpSlicemask_0(v)
   466  	case OpSqrt:
   467  		return rewriteValueMIPS_OpSqrt_0(v)
   468  	case OpStaticCall:
   469  		return rewriteValueMIPS_OpStaticCall_0(v)
   470  	case OpStore:
   471  		return rewriteValueMIPS_OpStore_0(v)
   472  	case OpSub16:
   473  		return rewriteValueMIPS_OpSub16_0(v)
   474  	case OpSub32:
   475  		return rewriteValueMIPS_OpSub32_0(v)
   476  	case OpSub32F:
   477  		return rewriteValueMIPS_OpSub32F_0(v)
   478  	case OpSub32withcarry:
   479  		return rewriteValueMIPS_OpSub32withcarry_0(v)
   480  	case OpSub64F:
   481  		return rewriteValueMIPS_OpSub64F_0(v)
   482  	case OpSub8:
   483  		return rewriteValueMIPS_OpSub8_0(v)
   484  	case OpSubPtr:
   485  		return rewriteValueMIPS_OpSubPtr_0(v)
   486  	case OpTrunc16to8:
   487  		return rewriteValueMIPS_OpTrunc16to8_0(v)
   488  	case OpTrunc32to16:
   489  		return rewriteValueMIPS_OpTrunc32to16_0(v)
   490  	case OpTrunc32to8:
   491  		return rewriteValueMIPS_OpTrunc32to8_0(v)
   492  	case OpXor16:
   493  		return rewriteValueMIPS_OpXor16_0(v)
   494  	case OpXor32:
   495  		return rewriteValueMIPS_OpXor32_0(v)
   496  	case OpXor8:
   497  		return rewriteValueMIPS_OpXor8_0(v)
   498  	case OpZero:
   499  		return rewriteValueMIPS_OpZero_0(v) || rewriteValueMIPS_OpZero_10(v)
   500  	case OpZeroExt16to32:
   501  		return rewriteValueMIPS_OpZeroExt16to32_0(v)
   502  	case OpZeroExt8to16:
   503  		return rewriteValueMIPS_OpZeroExt8to16_0(v)
   504  	case OpZeroExt8to32:
   505  		return rewriteValueMIPS_OpZeroExt8to32_0(v)
   506  	case OpZeromask:
   507  		return rewriteValueMIPS_OpZeromask_0(v)
   508  	}
   509  	return false
   510  }
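
rewriteValueMIPS above performs at most one rewrite step per call; the surrounding SSA pass keeps invoking it (together with the block rewriter) until nothing changes. As a rough orientation, here is a minimal sketch of such a fixpoint loop — the real driver (applyRewrite in this package's rewrite.go) additionally runs rewriteBlockMIPS, removes dead values, and guards against rewrite loops; the helper name below is invented for illustration and is not part of the generated file.

// Sketch only: a simplified fixpoint driver for the generated value rewriter.
func rewriteToFixpointMIPS(f *Func) {
	for changed := true; changed; {
		changed = false
		for _, b := range f.Blocks {
			for _, v := range b.Values {
				if rewriteValueMIPS(v) { // reports whether v was rewritten in place
					changed = true
				}
			}
		}
	}
}
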
   511  func rewriteValueMIPS_OpAdd16_0(v *Value) bool {
   512  	// match: (Add16 x y)
   513  	// cond:
   514  	// result: (ADD x y)
   515  	for {
   516  		_ = v.Args[1]
   517  		x := v.Args[0]
   518  		y := v.Args[1]
   519  		v.reset(OpMIPSADD)
   520  		v.AddArg(x)
   521  		v.AddArg(y)
   522  		return true
   523  	}
   524  }
   525  func rewriteValueMIPS_OpAdd32_0(v *Value) bool {
   526  	// match: (Add32 x y)
   527  	// cond:
   528  	// result: (ADD x y)
   529  	for {
   530  		_ = v.Args[1]
   531  		x := v.Args[0]
   532  		y := v.Args[1]
   533  		v.reset(OpMIPSADD)
   534  		v.AddArg(x)
   535  		v.AddArg(y)
   536  		return true
   537  	}
   538  }
   539  func rewriteValueMIPS_OpAdd32F_0(v *Value) bool {
   540  	// match: (Add32F x y)
   541  	// cond:
   542  	// result: (ADDF x y)
   543  	for {
   544  		_ = v.Args[1]
   545  		x := v.Args[0]
   546  		y := v.Args[1]
   547  		v.reset(OpMIPSADDF)
   548  		v.AddArg(x)
   549  		v.AddArg(y)
   550  		return true
   551  	}
   552  }
   553  func rewriteValueMIPS_OpAdd32withcarry_0(v *Value) bool {
   554  	b := v.Block
   555  	_ = b
   556  	// match: (Add32withcarry <t> x y c)
   557  	// cond:
   558  	// result: (ADD c (ADD <t> x y))
   559  	for {
   560  		t := v.Type
   561  		_ = v.Args[2]
   562  		x := v.Args[0]
   563  		y := v.Args[1]
   564  		c := v.Args[2]
   565  		v.reset(OpMIPSADD)
   566  		v.AddArg(c)
   567  		v0 := b.NewValue0(v.Pos, OpMIPSADD, t)
   568  		v0.AddArg(x)
   569  		v0.AddArg(y)
   570  		v.AddArg(v0)
   571  		return true
   572  	}
   573  }
   574  func rewriteValueMIPS_OpAdd64F_0(v *Value) bool {
   575  	// match: (Add64F x y)
   576  	// cond:
   577  	// result: (ADDD x y)
   578  	for {
   579  		_ = v.Args[1]
   580  		x := v.Args[0]
   581  		y := v.Args[1]
   582  		v.reset(OpMIPSADDD)
   583  		v.AddArg(x)
   584  		v.AddArg(y)
   585  		return true
   586  	}
   587  }
   588  func rewriteValueMIPS_OpAdd8_0(v *Value) bool {
   589  	// match: (Add8 x y)
   590  	// cond:
   591  	// result: (ADD x y)
   592  	for {
   593  		_ = v.Args[1]
   594  		x := v.Args[0]
   595  		y := v.Args[1]
   596  		v.reset(OpMIPSADD)
   597  		v.AddArg(x)
   598  		v.AddArg(y)
   599  		return true
   600  	}
   601  }
   602  func rewriteValueMIPS_OpAddPtr_0(v *Value) bool {
   603  	// match: (AddPtr x y)
   604  	// cond:
   605  	// result: (ADD x y)
   606  	for {
   607  		_ = v.Args[1]
   608  		x := v.Args[0]
   609  		y := v.Args[1]
   610  		v.reset(OpMIPSADD)
   611  		v.AddArg(x)
   612  		v.AddArg(y)
   613  		return true
   614  	}
   615  }
   616  func rewriteValueMIPS_OpAddr_0(v *Value) bool {
   617  	// match: (Addr {sym} base)
   618  	// cond:
   619  	// result: (MOVWaddr {sym} base)
   620  	for {
   621  		sym := v.Aux
   622  		base := v.Args[0]
   623  		v.reset(OpMIPSMOVWaddr)
   624  		v.Aux = sym
   625  		v.AddArg(base)
   626  		return true
   627  	}
   628  }
   629  func rewriteValueMIPS_OpAnd16_0(v *Value) bool {
   630  	// match: (And16 x y)
   631  	// cond:
   632  	// result: (AND x y)
   633  	for {
   634  		_ = v.Args[1]
   635  		x := v.Args[0]
   636  		y := v.Args[1]
   637  		v.reset(OpMIPSAND)
   638  		v.AddArg(x)
   639  		v.AddArg(y)
   640  		return true
   641  	}
   642  }
   643  func rewriteValueMIPS_OpAnd32_0(v *Value) bool {
   644  	// match: (And32 x y)
   645  	// cond:
   646  	// result: (AND x y)
   647  	for {
   648  		_ = v.Args[1]
   649  		x := v.Args[0]
   650  		y := v.Args[1]
   651  		v.reset(OpMIPSAND)
   652  		v.AddArg(x)
   653  		v.AddArg(y)
   654  		return true
   655  	}
   656  }
   657  func rewriteValueMIPS_OpAnd8_0(v *Value) bool {
   658  	// match: (And8 x y)
   659  	// cond:
   660  	// result: (AND x y)
   661  	for {
   662  		_ = v.Args[1]
   663  		x := v.Args[0]
   664  		y := v.Args[1]
   665  		v.reset(OpMIPSAND)
   666  		v.AddArg(x)
   667  		v.AddArg(y)
   668  		return true
   669  	}
   670  }
   671  func rewriteValueMIPS_OpAndB_0(v *Value) bool {
   672  	// match: (AndB x y)
   673  	// cond:
   674  	// result: (AND x y)
   675  	for {
   676  		_ = v.Args[1]
   677  		x := v.Args[0]
   678  		y := v.Args[1]
   679  		v.reset(OpMIPSAND)
   680  		v.AddArg(x)
   681  		v.AddArg(y)
   682  		return true
   683  	}
   684  }
   685  func rewriteValueMIPS_OpAtomicAdd32_0(v *Value) bool {
   686  	// match: (AtomicAdd32 ptr val mem)
   687  	// cond:
   688  	// result: (LoweredAtomicAdd ptr val mem)
   689  	for {
   690  		_ = v.Args[2]
   691  		ptr := v.Args[0]
   692  		val := v.Args[1]
   693  		mem := v.Args[2]
   694  		v.reset(OpMIPSLoweredAtomicAdd)
   695  		v.AddArg(ptr)
   696  		v.AddArg(val)
   697  		v.AddArg(mem)
   698  		return true
   699  	}
   700  }
   701  func rewriteValueMIPS_OpAtomicAnd8_0(v *Value) bool {
   702  	b := v.Block
   703  	_ = b
   704  	config := b.Func.Config
   705  	_ = config
   706  	typ := &b.Func.Config.Types
   707  	_ = typ
   708  	// match: (AtomicAnd8 ptr val mem)
   709  	// cond: !config.BigEndian
   710  	// result: (LoweredAtomicAnd (AND <typ.UInt32Ptr> (MOVWconst [^3]) ptr) 		(OR <typ.UInt32> (SLL <typ.UInt32> (ZeroExt8to32 val) 			(SLLconst <typ.UInt32> [3] 				(ANDconst  <typ.UInt32> [3] ptr))) 		(NORconst [0] <typ.UInt32> (SLL <typ.UInt32> 			(MOVWconst [0xff]) (SLLconst <typ.UInt32> [3] 				(ANDconst <typ.UInt32> [3] ptr))))) mem)
   711  	for {
   712  		_ = v.Args[2]
   713  		ptr := v.Args[0]
   714  		val := v.Args[1]
   715  		mem := v.Args[2]
   716  		if !(!config.BigEndian) {
   717  			break
   718  		}
   719  		v.reset(OpMIPSLoweredAtomicAnd)
   720  		v0 := b.NewValue0(v.Pos, OpMIPSAND, typ.UInt32Ptr)
   721  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   722  		v1.AuxInt = ^3
   723  		v0.AddArg(v1)
   724  		v0.AddArg(ptr)
   725  		v.AddArg(v0)
   726  		v2 := b.NewValue0(v.Pos, OpMIPSOR, typ.UInt32)
   727  		v3 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
   728  		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   729  		v4.AddArg(val)
   730  		v3.AddArg(v4)
   731  		v5 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
   732  		v5.AuxInt = 3
   733  		v6 := b.NewValue0(v.Pos, OpMIPSANDconst, typ.UInt32)
   734  		v6.AuxInt = 3
   735  		v6.AddArg(ptr)
   736  		v5.AddArg(v6)
   737  		v3.AddArg(v5)
   738  		v2.AddArg(v3)
   739  		v7 := b.NewValue0(v.Pos, OpMIPSNORconst, typ.UInt32)
   740  		v7.AuxInt = 0
   741  		v8 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
   742  		v9 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   743  		v9.AuxInt = 0xff
   744  		v8.AddArg(v9)
   745  		v10 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
   746  		v10.AuxInt = 3
   747  		v11 := b.NewValue0(v.Pos, OpMIPSANDconst, typ.UInt32)
   748  		v11.AuxInt = 3
   749  		v11.AddArg(ptr)
   750  		v10.AddArg(v11)
   751  		v8.AddArg(v10)
   752  		v7.AddArg(v8)
   753  		v2.AddArg(v7)
   754  		v.AddArg(v2)
   755  		v.AddArg(mem)
   756  		return true
   757  	}
   758  	// match: (AtomicAnd8 ptr val mem)
   759  	// cond: config.BigEndian
   760  	// result: (LoweredAtomicAnd (AND <typ.UInt32Ptr> (MOVWconst [^3]) ptr) 		(OR <typ.UInt32> (SLL <typ.UInt32> (ZeroExt8to32 val) 			(SLLconst <typ.UInt32> [3] 				(ANDconst  <typ.UInt32> [3] 					(XORconst <typ.UInt32> [3] ptr)))) 		(NORconst [0] <typ.UInt32> (SLL <typ.UInt32> 			(MOVWconst [0xff]) (SLLconst <typ.UInt32> [3] 				(ANDconst <typ.UInt32> [3] 					(XORconst <typ.UInt32> [3] ptr)))))) mem)
   761  	for {
   762  		_ = v.Args[2]
   763  		ptr := v.Args[0]
   764  		val := v.Args[1]
   765  		mem := v.Args[2]
   766  		if !(config.BigEndian) {
   767  			break
   768  		}
   769  		v.reset(OpMIPSLoweredAtomicAnd)
   770  		v0 := b.NewValue0(v.Pos, OpMIPSAND, typ.UInt32Ptr)
   771  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   772  		v1.AuxInt = ^3
   773  		v0.AddArg(v1)
   774  		v0.AddArg(ptr)
   775  		v.AddArg(v0)
   776  		v2 := b.NewValue0(v.Pos, OpMIPSOR, typ.UInt32)
   777  		v3 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
   778  		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   779  		v4.AddArg(val)
   780  		v3.AddArg(v4)
   781  		v5 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
   782  		v5.AuxInt = 3
   783  		v6 := b.NewValue0(v.Pos, OpMIPSANDconst, typ.UInt32)
   784  		v6.AuxInt = 3
   785  		v7 := b.NewValue0(v.Pos, OpMIPSXORconst, typ.UInt32)
   786  		v7.AuxInt = 3
   787  		v7.AddArg(ptr)
   788  		v6.AddArg(v7)
   789  		v5.AddArg(v6)
   790  		v3.AddArg(v5)
   791  		v2.AddArg(v3)
   792  		v8 := b.NewValue0(v.Pos, OpMIPSNORconst, typ.UInt32)
   793  		v8.AuxInt = 0
   794  		v9 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
   795  		v10 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   796  		v10.AuxInt = 0xff
   797  		v9.AddArg(v10)
   798  		v11 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
   799  		v11.AuxInt = 3
   800  		v12 := b.NewValue0(v.Pos, OpMIPSANDconst, typ.UInt32)
   801  		v12.AuxInt = 3
   802  		v13 := b.NewValue0(v.Pos, OpMIPSXORconst, typ.UInt32)
   803  		v13.AuxInt = 3
   804  		v13.AddArg(ptr)
   805  		v12.AddArg(v13)
   806  		v11.AddArg(v12)
   807  		v9.AddArg(v11)
   808  		v8.AddArg(v9)
   809  		v2.AddArg(v8)
   810  		v.AddArg(v2)
   811  		v.AddArg(mem)
   812  		return true
   813  	}
   814  	return false
   815  }
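
MIPS has no sub-word atomics, so the two AtomicAnd8 rules above widen the operation to the containing 32-bit word: the address is rounded down with ptr & ^3, the byte value is shifted into its lane (the lane index comes from the low two pointer bits, XOR-ed with 3 on big-endian), and every other lane of the AND operand is filled with 0xff so those bytes pass through unchanged. A minimal sketch of the arithmetic the little-endian rule encodes (the function and variable names are illustrative, not part of the generated code):

// Sketch of the word address and AND mask built by the little-endian rule.
func atomicAnd8Operands(ptr uintptr, val uint8) (wordAddr uintptr, mask uint32) {
	wordAddr = ptr &^ 3             // (AND (MOVWconst [^3]) ptr)
	shift := (uint32(ptr) & 3) << 3 // (SLLconst [3] (ANDconst [3] ptr))
	// (OR (SLL (ZeroExt8to32 val) shift) (NORconst [0] (SLL (MOVWconst [0xff]) shift)))
	mask = uint32(val)<<shift | ^(uint32(0xff) << shift)
	return wordAddr, mask // LoweredAtomicAnd then ANDs the whole word with mask
}
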
   816  func rewriteValueMIPS_OpAtomicCompareAndSwap32_0(v *Value) bool {
   817  	// match: (AtomicCompareAndSwap32 ptr old new_ mem)
   818  	// cond:
   819  	// result: (LoweredAtomicCas ptr old new_ mem)
   820  	for {
   821  		_ = v.Args[3]
   822  		ptr := v.Args[0]
   823  		old := v.Args[1]
   824  		new_ := v.Args[2]
   825  		mem := v.Args[3]
   826  		v.reset(OpMIPSLoweredAtomicCas)
   827  		v.AddArg(ptr)
   828  		v.AddArg(old)
   829  		v.AddArg(new_)
   830  		v.AddArg(mem)
   831  		return true
   832  	}
   833  }
   834  func rewriteValueMIPS_OpAtomicExchange32_0(v *Value) bool {
   835  	// match: (AtomicExchange32 ptr val mem)
   836  	// cond:
   837  	// result: (LoweredAtomicExchange ptr val mem)
   838  	for {
   839  		_ = v.Args[2]
   840  		ptr := v.Args[0]
   841  		val := v.Args[1]
   842  		mem := v.Args[2]
   843  		v.reset(OpMIPSLoweredAtomicExchange)
   844  		v.AddArg(ptr)
   845  		v.AddArg(val)
   846  		v.AddArg(mem)
   847  		return true
   848  	}
   849  }
   850  func rewriteValueMIPS_OpAtomicLoad32_0(v *Value) bool {
   851  	// match: (AtomicLoad32 ptr mem)
   852  	// cond:
   853  	// result: (LoweredAtomicLoad ptr mem)
   854  	for {
   855  		_ = v.Args[1]
   856  		ptr := v.Args[0]
   857  		mem := v.Args[1]
   858  		v.reset(OpMIPSLoweredAtomicLoad)
   859  		v.AddArg(ptr)
   860  		v.AddArg(mem)
   861  		return true
   862  	}
   863  }
   864  func rewriteValueMIPS_OpAtomicLoadPtr_0(v *Value) bool {
   865  	// match: (AtomicLoadPtr ptr mem)
   866  	// cond:
   867  	// result: (LoweredAtomicLoad  ptr mem)
   868  	for {
   869  		_ = v.Args[1]
   870  		ptr := v.Args[0]
   871  		mem := v.Args[1]
   872  		v.reset(OpMIPSLoweredAtomicLoad)
   873  		v.AddArg(ptr)
   874  		v.AddArg(mem)
   875  		return true
   876  	}
   877  }
   878  func rewriteValueMIPS_OpAtomicOr8_0(v *Value) bool {
   879  	b := v.Block
   880  	_ = b
   881  	config := b.Func.Config
   882  	_ = config
   883  	typ := &b.Func.Config.Types
   884  	_ = typ
   885  	// match: (AtomicOr8 ptr val mem)
   886  	// cond: !config.BigEndian
   887  	// result: (LoweredAtomicOr (AND <typ.UInt32Ptr> (MOVWconst [^3]) ptr) 		(SLL <typ.UInt32> (ZeroExt8to32 val) 			(SLLconst <typ.UInt32> [3] 				(ANDconst <typ.UInt32> [3] ptr))) mem)
   888  	for {
   889  		_ = v.Args[2]
   890  		ptr := v.Args[0]
   891  		val := v.Args[1]
   892  		mem := v.Args[2]
   893  		if !(!config.BigEndian) {
   894  			break
   895  		}
   896  		v.reset(OpMIPSLoweredAtomicOr)
   897  		v0 := b.NewValue0(v.Pos, OpMIPSAND, typ.UInt32Ptr)
   898  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   899  		v1.AuxInt = ^3
   900  		v0.AddArg(v1)
   901  		v0.AddArg(ptr)
   902  		v.AddArg(v0)
   903  		v2 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
   904  		v3 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   905  		v3.AddArg(val)
   906  		v2.AddArg(v3)
   907  		v4 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
   908  		v4.AuxInt = 3
   909  		v5 := b.NewValue0(v.Pos, OpMIPSANDconst, typ.UInt32)
   910  		v5.AuxInt = 3
   911  		v5.AddArg(ptr)
   912  		v4.AddArg(v5)
   913  		v2.AddArg(v4)
   914  		v.AddArg(v2)
   915  		v.AddArg(mem)
   916  		return true
   917  	}
   918  	// match: (AtomicOr8 ptr val mem)
   919  	// cond: config.BigEndian
   920  	// result: (LoweredAtomicOr (AND <typ.UInt32Ptr> (MOVWconst [^3]) ptr) 		(SLL <typ.UInt32> (ZeroExt8to32 val) 			(SLLconst <typ.UInt32> [3] 				(ANDconst <typ.UInt32> [3] 					(XORconst <typ.UInt32> [3] ptr)))) mem)
   921  	for {
   922  		_ = v.Args[2]
   923  		ptr := v.Args[0]
   924  		val := v.Args[1]
   925  		mem := v.Args[2]
   926  		if !(config.BigEndian) {
   927  			break
   928  		}
   929  		v.reset(OpMIPSLoweredAtomicOr)
   930  		v0 := b.NewValue0(v.Pos, OpMIPSAND, typ.UInt32Ptr)
   931  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   932  		v1.AuxInt = ^3
   933  		v0.AddArg(v1)
   934  		v0.AddArg(ptr)
   935  		v.AddArg(v0)
   936  		v2 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
   937  		v3 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   938  		v3.AddArg(val)
   939  		v2.AddArg(v3)
   940  		v4 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
   941  		v4.AuxInt = 3
   942  		v5 := b.NewValue0(v.Pos, OpMIPSANDconst, typ.UInt32)
   943  		v5.AuxInt = 3
   944  		v6 := b.NewValue0(v.Pos, OpMIPSXORconst, typ.UInt32)
   945  		v6.AuxInt = 3
   946  		v6.AddArg(ptr)
   947  		v5.AddArg(v6)
   948  		v4.AddArg(v5)
   949  		v2.AddArg(v4)
   950  		v.AddArg(v2)
   951  		v.AddArg(mem)
   952  		return true
   953  	}
   954  	return false
   955  }
   956  func rewriteValueMIPS_OpAtomicStore32_0(v *Value) bool {
   957  	// match: (AtomicStore32 ptr val mem)
   958  	// cond:
   959  	// result: (LoweredAtomicStore ptr val mem)
   960  	for {
   961  		_ = v.Args[2]
   962  		ptr := v.Args[0]
   963  		val := v.Args[1]
   964  		mem := v.Args[2]
   965  		v.reset(OpMIPSLoweredAtomicStore)
   966  		v.AddArg(ptr)
   967  		v.AddArg(val)
   968  		v.AddArg(mem)
   969  		return true
   970  	}
   971  }
   972  func rewriteValueMIPS_OpAtomicStorePtrNoWB_0(v *Value) bool {
   973  	// match: (AtomicStorePtrNoWB ptr val mem)
   974  	// cond:
   975  	// result: (LoweredAtomicStore  ptr val mem)
   976  	for {
   977  		_ = v.Args[2]
   978  		ptr := v.Args[0]
   979  		val := v.Args[1]
   980  		mem := v.Args[2]
   981  		v.reset(OpMIPSLoweredAtomicStore)
   982  		v.AddArg(ptr)
   983  		v.AddArg(val)
   984  		v.AddArg(mem)
   985  		return true
   986  	}
   987  }
   988  func rewriteValueMIPS_OpAvg32u_0(v *Value) bool {
   989  	b := v.Block
   990  	_ = b
   991  	// match: (Avg32u <t> x y)
   992  	// cond:
   993  	// result: (ADD (SRLconst <t> (SUB <t> x y) [1]) y)
   994  	for {
   995  		t := v.Type
   996  		_ = v.Args[1]
   997  		x := v.Args[0]
   998  		y := v.Args[1]
   999  		v.reset(OpMIPSADD)
  1000  		v0 := b.NewValue0(v.Pos, OpMIPSSRLconst, t)
  1001  		v0.AuxInt = 1
  1002  		v1 := b.NewValue0(v.Pos, OpMIPSSUB, t)
  1003  		v1.AddArg(x)
  1004  		v1.AddArg(y)
  1005  		v0.AddArg(v1)
  1006  		v.AddArg(v0)
  1007  		v.AddArg(y)
  1008  		return true
  1009  	}
  1010  }
  1011  func rewriteValueMIPS_OpBitLen32_0(v *Value) bool {
  1012  	b := v.Block
  1013  	_ = b
  1014  	typ := &b.Func.Config.Types
  1015  	_ = typ
  1016  	// match: (BitLen32 <t> x)
  1017  	// cond:
  1018  	// result: (SUB (MOVWconst [32]) (CLZ <t> x))
  1019  	for {
  1020  		t := v.Type
  1021  		x := v.Args[0]
  1022  		v.reset(OpMIPSSUB)
  1023  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  1024  		v0.AuxInt = 32
  1025  		v.AddArg(v0)
  1026  		v1 := b.NewValue0(v.Pos, OpMIPSCLZ, t)
  1027  		v1.AddArg(x)
  1028  		v.AddArg(v1)
  1029  		return true
  1030  	}
  1031  }
  1032  func rewriteValueMIPS_OpClosureCall_0(v *Value) bool {
  1033  	// match: (ClosureCall [argwid] entry closure mem)
  1034  	// cond:
  1035  	// result: (CALLclosure [argwid] entry closure mem)
  1036  	for {
  1037  		argwid := v.AuxInt
  1038  		_ = v.Args[2]
  1039  		entry := v.Args[0]
  1040  		closure := v.Args[1]
  1041  		mem := v.Args[2]
  1042  		v.reset(OpMIPSCALLclosure)
  1043  		v.AuxInt = argwid
  1044  		v.AddArg(entry)
  1045  		v.AddArg(closure)
  1046  		v.AddArg(mem)
  1047  		return true
  1048  	}
  1049  }
  1050  func rewriteValueMIPS_OpCom16_0(v *Value) bool {
  1051  	// match: (Com16 x)
  1052  	// cond:
  1053  	// result: (NORconst [0] x)
  1054  	for {
  1055  		x := v.Args[0]
  1056  		v.reset(OpMIPSNORconst)
  1057  		v.AuxInt = 0
  1058  		v.AddArg(x)
  1059  		return true
  1060  	}
  1061  }
  1062  func rewriteValueMIPS_OpCom32_0(v *Value) bool {
  1063  	// match: (Com32 x)
  1064  	// cond:
  1065  	// result: (NORconst [0] x)
  1066  	for {
  1067  		x := v.Args[0]
  1068  		v.reset(OpMIPSNORconst)
  1069  		v.AuxInt = 0
  1070  		v.AddArg(x)
  1071  		return true
  1072  	}
  1073  }
  1074  func rewriteValueMIPS_OpCom8_0(v *Value) bool {
  1075  	// match: (Com8 x)
  1076  	// cond:
  1077  	// result: (NORconst [0] x)
  1078  	for {
  1079  		x := v.Args[0]
  1080  		v.reset(OpMIPSNORconst)
  1081  		v.AuxInt = 0
  1082  		v.AddArg(x)
  1083  		return true
  1084  	}
  1085  }
  1086  func rewriteValueMIPS_OpConst16_0(v *Value) bool {
  1087  	// match: (Const16 [val])
  1088  	// cond:
  1089  	// result: (MOVWconst [val])
  1090  	for {
  1091  		val := v.AuxInt
  1092  		v.reset(OpMIPSMOVWconst)
  1093  		v.AuxInt = val
  1094  		return true
  1095  	}
  1096  }
  1097  func rewriteValueMIPS_OpConst32_0(v *Value) bool {
  1098  	// match: (Const32 [val])
  1099  	// cond:
  1100  	// result: (MOVWconst [val])
  1101  	for {
  1102  		val := v.AuxInt
  1103  		v.reset(OpMIPSMOVWconst)
  1104  		v.AuxInt = val
  1105  		return true
  1106  	}
  1107  }
  1108  func rewriteValueMIPS_OpConst32F_0(v *Value) bool {
  1109  	// match: (Const32F [val])
  1110  	// cond:
  1111  	// result: (MOVFconst [val])
  1112  	for {
  1113  		val := v.AuxInt
  1114  		v.reset(OpMIPSMOVFconst)
  1115  		v.AuxInt = val
  1116  		return true
  1117  	}
  1118  }
  1119  func rewriteValueMIPS_OpConst64F_0(v *Value) bool {
  1120  	// match: (Const64F [val])
  1121  	// cond:
  1122  	// result: (MOVDconst [val])
  1123  	for {
  1124  		val := v.AuxInt
  1125  		v.reset(OpMIPSMOVDconst)
  1126  		v.AuxInt = val
  1127  		return true
  1128  	}
  1129  }
  1130  func rewriteValueMIPS_OpConst8_0(v *Value) bool {
  1131  	// match: (Const8 [val])
  1132  	// cond:
  1133  	// result: (MOVWconst [val])
  1134  	for {
  1135  		val := v.AuxInt
  1136  		v.reset(OpMIPSMOVWconst)
  1137  		v.AuxInt = val
  1138  		return true
  1139  	}
  1140  }
  1141  func rewriteValueMIPS_OpConstBool_0(v *Value) bool {
  1142  	// match: (ConstBool [b])
  1143  	// cond:
  1144  	// result: (MOVWconst [b])
  1145  	for {
  1146  		b := v.AuxInt
  1147  		v.reset(OpMIPSMOVWconst)
  1148  		v.AuxInt = b
  1149  		return true
  1150  	}
  1151  }
  1152  func rewriteValueMIPS_OpConstNil_0(v *Value) bool {
  1153  	// match: (ConstNil)
  1154  	// cond:
  1155  	// result: (MOVWconst [0])
  1156  	for {
  1157  		v.reset(OpMIPSMOVWconst)
  1158  		v.AuxInt = 0
  1159  		return true
  1160  	}
  1161  }
  1162  func rewriteValueMIPS_OpConvert_0(v *Value) bool {
  1163  	// match: (Convert x mem)
  1164  	// cond:
  1165  	// result: (MOVWconvert x mem)
  1166  	for {
  1167  		_ = v.Args[1]
  1168  		x := v.Args[0]
  1169  		mem := v.Args[1]
  1170  		v.reset(OpMIPSMOVWconvert)
  1171  		v.AddArg(x)
  1172  		v.AddArg(mem)
  1173  		return true
  1174  	}
  1175  }
  1176  func rewriteValueMIPS_OpCtz32_0(v *Value) bool {
  1177  	b := v.Block
  1178  	_ = b
  1179  	typ := &b.Func.Config.Types
  1180  	_ = typ
  1181  	// match: (Ctz32 <t> x)
  1182  	// cond:
  1183  	// result: (SUB (MOVWconst [32]) (CLZ <t> (SUBconst <t> [1] (AND <t> x (NEG <t> x)))))
  1184  	for {
  1185  		t := v.Type
  1186  		x := v.Args[0]
  1187  		v.reset(OpMIPSSUB)
  1188  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  1189  		v0.AuxInt = 32
  1190  		v.AddArg(v0)
  1191  		v1 := b.NewValue0(v.Pos, OpMIPSCLZ, t)
  1192  		v2 := b.NewValue0(v.Pos, OpMIPSSUBconst, t)
  1193  		v2.AuxInt = 1
  1194  		v3 := b.NewValue0(v.Pos, OpMIPSAND, t)
  1195  		v3.AddArg(x)
  1196  		v4 := b.NewValue0(v.Pos, OpMIPSNEG, t)
  1197  		v4.AddArg(x)
  1198  		v3.AddArg(v4)
  1199  		v2.AddArg(v3)
  1200  		v1.AddArg(v2)
  1201  		v.AddArg(v1)
  1202  		return true
  1203  	}
  1204  }
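
The Ctz32 rule above counts trailing zeros with the CLZ instruction via a standard identity: for x != 0, x & -x isolates the lowest set bit, so (x & -x) - 1 is a block of exactly ctz(x) one bits, and 32 minus its leading-zero count is ctz(x); for x == 0 the same expression yields 32. Expressed directly in Go (illustrative only, assumes import "math/bits"):

// ctz32ViaCLZ mirrors the lowering (SUB (MOVWconst [32]) (CLZ (SUBconst [1] (AND x (NEG x))))).
func ctz32ViaCLZ(x uint32) int {
	return 32 - bits.LeadingZeros32((x&-x)-1)
}
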
  1205  func rewriteValueMIPS_OpCvt32Fto32_0(v *Value) bool {
  1206  	// match: (Cvt32Fto32 x)
  1207  	// cond:
  1208  	// result: (TRUNCFW x)
  1209  	for {
  1210  		x := v.Args[0]
  1211  		v.reset(OpMIPSTRUNCFW)
  1212  		v.AddArg(x)
  1213  		return true
  1214  	}
  1215  }
  1216  func rewriteValueMIPS_OpCvt32Fto64F_0(v *Value) bool {
  1217  	// match: (Cvt32Fto64F x)
  1218  	// cond:
  1219  	// result: (MOVFD x)
  1220  	for {
  1221  		x := v.Args[0]
  1222  		v.reset(OpMIPSMOVFD)
  1223  		v.AddArg(x)
  1224  		return true
  1225  	}
  1226  }
  1227  func rewriteValueMIPS_OpCvt32to32F_0(v *Value) bool {
  1228  	// match: (Cvt32to32F x)
  1229  	// cond:
  1230  	// result: (MOVWF x)
  1231  	for {
  1232  		x := v.Args[0]
  1233  		v.reset(OpMIPSMOVWF)
  1234  		v.AddArg(x)
  1235  		return true
  1236  	}
  1237  }
  1238  func rewriteValueMIPS_OpCvt32to64F_0(v *Value) bool {
  1239  	// match: (Cvt32to64F x)
  1240  	// cond:
  1241  	// result: (MOVWD x)
  1242  	for {
  1243  		x := v.Args[0]
  1244  		v.reset(OpMIPSMOVWD)
  1245  		v.AddArg(x)
  1246  		return true
  1247  	}
  1248  }
  1249  func rewriteValueMIPS_OpCvt64Fto32_0(v *Value) bool {
  1250  	// match: (Cvt64Fto32 x)
  1251  	// cond:
  1252  	// result: (TRUNCDW x)
  1253  	for {
  1254  		x := v.Args[0]
  1255  		v.reset(OpMIPSTRUNCDW)
  1256  		v.AddArg(x)
  1257  		return true
  1258  	}
  1259  }
  1260  func rewriteValueMIPS_OpCvt64Fto32F_0(v *Value) bool {
  1261  	// match: (Cvt64Fto32F x)
  1262  	// cond:
  1263  	// result: (MOVDF x)
  1264  	for {
  1265  		x := v.Args[0]
  1266  		v.reset(OpMIPSMOVDF)
  1267  		v.AddArg(x)
  1268  		return true
  1269  	}
  1270  }
  1271  func rewriteValueMIPS_OpDiv16_0(v *Value) bool {
  1272  	b := v.Block
  1273  	_ = b
  1274  	typ := &b.Func.Config.Types
  1275  	_ = typ
  1276  	// match: (Div16 x y)
  1277  	// cond:
  1278  	// result: (Select1 (DIV (SignExt16to32 x) (SignExt16to32 y)))
  1279  	for {
  1280  		_ = v.Args[1]
  1281  		x := v.Args[0]
  1282  		y := v.Args[1]
  1283  		v.reset(OpSelect1)
  1284  		v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32))
  1285  		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  1286  		v1.AddArg(x)
  1287  		v0.AddArg(v1)
  1288  		v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  1289  		v2.AddArg(y)
  1290  		v0.AddArg(v2)
  1291  		v.AddArg(v0)
  1292  		return true
  1293  	}
  1294  }
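
Div16 and the other signed/unsigned division rules lower to Select1 of a DIV or DIVU tuple. On MIPS the divide instruction leaves the quotient in LO and the remainder in HI, and the SSA op models this as a (hi, lo) pair, so Select1 extracts the quotient here while the corresponding Mod rules (dispatched above, defined later in this file) use Select0 for the remainder — one machine divide feeds both results. A plain-Go picture of what the tuple carries (names and shapes are illustrative only):

// Conceptually, one hardware divide yields both halves of the tuple:
//   Select0 = HI = remainder, Select1 = LO = quotient.
func div16(x, y int16) (quotient, remainder int32) {
	return int32(x) / int32(y), int32(x) % int32(y)
}
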
  1295  func rewriteValueMIPS_OpDiv16u_0(v *Value) bool {
  1296  	b := v.Block
  1297  	_ = b
  1298  	typ := &b.Func.Config.Types
  1299  	_ = typ
  1300  	// match: (Div16u x y)
  1301  	// cond:
  1302  	// result: (Select1 (DIVU (ZeroExt16to32 x) (ZeroExt16to32 y)))
  1303  	for {
  1304  		_ = v.Args[1]
  1305  		x := v.Args[0]
  1306  		y := v.Args[1]
  1307  		v.reset(OpSelect1)
  1308  		v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32))
  1309  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  1310  		v1.AddArg(x)
  1311  		v0.AddArg(v1)
  1312  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  1313  		v2.AddArg(y)
  1314  		v0.AddArg(v2)
  1315  		v.AddArg(v0)
  1316  		return true
  1317  	}
  1318  }
  1319  func rewriteValueMIPS_OpDiv32_0(v *Value) bool {
  1320  	b := v.Block
  1321  	_ = b
  1322  	typ := &b.Func.Config.Types
  1323  	_ = typ
  1324  	// match: (Div32 x y)
  1325  	// cond:
  1326  	// result: (Select1 (DIV x y))
  1327  	for {
  1328  		_ = v.Args[1]
  1329  		x := v.Args[0]
  1330  		y := v.Args[1]
  1331  		v.reset(OpSelect1)
  1332  		v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32))
  1333  		v0.AddArg(x)
  1334  		v0.AddArg(y)
  1335  		v.AddArg(v0)
  1336  		return true
  1337  	}
  1338  }
  1339  func rewriteValueMIPS_OpDiv32F_0(v *Value) bool {
  1340  	// match: (Div32F x y)
  1341  	// cond:
  1342  	// result: (DIVF x y)
  1343  	for {
  1344  		_ = v.Args[1]
  1345  		x := v.Args[0]
  1346  		y := v.Args[1]
  1347  		v.reset(OpMIPSDIVF)
  1348  		v.AddArg(x)
  1349  		v.AddArg(y)
  1350  		return true
  1351  	}
  1352  }
  1353  func rewriteValueMIPS_OpDiv32u_0(v *Value) bool {
  1354  	b := v.Block
  1355  	_ = b
  1356  	typ := &b.Func.Config.Types
  1357  	_ = typ
  1358  	// match: (Div32u x y)
  1359  	// cond:
  1360  	// result: (Select1 (DIVU x y))
  1361  	for {
  1362  		_ = v.Args[1]
  1363  		x := v.Args[0]
  1364  		y := v.Args[1]
  1365  		v.reset(OpSelect1)
  1366  		v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32))
  1367  		v0.AddArg(x)
  1368  		v0.AddArg(y)
  1369  		v.AddArg(v0)
  1370  		return true
  1371  	}
  1372  }
  1373  func rewriteValueMIPS_OpDiv64F_0(v *Value) bool {
  1374  	// match: (Div64F x y)
  1375  	// cond:
  1376  	// result: (DIVD x y)
  1377  	for {
  1378  		_ = v.Args[1]
  1379  		x := v.Args[0]
  1380  		y := v.Args[1]
  1381  		v.reset(OpMIPSDIVD)
  1382  		v.AddArg(x)
  1383  		v.AddArg(y)
  1384  		return true
  1385  	}
  1386  }
  1387  func rewriteValueMIPS_OpDiv8_0(v *Value) bool {
  1388  	b := v.Block
  1389  	_ = b
  1390  	typ := &b.Func.Config.Types
  1391  	_ = typ
  1392  	// match: (Div8 x y)
  1393  	// cond:
  1394  	// result: (Select1 (DIV (SignExt8to32 x) (SignExt8to32 y)))
  1395  	for {
  1396  		_ = v.Args[1]
  1397  		x := v.Args[0]
  1398  		y := v.Args[1]
  1399  		v.reset(OpSelect1)
  1400  		v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32))
  1401  		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  1402  		v1.AddArg(x)
  1403  		v0.AddArg(v1)
  1404  		v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  1405  		v2.AddArg(y)
  1406  		v0.AddArg(v2)
  1407  		v.AddArg(v0)
  1408  		return true
  1409  	}
  1410  }
  1411  func rewriteValueMIPS_OpDiv8u_0(v *Value) bool {
  1412  	b := v.Block
  1413  	_ = b
  1414  	typ := &b.Func.Config.Types
  1415  	_ = typ
  1416  	// match: (Div8u x y)
  1417  	// cond:
  1418  	// result: (Select1 (DIVU (ZeroExt8to32 x) (ZeroExt8to32 y)))
  1419  	for {
  1420  		_ = v.Args[1]
  1421  		x := v.Args[0]
  1422  		y := v.Args[1]
  1423  		v.reset(OpSelect1)
  1424  		v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32))
  1425  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  1426  		v1.AddArg(x)
  1427  		v0.AddArg(v1)
  1428  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  1429  		v2.AddArg(y)
  1430  		v0.AddArg(v2)
  1431  		v.AddArg(v0)
  1432  		return true
  1433  	}
  1434  }
  1435  func rewriteValueMIPS_OpEq16_0(v *Value) bool {
  1436  	b := v.Block
  1437  	_ = b
  1438  	typ := &b.Func.Config.Types
  1439  	_ = typ
  1440  	// match: (Eq16 x y)
  1441  	// cond:
  1442  	// result: (SGTUconst [1] (XOR (ZeroExt16to32 x) (ZeroExt16to32 y)))
  1443  	for {
  1444  		_ = v.Args[1]
  1445  		x := v.Args[0]
  1446  		y := v.Args[1]
  1447  		v.reset(OpMIPSSGTUconst)
  1448  		v.AuxInt = 1
  1449  		v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32)
  1450  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  1451  		v1.AddArg(x)
  1452  		v0.AddArg(v1)
  1453  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  1454  		v2.AddArg(y)
  1455  		v0.AddArg(v2)
  1456  		v.AddArg(v0)
  1457  		return true
  1458  	}
  1459  }
  1460  func rewriteValueMIPS_OpEq32_0(v *Value) bool {
  1461  	b := v.Block
  1462  	_ = b
  1463  	typ := &b.Func.Config.Types
  1464  	_ = typ
  1465  	// match: (Eq32 x y)
  1466  	// cond:
  1467  	// result: (SGTUconst [1] (XOR x y))
  1468  	for {
  1469  		_ = v.Args[1]
  1470  		x := v.Args[0]
  1471  		y := v.Args[1]
  1472  		v.reset(OpMIPSSGTUconst)
  1473  		v.AuxInt = 1
  1474  		v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32)
  1475  		v0.AddArg(x)
  1476  		v0.AddArg(y)
  1477  		v.AddArg(v0)
  1478  		return true
  1479  	}
  1480  }
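
Eq32 (and the neighboring Eq16/Eq8/EqPtr rules) have only set-on-less-than style comparisons (SGT/SGTU) to target, so equality is rephrased as an unsigned range check: x ^ y is zero exactly when x == y, and SGTUconst [1] asks whether 1 is unsigned-greater than that XOR, i.e. whether the XOR is zero. A one-line Go rendering of the predicate (illustrative):

// eq32 computes the boolean produced by (SGTUconst [1] (XOR x y)).
func eq32(x, y uint32) bool {
	return 1 > x^y // unsigned: true iff x^y == 0, i.e. x == y
}
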
  1481  func rewriteValueMIPS_OpEq32F_0(v *Value) bool {
  1482  	b := v.Block
  1483  	_ = b
  1484  	// match: (Eq32F x y)
  1485  	// cond:
  1486  	// result: (FPFlagTrue (CMPEQF x y))
  1487  	for {
  1488  		_ = v.Args[1]
  1489  		x := v.Args[0]
  1490  		y := v.Args[1]
  1491  		v.reset(OpMIPSFPFlagTrue)
  1492  		v0 := b.NewValue0(v.Pos, OpMIPSCMPEQF, types.TypeFlags)
  1493  		v0.AddArg(x)
  1494  		v0.AddArg(y)
  1495  		v.AddArg(v0)
  1496  		return true
  1497  	}
  1498  }
  1499  func rewriteValueMIPS_OpEq64F_0(v *Value) bool {
  1500  	b := v.Block
  1501  	_ = b
  1502  	// match: (Eq64F x y)
  1503  	// cond:
  1504  	// result: (FPFlagTrue (CMPEQD x y))
  1505  	for {
  1506  		_ = v.Args[1]
  1507  		x := v.Args[0]
  1508  		y := v.Args[1]
  1509  		v.reset(OpMIPSFPFlagTrue)
  1510  		v0 := b.NewValue0(v.Pos, OpMIPSCMPEQD, types.TypeFlags)
  1511  		v0.AddArg(x)
  1512  		v0.AddArg(y)
  1513  		v.AddArg(v0)
  1514  		return true
  1515  	}
  1516  }
  1517  func rewriteValueMIPS_OpEq8_0(v *Value) bool {
  1518  	b := v.Block
  1519  	_ = b
  1520  	typ := &b.Func.Config.Types
  1521  	_ = typ
  1522  	// match: (Eq8 x y)
  1523  	// cond:
  1524  	// result: (SGTUconst [1] (XOR (ZeroExt8to32 x) (ZeroExt8to32 y)))
  1525  	for {
  1526  		_ = v.Args[1]
  1527  		x := v.Args[0]
  1528  		y := v.Args[1]
  1529  		v.reset(OpMIPSSGTUconst)
  1530  		v.AuxInt = 1
  1531  		v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32)
  1532  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  1533  		v1.AddArg(x)
  1534  		v0.AddArg(v1)
  1535  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  1536  		v2.AddArg(y)
  1537  		v0.AddArg(v2)
  1538  		v.AddArg(v0)
  1539  		return true
  1540  	}
  1541  }
  1542  func rewriteValueMIPS_OpEqB_0(v *Value) bool {
  1543  	b := v.Block
  1544  	_ = b
  1545  	typ := &b.Func.Config.Types
  1546  	_ = typ
  1547  	// match: (EqB x y)
  1548  	// cond:
  1549  	// result: (XORconst [1] (XOR <typ.Bool> x y))
  1550  	for {
  1551  		_ = v.Args[1]
  1552  		x := v.Args[0]
  1553  		y := v.Args[1]
  1554  		v.reset(OpMIPSXORconst)
  1555  		v.AuxInt = 1
  1556  		v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.Bool)
  1557  		v0.AddArg(x)
  1558  		v0.AddArg(y)
  1559  		v.AddArg(v0)
  1560  		return true
  1561  	}
  1562  }
  1563  func rewriteValueMIPS_OpEqPtr_0(v *Value) bool {
  1564  	b := v.Block
  1565  	_ = b
  1566  	typ := &b.Func.Config.Types
  1567  	_ = typ
  1568  	// match: (EqPtr x y)
  1569  	// cond:
  1570  	// result: (SGTUconst [1] (XOR x y))
  1571  	for {
  1572  		_ = v.Args[1]
  1573  		x := v.Args[0]
  1574  		y := v.Args[1]
  1575  		v.reset(OpMIPSSGTUconst)
  1576  		v.AuxInt = 1
  1577  		v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32)
  1578  		v0.AddArg(x)
  1579  		v0.AddArg(y)
  1580  		v.AddArg(v0)
  1581  		return true
  1582  	}
  1583  }
  1584  func rewriteValueMIPS_OpGeq16_0(v *Value) bool {
  1585  	b := v.Block
  1586  	_ = b
  1587  	typ := &b.Func.Config.Types
  1588  	_ = typ
  1589  	// match: (Geq16 x y)
  1590  	// cond:
  1591  	// result: (XORconst [1] (SGT (SignExt16to32 y) (SignExt16to32 x)))
  1592  	for {
  1593  		_ = v.Args[1]
  1594  		x := v.Args[0]
  1595  		y := v.Args[1]
  1596  		v.reset(OpMIPSXORconst)
  1597  		v.AuxInt = 1
  1598  		v0 := b.NewValue0(v.Pos, OpMIPSSGT, typ.Bool)
  1599  		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  1600  		v1.AddArg(y)
  1601  		v0.AddArg(v1)
  1602  		v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  1603  		v2.AddArg(x)
  1604  		v0.AddArg(v2)
  1605  		v.AddArg(v0)
  1606  		return true
  1607  	}
  1608  }
  1609  func rewriteValueMIPS_OpGeq16U_0(v *Value) bool {
  1610  	b := v.Block
  1611  	_ = b
  1612  	typ := &b.Func.Config.Types
  1613  	_ = typ
  1614  	// match: (Geq16U x y)
  1615  	// cond:
  1616  	// result: (XORconst [1] (SGTU (ZeroExt16to32 y) (ZeroExt16to32 x)))
  1617  	for {
  1618  		_ = v.Args[1]
  1619  		x := v.Args[0]
  1620  		y := v.Args[1]
  1621  		v.reset(OpMIPSXORconst)
  1622  		v.AuxInt = 1
  1623  		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
  1624  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  1625  		v1.AddArg(y)
  1626  		v0.AddArg(v1)
  1627  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  1628  		v2.AddArg(x)
  1629  		v0.AddArg(v2)
  1630  		v.AddArg(v0)
  1631  		return true
  1632  	}
  1633  }
  1634  func rewriteValueMIPS_OpGeq32_0(v *Value) bool {
  1635  	b := v.Block
  1636  	_ = b
  1637  	typ := &b.Func.Config.Types
  1638  	_ = typ
  1639  	// match: (Geq32 x y)
  1640  	// cond:
  1641  	// result: (XORconst [1] (SGT y x))
  1642  	for {
  1643  		_ = v.Args[1]
  1644  		x := v.Args[0]
  1645  		y := v.Args[1]
  1646  		v.reset(OpMIPSXORconst)
  1647  		v.AuxInt = 1
  1648  		v0 := b.NewValue0(v.Pos, OpMIPSSGT, typ.Bool)
  1649  		v0.AddArg(y)
  1650  		v0.AddArg(x)
  1651  		v.AddArg(v0)
  1652  		return true
  1653  	}
  1654  }
  1655  func rewriteValueMIPS_OpGeq32F_0(v *Value) bool {
  1656  	b := v.Block
  1657  	_ = b
  1658  	// match: (Geq32F x y)
  1659  	// cond:
  1660  	// result: (FPFlagTrue (CMPGEF x y))
  1661  	for {
  1662  		_ = v.Args[1]
  1663  		x := v.Args[0]
  1664  		y := v.Args[1]
  1665  		v.reset(OpMIPSFPFlagTrue)
  1666  		v0 := b.NewValue0(v.Pos, OpMIPSCMPGEF, types.TypeFlags)
  1667  		v0.AddArg(x)
  1668  		v0.AddArg(y)
  1669  		v.AddArg(v0)
  1670  		return true
  1671  	}
  1672  }
  1673  func rewriteValueMIPS_OpGeq32U_0(v *Value) bool {
  1674  	b := v.Block
  1675  	_ = b
  1676  	typ := &b.Func.Config.Types
  1677  	_ = typ
  1678  	// match: (Geq32U x y)
  1679  	// cond:
  1680  	// result: (XORconst [1] (SGTU y x))
  1681  	for {
  1682  		_ = v.Args[1]
  1683  		x := v.Args[0]
  1684  		y := v.Args[1]
  1685  		v.reset(OpMIPSXORconst)
  1686  		v.AuxInt = 1
  1687  		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
  1688  		v0.AddArg(y)
  1689  		v0.AddArg(x)
  1690  		v.AddArg(v0)
  1691  		return true
  1692  	}
  1693  }
  1694  func rewriteValueMIPS_OpGeq64F_0(v *Value) bool {
  1695  	b := v.Block
  1696  	_ = b
  1697  	// match: (Geq64F x y)
  1698  	// cond:
  1699  	// result: (FPFlagTrue (CMPGED x y))
  1700  	for {
  1701  		_ = v.Args[1]
  1702  		x := v.Args[0]
  1703  		y := v.Args[1]
  1704  		v.reset(OpMIPSFPFlagTrue)
  1705  		v0 := b.NewValue0(v.Pos, OpMIPSCMPGED, types.TypeFlags)
  1706  		v0.AddArg(x)
  1707  		v0.AddArg(y)
  1708  		v.AddArg(v0)
  1709  		return true
  1710  	}
  1711  }
  1712  func rewriteValueMIPS_OpGeq8_0(v *Value) bool {
  1713  	b := v.Block
  1714  	_ = b
  1715  	typ := &b.Func.Config.Types
  1716  	_ = typ
  1717  	// match: (Geq8 x y)
  1718  	// cond:
  1719  	// result: (XORconst [1] (SGT (SignExt8to32 y) (SignExt8to32 x)))
  1720  	for {
  1721  		_ = v.Args[1]
  1722  		x := v.Args[0]
  1723  		y := v.Args[1]
  1724  		v.reset(OpMIPSXORconst)
  1725  		v.AuxInt = 1
  1726  		v0 := b.NewValue0(v.Pos, OpMIPSSGT, typ.Bool)
  1727  		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  1728  		v1.AddArg(y)
  1729  		v0.AddArg(v1)
  1730  		v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  1731  		v2.AddArg(x)
  1732  		v0.AddArg(v2)
  1733  		v.AddArg(v0)
  1734  		return true
  1735  	}
  1736  }
  1737  func rewriteValueMIPS_OpGeq8U_0(v *Value) bool {
  1738  	b := v.Block
  1739  	_ = b
  1740  	typ := &b.Func.Config.Types
  1741  	_ = typ
  1742  	// match: (Geq8U x y)
  1743  	// cond:
  1744  	// result: (XORconst [1] (SGTU (ZeroExt8to32 y) (ZeroExt8to32 x)))
  1745  	for {
  1746  		_ = v.Args[1]
  1747  		x := v.Args[0]
  1748  		y := v.Args[1]
  1749  		v.reset(OpMIPSXORconst)
  1750  		v.AuxInt = 1
  1751  		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
  1752  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  1753  		v1.AddArg(y)
  1754  		v0.AddArg(v1)
  1755  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  1756  		v2.AddArg(x)
  1757  		v0.AddArg(v2)
  1758  		v.AddArg(v0)
  1759  		return true
  1760  	}
  1761  }
  1762  func rewriteValueMIPS_OpGetClosurePtr_0(v *Value) bool {
  1763  	// match: (GetClosurePtr)
  1764  	// cond:
  1765  	// result: (LoweredGetClosurePtr)
  1766  	for {
  1767  		v.reset(OpMIPSLoweredGetClosurePtr)
  1768  		return true
  1769  	}
  1770  }
  1771  func rewriteValueMIPS_OpGreater16_0(v *Value) bool {
  1772  	b := v.Block
  1773  	_ = b
  1774  	typ := &b.Func.Config.Types
  1775  	_ = typ
  1776  	// match: (Greater16 x y)
  1777  	// cond:
  1778  	// result: (SGT (SignExt16to32 x) (SignExt16to32 y))
  1779  	for {
  1780  		_ = v.Args[1]
  1781  		x := v.Args[0]
  1782  		y := v.Args[1]
  1783  		v.reset(OpMIPSSGT)
  1784  		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  1785  		v0.AddArg(x)
  1786  		v.AddArg(v0)
  1787  		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  1788  		v1.AddArg(y)
  1789  		v.AddArg(v1)
  1790  		return true
  1791  	}
  1792  }
  1793  func rewriteValueMIPS_OpGreater16U_0(v *Value) bool {
  1794  	b := v.Block
  1795  	_ = b
  1796  	typ := &b.Func.Config.Types
  1797  	_ = typ
  1798  	// match: (Greater16U x y)
  1799  	// cond:
  1800  	// result: (SGTU (ZeroExt16to32 x) (ZeroExt16to32 y))
  1801  	for {
  1802  		_ = v.Args[1]
  1803  		x := v.Args[0]
  1804  		y := v.Args[1]
  1805  		v.reset(OpMIPSSGTU)
  1806  		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  1807  		v0.AddArg(x)
  1808  		v.AddArg(v0)
  1809  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  1810  		v1.AddArg(y)
  1811  		v.AddArg(v1)
  1812  		return true
  1813  	}
  1814  }
  1815  func rewriteValueMIPS_OpGreater32_0(v *Value) bool {
  1816  	// match: (Greater32 x y)
  1817  	// cond:
  1818  	// result: (SGT x y)
  1819  	for {
  1820  		_ = v.Args[1]
  1821  		x := v.Args[0]
  1822  		y := v.Args[1]
  1823  		v.reset(OpMIPSSGT)
  1824  		v.AddArg(x)
  1825  		v.AddArg(y)
  1826  		return true
  1827  	}
  1828  }
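        // Floating-point comparisons lower to a CMPGT{F,D}/CMPGE{F,D} that sets the
        // FP condition flag, which FPFlagTrue then materializes as a boolean.
        // The Less*F/Leq*F rules reuse the same compares with the operands swapped.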
  1829  func rewriteValueMIPS_OpGreater32F_0(v *Value) bool {
  1830  	b := v.Block
  1831  	_ = b
  1832  	// match: (Greater32F x y)
  1833  	// cond:
  1834  	// result: (FPFlagTrue (CMPGTF x y))
  1835  	for {
  1836  		_ = v.Args[1]
  1837  		x := v.Args[0]
  1838  		y := v.Args[1]
  1839  		v.reset(OpMIPSFPFlagTrue)
  1840  		v0 := b.NewValue0(v.Pos, OpMIPSCMPGTF, types.TypeFlags)
  1841  		v0.AddArg(x)
  1842  		v0.AddArg(y)
  1843  		v.AddArg(v0)
  1844  		return true
  1845  	}
  1846  }
  1847  func rewriteValueMIPS_OpGreater32U_0(v *Value) bool {
  1848  	// match: (Greater32U x y)
  1849  	// cond:
  1850  	// result: (SGTU x y)
  1851  	for {
  1852  		_ = v.Args[1]
  1853  		x := v.Args[0]
  1854  		y := v.Args[1]
  1855  		v.reset(OpMIPSSGTU)
  1856  		v.AddArg(x)
  1857  		v.AddArg(y)
  1858  		return true
  1859  	}
  1860  }
  1861  func rewriteValueMIPS_OpGreater64F_0(v *Value) bool {
  1862  	b := v.Block
  1863  	_ = b
  1864  	// match: (Greater64F x y)
  1865  	// cond:
  1866  	// result: (FPFlagTrue (CMPGTD x y))
  1867  	for {
  1868  		_ = v.Args[1]
  1869  		x := v.Args[0]
  1870  		y := v.Args[1]
  1871  		v.reset(OpMIPSFPFlagTrue)
  1872  		v0 := b.NewValue0(v.Pos, OpMIPSCMPGTD, types.TypeFlags)
  1873  		v0.AddArg(x)
  1874  		v0.AddArg(y)
  1875  		v.AddArg(v0)
  1876  		return true
  1877  	}
  1878  }
  1879  func rewriteValueMIPS_OpGreater8_0(v *Value) bool {
  1880  	b := v.Block
  1881  	_ = b
  1882  	typ := &b.Func.Config.Types
  1883  	_ = typ
  1884  	// match: (Greater8 x y)
  1885  	// cond:
  1886  	// result: (SGT (SignExt8to32 x) (SignExt8to32 y))
  1887  	for {
  1888  		_ = v.Args[1]
  1889  		x := v.Args[0]
  1890  		y := v.Args[1]
  1891  		v.reset(OpMIPSSGT)
  1892  		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  1893  		v0.AddArg(x)
  1894  		v.AddArg(v0)
  1895  		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  1896  		v1.AddArg(y)
  1897  		v.AddArg(v1)
  1898  		return true
  1899  	}
  1900  }
  1901  func rewriteValueMIPS_OpGreater8U_0(v *Value) bool {
  1902  	b := v.Block
  1903  	_ = b
  1904  	typ := &b.Func.Config.Types
  1905  	_ = typ
  1906  	// match: (Greater8U x y)
  1907  	// cond:
  1908  	// result: (SGTU (ZeroExt8to32 x) (ZeroExt8to32 y))
  1909  	for {
  1910  		_ = v.Args[1]
  1911  		x := v.Args[0]
  1912  		y := v.Args[1]
  1913  		v.reset(OpMIPSSGTU)
  1914  		v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  1915  		v0.AddArg(x)
  1916  		v.AddArg(v0)
  1917  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  1918  		v1.AddArg(y)
  1919  		v.AddArg(v1)
  1920  		return true
  1921  	}
  1922  }
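        // Hmul32/Hmul32u want the high 32 bits of the 64-bit product. MULT/MULTU
        // produce a two-element tuple (presumably the HI and LO halves of the
        // multiply unit); Select0 picks the high word.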
  1923  func rewriteValueMIPS_OpHmul32_0(v *Value) bool {
  1924  	b := v.Block
  1925  	_ = b
  1926  	typ := &b.Func.Config.Types
  1927  	_ = typ
  1928  	// match: (Hmul32 x y)
  1929  	// cond:
  1930  	// result: (Select0 (MULT x y))
  1931  	for {
  1932  		_ = v.Args[1]
  1933  		x := v.Args[0]
  1934  		y := v.Args[1]
  1935  		v.reset(OpSelect0)
  1936  		v0 := b.NewValue0(v.Pos, OpMIPSMULT, types.NewTuple(typ.Int32, typ.Int32))
  1937  		v0.AddArg(x)
  1938  		v0.AddArg(y)
  1939  		v.AddArg(v0)
  1940  		return true
  1941  	}
  1942  }
  1943  func rewriteValueMIPS_OpHmul32u_0(v *Value) bool {
  1944  	b := v.Block
  1945  	_ = b
  1946  	typ := &b.Func.Config.Types
  1947  	_ = typ
  1948  	// match: (Hmul32u x y)
  1949  	// cond:
  1950  	// result: (Select0 (MULTU x y))
  1951  	for {
  1952  		_ = v.Args[1]
  1953  		x := v.Args[0]
  1954  		y := v.Args[1]
  1955  		v.reset(OpSelect0)
  1956  		v0 := b.NewValue0(v.Pos, OpMIPSMULTU, types.NewTuple(typ.UInt32, typ.UInt32))
  1957  		v0.AddArg(x)
  1958  		v0.AddArg(y)
  1959  		v.AddArg(v0)
  1960  		return true
  1961  	}
  1962  }
  1963  func rewriteValueMIPS_OpInterCall_0(v *Value) bool {
  1964  	// match: (InterCall [argwid] entry mem)
  1965  	// cond:
  1966  	// result: (CALLinter [argwid] entry mem)
  1967  	for {
  1968  		argwid := v.AuxInt
  1969  		_ = v.Args[1]
  1970  		entry := v.Args[0]
  1971  		mem := v.Args[1]
  1972  		v.reset(OpMIPSCALLinter)
  1973  		v.AuxInt = argwid
  1974  		v.AddArg(entry)
  1975  		v.AddArg(mem)
  1976  		return true
  1977  	}
  1978  }
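        // Bounds checks: 0 <= idx < len collapses to the single unsigned compare
        // len >u idx, since a negative index becomes a huge unsigned value.
        // IsSliceInBounds (idx <= len) is the negation of idx >u len, and IsNonNil
        // is an unsigned compare of the pointer against zero.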
  1979  func rewriteValueMIPS_OpIsInBounds_0(v *Value) bool {
  1980  	// match: (IsInBounds idx len)
  1981  	// cond:
  1982  	// result: (SGTU len idx)
  1983  	for {
  1984  		_ = v.Args[1]
  1985  		idx := v.Args[0]
  1986  		len := v.Args[1]
  1987  		v.reset(OpMIPSSGTU)
  1988  		v.AddArg(len)
  1989  		v.AddArg(idx)
  1990  		return true
  1991  	}
  1992  }
  1993  func rewriteValueMIPS_OpIsNonNil_0(v *Value) bool {
  1994  	b := v.Block
  1995  	_ = b
  1996  	typ := &b.Func.Config.Types
  1997  	_ = typ
  1998  	// match: (IsNonNil ptr)
  1999  	// cond:
  2000  	// result: (SGTU ptr (MOVWconst [0]))
  2001  	for {
  2002  		ptr := v.Args[0]
  2003  		v.reset(OpMIPSSGTU)
  2004  		v.AddArg(ptr)
  2005  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  2006  		v0.AuxInt = 0
  2007  		v.AddArg(v0)
  2008  		return true
  2009  	}
  2010  }
  2011  func rewriteValueMIPS_OpIsSliceInBounds_0(v *Value) bool {
  2012  	b := v.Block
  2013  	_ = b
  2014  	typ := &b.Func.Config.Types
  2015  	_ = typ
  2016  	// match: (IsSliceInBounds idx len)
  2017  	// cond:
  2018  	// result: (XORconst [1] (SGTU idx len))
  2019  	for {
  2020  		_ = v.Args[1]
  2021  		idx := v.Args[0]
  2022  		len := v.Args[1]
  2023  		v.reset(OpMIPSXORconst)
  2024  		v.AuxInt = 1
  2025  		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
  2026  		v0.AddArg(idx)
  2027  		v0.AddArg(len)
  2028  		v.AddArg(v0)
  2029  		return true
  2030  	}
  2031  }
  2032  func rewriteValueMIPS_OpLeq16_0(v *Value) bool {
  2033  	b := v.Block
  2034  	_ = b
  2035  	typ := &b.Func.Config.Types
  2036  	_ = typ
  2037  	// match: (Leq16 x y)
  2038  	// cond:
  2039  	// result: (XORconst [1] (SGT (SignExt16to32 x) (SignExt16to32 y)))
  2040  	for {
  2041  		_ = v.Args[1]
  2042  		x := v.Args[0]
  2043  		y := v.Args[1]
  2044  		v.reset(OpMIPSXORconst)
  2045  		v.AuxInt = 1
  2046  		v0 := b.NewValue0(v.Pos, OpMIPSSGT, typ.Bool)
  2047  		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  2048  		v1.AddArg(x)
  2049  		v0.AddArg(v1)
  2050  		v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  2051  		v2.AddArg(y)
  2052  		v0.AddArg(v2)
  2053  		v.AddArg(v0)
  2054  		return true
  2055  	}
  2056  }
  2057  func rewriteValueMIPS_OpLeq16U_0(v *Value) bool {
  2058  	b := v.Block
  2059  	_ = b
  2060  	typ := &b.Func.Config.Types
  2061  	_ = typ
  2062  	// match: (Leq16U x y)
  2063  	// cond:
  2064  	// result: (XORconst [1] (SGTU (ZeroExt16to32 x) (ZeroExt16to32 y)))
  2065  	for {
  2066  		_ = v.Args[1]
  2067  		x := v.Args[0]
  2068  		y := v.Args[1]
  2069  		v.reset(OpMIPSXORconst)
  2070  		v.AuxInt = 1
  2071  		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
  2072  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2073  		v1.AddArg(x)
  2074  		v0.AddArg(v1)
  2075  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2076  		v2.AddArg(y)
  2077  		v0.AddArg(v2)
  2078  		v.AddArg(v0)
  2079  		return true
  2080  	}
  2081  }
  2082  func rewriteValueMIPS_OpLeq32_0(v *Value) bool {
  2083  	b := v.Block
  2084  	_ = b
  2085  	typ := &b.Func.Config.Types
  2086  	_ = typ
  2087  	// match: (Leq32 x y)
  2088  	// cond:
  2089  	// result: (XORconst [1] (SGT x y))
  2090  	for {
  2091  		_ = v.Args[1]
  2092  		x := v.Args[0]
  2093  		y := v.Args[1]
  2094  		v.reset(OpMIPSXORconst)
  2095  		v.AuxInt = 1
  2096  		v0 := b.NewValue0(v.Pos, OpMIPSSGT, typ.Bool)
  2097  		v0.AddArg(x)
  2098  		v0.AddArg(y)
  2099  		v.AddArg(v0)
  2100  		return true
  2101  	}
  2102  }
  2103  func rewriteValueMIPS_OpLeq32F_0(v *Value) bool {
  2104  	b := v.Block
  2105  	_ = b
  2106  	// match: (Leq32F x y)
  2107  	// cond:
  2108  	// result: (FPFlagTrue (CMPGEF y x))
  2109  	for {
  2110  		_ = v.Args[1]
  2111  		x := v.Args[0]
  2112  		y := v.Args[1]
  2113  		v.reset(OpMIPSFPFlagTrue)
  2114  		v0 := b.NewValue0(v.Pos, OpMIPSCMPGEF, types.TypeFlags)
  2115  		v0.AddArg(y)
  2116  		v0.AddArg(x)
  2117  		v.AddArg(v0)
  2118  		return true
  2119  	}
  2120  }
  2121  func rewriteValueMIPS_OpLeq32U_0(v *Value) bool {
  2122  	b := v.Block
  2123  	_ = b
  2124  	typ := &b.Func.Config.Types
  2125  	_ = typ
  2126  	// match: (Leq32U x y)
  2127  	// cond:
  2128  	// result: (XORconst [1] (SGTU x y))
  2129  	for {
  2130  		_ = v.Args[1]
  2131  		x := v.Args[0]
  2132  		y := v.Args[1]
  2133  		v.reset(OpMIPSXORconst)
  2134  		v.AuxInt = 1
  2135  		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
  2136  		v0.AddArg(x)
  2137  		v0.AddArg(y)
  2138  		v.AddArg(v0)
  2139  		return true
  2140  	}
  2141  }
  2142  func rewriteValueMIPS_OpLeq64F_0(v *Value) bool {
  2143  	b := v.Block
  2144  	_ = b
  2145  	// match: (Leq64F x y)
  2146  	// cond:
  2147  	// result: (FPFlagTrue (CMPGED y x))
  2148  	for {
  2149  		_ = v.Args[1]
  2150  		x := v.Args[0]
  2151  		y := v.Args[1]
  2152  		v.reset(OpMIPSFPFlagTrue)
  2153  		v0 := b.NewValue0(v.Pos, OpMIPSCMPGED, types.TypeFlags)
  2154  		v0.AddArg(y)
  2155  		v0.AddArg(x)
  2156  		v.AddArg(v0)
  2157  		return true
  2158  	}
  2159  }
  2160  func rewriteValueMIPS_OpLeq8_0(v *Value) bool {
  2161  	b := v.Block
  2162  	_ = b
  2163  	typ := &b.Func.Config.Types
  2164  	_ = typ
  2165  	// match: (Leq8 x y)
  2166  	// cond:
  2167  	// result: (XORconst [1] (SGT (SignExt8to32 x) (SignExt8to32 y)))
  2168  	for {
  2169  		_ = v.Args[1]
  2170  		x := v.Args[0]
  2171  		y := v.Args[1]
  2172  		v.reset(OpMIPSXORconst)
  2173  		v.AuxInt = 1
  2174  		v0 := b.NewValue0(v.Pos, OpMIPSSGT, typ.Bool)
  2175  		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  2176  		v1.AddArg(x)
  2177  		v0.AddArg(v1)
  2178  		v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  2179  		v2.AddArg(y)
  2180  		v0.AddArg(v2)
  2181  		v.AddArg(v0)
  2182  		return true
  2183  	}
  2184  }
  2185  func rewriteValueMIPS_OpLeq8U_0(v *Value) bool {
  2186  	b := v.Block
  2187  	_ = b
  2188  	typ := &b.Func.Config.Types
  2189  	_ = typ
  2190  	// match: (Leq8U x y)
  2191  	// cond:
  2192  	// result: (XORconst [1] (SGTU (ZeroExt8to32 x) (ZeroExt8to32 y)))
  2193  	for {
  2194  		_ = v.Args[1]
  2195  		x := v.Args[0]
  2196  		y := v.Args[1]
  2197  		v.reset(OpMIPSXORconst)
  2198  		v.AuxInt = 1
  2199  		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
  2200  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2201  		v1.AddArg(x)
  2202  		v0.AddArg(v1)
  2203  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2204  		v2.AddArg(y)
  2205  		v0.AddArg(v2)
  2206  		v.AddArg(v0)
  2207  		return true
  2208  	}
  2209  }
  2210  func rewriteValueMIPS_OpLess16_0(v *Value) bool {
  2211  	b := v.Block
  2212  	_ = b
  2213  	typ := &b.Func.Config.Types
  2214  	_ = typ
  2215  	// match: (Less16 x y)
  2216  	// cond:
  2217  	// result: (SGT (SignExt16to32 y) (SignExt16to32 x))
  2218  	for {
  2219  		_ = v.Args[1]
  2220  		x := v.Args[0]
  2221  		y := v.Args[1]
  2222  		v.reset(OpMIPSSGT)
  2223  		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  2224  		v0.AddArg(y)
  2225  		v.AddArg(v0)
  2226  		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  2227  		v1.AddArg(x)
  2228  		v.AddArg(v1)
  2229  		return true
  2230  	}
  2231  }
  2232  func rewriteValueMIPS_OpLess16U_0(v *Value) bool {
  2233  	b := v.Block
  2234  	_ = b
  2235  	typ := &b.Func.Config.Types
  2236  	_ = typ
  2237  	// match: (Less16U x y)
  2238  	// cond:
  2239  	// result: (SGTU (ZeroExt16to32 y) (ZeroExt16to32 x))
  2240  	for {
  2241  		_ = v.Args[1]
  2242  		x := v.Args[0]
  2243  		y := v.Args[1]
  2244  		v.reset(OpMIPSSGTU)
  2245  		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2246  		v0.AddArg(y)
  2247  		v.AddArg(v0)
  2248  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2249  		v1.AddArg(x)
  2250  		v.AddArg(v1)
  2251  		return true
  2252  	}
  2253  }
  2254  func rewriteValueMIPS_OpLess32_0(v *Value) bool {
  2255  	// match: (Less32 x y)
  2256  	// cond:
  2257  	// result: (SGT y x)
  2258  	for {
  2259  		_ = v.Args[1]
  2260  		x := v.Args[0]
  2261  		y := v.Args[1]
  2262  		v.reset(OpMIPSSGT)
  2263  		v.AddArg(y)
  2264  		v.AddArg(x)
  2265  		return true
  2266  	}
  2267  }
  2268  func rewriteValueMIPS_OpLess32F_0(v *Value) bool {
  2269  	b := v.Block
  2270  	_ = b
  2271  	// match: (Less32F x y)
  2272  	// cond:
  2273  	// result: (FPFlagTrue (CMPGTF y x))
  2274  	for {
  2275  		_ = v.Args[1]
  2276  		x := v.Args[0]
  2277  		y := v.Args[1]
  2278  		v.reset(OpMIPSFPFlagTrue)
  2279  		v0 := b.NewValue0(v.Pos, OpMIPSCMPGTF, types.TypeFlags)
  2280  		v0.AddArg(y)
  2281  		v0.AddArg(x)
  2282  		v.AddArg(v0)
  2283  		return true
  2284  	}
  2285  }
  2286  func rewriteValueMIPS_OpLess32U_0(v *Value) bool {
  2287  	// match: (Less32U x y)
  2288  	// cond:
  2289  	// result: (SGTU y x)
  2290  	for {
  2291  		_ = v.Args[1]
  2292  		x := v.Args[0]
  2293  		y := v.Args[1]
  2294  		v.reset(OpMIPSSGTU)
  2295  		v.AddArg(y)
  2296  		v.AddArg(x)
  2297  		return true
  2298  	}
  2299  }
  2300  func rewriteValueMIPS_OpLess64F_0(v *Value) bool {
  2301  	b := v.Block
  2302  	_ = b
  2303  	// match: (Less64F x y)
  2304  	// cond:
  2305  	// result: (FPFlagTrue (CMPGTD y x))
  2306  	for {
  2307  		_ = v.Args[1]
  2308  		x := v.Args[0]
  2309  		y := v.Args[1]
  2310  		v.reset(OpMIPSFPFlagTrue)
  2311  		v0 := b.NewValue0(v.Pos, OpMIPSCMPGTD, types.TypeFlags)
  2312  		v0.AddArg(y)
  2313  		v0.AddArg(x)
  2314  		v.AddArg(v0)
  2315  		return true
  2316  	}
  2317  }
  2318  func rewriteValueMIPS_OpLess8_0(v *Value) bool {
  2319  	b := v.Block
  2320  	_ = b
  2321  	typ := &b.Func.Config.Types
  2322  	_ = typ
  2323  	// match: (Less8 x y)
  2324  	// cond:
  2325  	// result: (SGT (SignExt8to32 y) (SignExt8to32 x))
  2326  	for {
  2327  		_ = v.Args[1]
  2328  		x := v.Args[0]
  2329  		y := v.Args[1]
  2330  		v.reset(OpMIPSSGT)
  2331  		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  2332  		v0.AddArg(y)
  2333  		v.AddArg(v0)
  2334  		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  2335  		v1.AddArg(x)
  2336  		v.AddArg(v1)
  2337  		return true
  2338  	}
  2339  }
  2340  func rewriteValueMIPS_OpLess8U_0(v *Value) bool {
  2341  	b := v.Block
  2342  	_ = b
  2343  	typ := &b.Func.Config.Types
  2344  	_ = typ
  2345  	// match: (Less8U x y)
  2346  	// cond:
  2347  	// result: (SGTU (ZeroExt8to32 y) (ZeroExt8to32 x))
  2348  	for {
  2349  		_ = v.Args[1]
  2350  		x := v.Args[0]
  2351  		y := v.Args[1]
  2352  		v.reset(OpMIPSSGTU)
  2353  		v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2354  		v0.AddArg(y)
  2355  		v.AddArg(v0)
  2356  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2357  		v1.AddArg(x)
  2358  		v.AddArg(v1)
  2359  		return true
  2360  	}
  2361  }
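        // Load dispatches on the type being loaded: booleans and unsigned small
        // integers use the zero-extending loads (MOVBUload/MOVHUload), signed small
        // integers the sign-extending ones, 32-bit integers and pointers MOVWload,
        // and floats MOVFload/MOVDload.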
  2362  func rewriteValueMIPS_OpLoad_0(v *Value) bool {
  2363  	// match: (Load <t> ptr mem)
  2364  	// cond: t.IsBoolean()
  2365  	// result: (MOVBUload ptr mem)
  2366  	for {
  2367  		t := v.Type
  2368  		_ = v.Args[1]
  2369  		ptr := v.Args[0]
  2370  		mem := v.Args[1]
  2371  		if !(t.IsBoolean()) {
  2372  			break
  2373  		}
  2374  		v.reset(OpMIPSMOVBUload)
  2375  		v.AddArg(ptr)
  2376  		v.AddArg(mem)
  2377  		return true
  2378  	}
  2379  	// match: (Load <t> ptr mem)
  2380  	// cond: (is8BitInt(t) && isSigned(t))
  2381  	// result: (MOVBload ptr mem)
  2382  	for {
  2383  		t := v.Type
  2384  		_ = v.Args[1]
  2385  		ptr := v.Args[0]
  2386  		mem := v.Args[1]
  2387  		if !(is8BitInt(t) && isSigned(t)) {
  2388  			break
  2389  		}
  2390  		v.reset(OpMIPSMOVBload)
  2391  		v.AddArg(ptr)
  2392  		v.AddArg(mem)
  2393  		return true
  2394  	}
  2395  	// match: (Load <t> ptr mem)
  2396  	// cond: (is8BitInt(t) && !isSigned(t))
  2397  	// result: (MOVBUload ptr mem)
  2398  	for {
  2399  		t := v.Type
  2400  		_ = v.Args[1]
  2401  		ptr := v.Args[0]
  2402  		mem := v.Args[1]
  2403  		if !(is8BitInt(t) && !isSigned(t)) {
  2404  			break
  2405  		}
  2406  		v.reset(OpMIPSMOVBUload)
  2407  		v.AddArg(ptr)
  2408  		v.AddArg(mem)
  2409  		return true
  2410  	}
  2411  	// match: (Load <t> ptr mem)
  2412  	// cond: (is16BitInt(t) && isSigned(t))
  2413  	// result: (MOVHload ptr mem)
  2414  	for {
  2415  		t := v.Type
  2416  		_ = v.Args[1]
  2417  		ptr := v.Args[0]
  2418  		mem := v.Args[1]
  2419  		if !(is16BitInt(t) && isSigned(t)) {
  2420  			break
  2421  		}
  2422  		v.reset(OpMIPSMOVHload)
  2423  		v.AddArg(ptr)
  2424  		v.AddArg(mem)
  2425  		return true
  2426  	}
  2427  	// match: (Load <t> ptr mem)
  2428  	// cond: (is16BitInt(t) && !isSigned(t))
  2429  	// result: (MOVHUload ptr mem)
  2430  	for {
  2431  		t := v.Type
  2432  		_ = v.Args[1]
  2433  		ptr := v.Args[0]
  2434  		mem := v.Args[1]
  2435  		if !(is16BitInt(t) && !isSigned(t)) {
  2436  			break
  2437  		}
  2438  		v.reset(OpMIPSMOVHUload)
  2439  		v.AddArg(ptr)
  2440  		v.AddArg(mem)
  2441  		return true
  2442  	}
  2443  	// match: (Load <t> ptr mem)
  2444  	// cond: (is32BitInt(t) || isPtr(t))
  2445  	// result: (MOVWload ptr mem)
  2446  	for {
  2447  		t := v.Type
  2448  		_ = v.Args[1]
  2449  		ptr := v.Args[0]
  2450  		mem := v.Args[1]
  2451  		if !(is32BitInt(t) || isPtr(t)) {
  2452  			break
  2453  		}
  2454  		v.reset(OpMIPSMOVWload)
  2455  		v.AddArg(ptr)
  2456  		v.AddArg(mem)
  2457  		return true
  2458  	}
  2459  	// match: (Load <t> ptr mem)
  2460  	// cond: is32BitFloat(t)
  2461  	// result: (MOVFload ptr mem)
  2462  	for {
  2463  		t := v.Type
  2464  		_ = v.Args[1]
  2465  		ptr := v.Args[0]
  2466  		mem := v.Args[1]
  2467  		if !(is32BitFloat(t)) {
  2468  			break
  2469  		}
  2470  		v.reset(OpMIPSMOVFload)
  2471  		v.AddArg(ptr)
  2472  		v.AddArg(mem)
  2473  		return true
  2474  	}
  2475  	// match: (Load <t> ptr mem)
  2476  	// cond: is64BitFloat(t)
  2477  	// result: (MOVDload ptr mem)
  2478  	for {
  2479  		t := v.Type
  2480  		_ = v.Args[1]
  2481  		ptr := v.Args[0]
  2482  		mem := v.Args[1]
  2483  		if !(is64BitFloat(t)) {
  2484  			break
  2485  		}
  2486  		v.reset(OpMIPSMOVDload)
  2487  		v.AddArg(ptr)
  2488  		v.AddArg(mem)
  2489  		return true
  2490  	}
  2491  	return false
  2492  }
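        // Shift lowering: Go defines a shift by a count >= the operand width to
        // yield 0, while the machine shift only uses the low bits of the count, so
        // a variable shift is wrapped in CMOVZ: the SLL result is kept while
        // SGTUconst [32] count is non-zero (count < 32) and replaced by 0 otherwise.
        // Counts that are 64-bit constants are decided statically: SLLconst when
        // the count is below the element width, constant 0 if not.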
  2493  func rewriteValueMIPS_OpLsh16x16_0(v *Value) bool {
  2494  	b := v.Block
  2495  	_ = b
  2496  	typ := &b.Func.Config.Types
  2497  	_ = typ
  2498  	// match: (Lsh16x16 <t> x y)
  2499  	// cond:
  2500  	// result: (CMOVZ (SLL <t> x (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
  2501  	for {
  2502  		t := v.Type
  2503  		_ = v.Args[1]
  2504  		x := v.Args[0]
  2505  		y := v.Args[1]
  2506  		v.reset(OpMIPSCMOVZ)
  2507  		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2508  		v0.AddArg(x)
  2509  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2510  		v1.AddArg(y)
  2511  		v0.AddArg(v1)
  2512  		v.AddArg(v0)
  2513  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  2514  		v2.AuxInt = 0
  2515  		v.AddArg(v2)
  2516  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  2517  		v3.AuxInt = 32
  2518  		v4 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2519  		v4.AddArg(y)
  2520  		v3.AddArg(v4)
  2521  		v.AddArg(v3)
  2522  		return true
  2523  	}
  2524  }
  2525  func rewriteValueMIPS_OpLsh16x32_0(v *Value) bool {
  2526  	b := v.Block
  2527  	_ = b
  2528  	typ := &b.Func.Config.Types
  2529  	_ = typ
  2530  	// match: (Lsh16x32 <t> x y)
  2531  	// cond:
  2532  	// result: (CMOVZ (SLL <t> x y) (MOVWconst [0]) (SGTUconst [32] y))
  2533  	for {
  2534  		t := v.Type
  2535  		_ = v.Args[1]
  2536  		x := v.Args[0]
  2537  		y := v.Args[1]
  2538  		v.reset(OpMIPSCMOVZ)
  2539  		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2540  		v0.AddArg(x)
  2541  		v0.AddArg(y)
  2542  		v.AddArg(v0)
  2543  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  2544  		v1.AuxInt = 0
  2545  		v.AddArg(v1)
  2546  		v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  2547  		v2.AuxInt = 32
  2548  		v2.AddArg(y)
  2549  		v.AddArg(v2)
  2550  		return true
  2551  	}
  2552  }
  2553  func rewriteValueMIPS_OpLsh16x64_0(v *Value) bool {
  2554  	// match: (Lsh16x64 x (Const64 [c]))
  2555  	// cond: uint32(c) < 16
  2556  	// result: (SLLconst x [c])
  2557  	for {
  2558  		_ = v.Args[1]
  2559  		x := v.Args[0]
  2560  		v_1 := v.Args[1]
  2561  		if v_1.Op != OpConst64 {
  2562  			break
  2563  		}
  2564  		c := v_1.AuxInt
  2565  		if !(uint32(c) < 16) {
  2566  			break
  2567  		}
  2568  		v.reset(OpMIPSSLLconst)
  2569  		v.AuxInt = c
  2570  		v.AddArg(x)
  2571  		return true
  2572  	}
  2573  	// match: (Lsh16x64 _ (Const64 [c]))
  2574  	// cond: uint32(c) >= 16
  2575  	// result: (MOVWconst [0])
  2576  	for {
  2577  		_ = v.Args[1]
  2578  		v_1 := v.Args[1]
  2579  		if v_1.Op != OpConst64 {
  2580  			break
  2581  		}
  2582  		c := v_1.AuxInt
  2583  		if !(uint32(c) >= 16) {
  2584  			break
  2585  		}
  2586  		v.reset(OpMIPSMOVWconst)
  2587  		v.AuxInt = 0
  2588  		return true
  2589  	}
  2590  	return false
  2591  }
  2592  func rewriteValueMIPS_OpLsh16x8_0(v *Value) bool {
  2593  	b := v.Block
  2594  	_ = b
  2595  	typ := &b.Func.Config.Types
  2596  	_ = typ
  2597  	// match: (Lsh16x8 <t> x y)
  2598  	// cond:
  2599  	// result: (CMOVZ (SLL <t> x (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
  2600  	for {
  2601  		t := v.Type
  2602  		_ = v.Args[1]
  2603  		x := v.Args[0]
  2604  		y := v.Args[1]
  2605  		v.reset(OpMIPSCMOVZ)
  2606  		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2607  		v0.AddArg(x)
  2608  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2609  		v1.AddArg(y)
  2610  		v0.AddArg(v1)
  2611  		v.AddArg(v0)
  2612  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  2613  		v2.AuxInt = 0
  2614  		v.AddArg(v2)
  2615  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  2616  		v3.AuxInt = 32
  2617  		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2618  		v4.AddArg(y)
  2619  		v3.AddArg(v4)
  2620  		v.AddArg(v3)
  2621  		return true
  2622  	}
  2623  }
  2624  func rewriteValueMIPS_OpLsh32x16_0(v *Value) bool {
  2625  	b := v.Block
  2626  	_ = b
  2627  	typ := &b.Func.Config.Types
  2628  	_ = typ
  2629  	// match: (Lsh32x16 <t> x y)
  2630  	// cond:
  2631  	// result: (CMOVZ (SLL <t> x (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
  2632  	for {
  2633  		t := v.Type
  2634  		_ = v.Args[1]
  2635  		x := v.Args[0]
  2636  		y := v.Args[1]
  2637  		v.reset(OpMIPSCMOVZ)
  2638  		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2639  		v0.AddArg(x)
  2640  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2641  		v1.AddArg(y)
  2642  		v0.AddArg(v1)
  2643  		v.AddArg(v0)
  2644  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  2645  		v2.AuxInt = 0
  2646  		v.AddArg(v2)
  2647  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  2648  		v3.AuxInt = 32
  2649  		v4 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2650  		v4.AddArg(y)
  2651  		v3.AddArg(v4)
  2652  		v.AddArg(v3)
  2653  		return true
  2654  	}
  2655  }
  2656  func rewriteValueMIPS_OpLsh32x32_0(v *Value) bool {
  2657  	b := v.Block
  2658  	_ = b
  2659  	typ := &b.Func.Config.Types
  2660  	_ = typ
  2661  	// match: (Lsh32x32 <t> x y)
  2662  	// cond:
  2663  	// result: (CMOVZ (SLL <t> x y) (MOVWconst [0]) (SGTUconst [32] y))
  2664  	for {
  2665  		t := v.Type
  2666  		_ = v.Args[1]
  2667  		x := v.Args[0]
  2668  		y := v.Args[1]
  2669  		v.reset(OpMIPSCMOVZ)
  2670  		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2671  		v0.AddArg(x)
  2672  		v0.AddArg(y)
  2673  		v.AddArg(v0)
  2674  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  2675  		v1.AuxInt = 0
  2676  		v.AddArg(v1)
  2677  		v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  2678  		v2.AuxInt = 32
  2679  		v2.AddArg(y)
  2680  		v.AddArg(v2)
  2681  		return true
  2682  	}
  2683  }
  2684  func rewriteValueMIPS_OpLsh32x64_0(v *Value) bool {
  2685  	// match: (Lsh32x64 x (Const64 [c]))
  2686  	// cond: uint32(c) < 32
  2687  	// result: (SLLconst x [c])
  2688  	for {
  2689  		_ = v.Args[1]
  2690  		x := v.Args[0]
  2691  		v_1 := v.Args[1]
  2692  		if v_1.Op != OpConst64 {
  2693  			break
  2694  		}
  2695  		c := v_1.AuxInt
  2696  		if !(uint32(c) < 32) {
  2697  			break
  2698  		}
  2699  		v.reset(OpMIPSSLLconst)
  2700  		v.AuxInt = c
  2701  		v.AddArg(x)
  2702  		return true
  2703  	}
  2704  	// match: (Lsh32x64 _ (Const64 [c]))
  2705  	// cond: uint32(c) >= 32
  2706  	// result: (MOVWconst [0])
  2707  	for {
  2708  		_ = v.Args[1]
  2709  		v_1 := v.Args[1]
  2710  		if v_1.Op != OpConst64 {
  2711  			break
  2712  		}
  2713  		c := v_1.AuxInt
  2714  		if !(uint32(c) >= 32) {
  2715  			break
  2716  		}
  2717  		v.reset(OpMIPSMOVWconst)
  2718  		v.AuxInt = 0
  2719  		return true
  2720  	}
  2721  	return false
  2722  }
  2723  func rewriteValueMIPS_OpLsh32x8_0(v *Value) bool {
  2724  	b := v.Block
  2725  	_ = b
  2726  	typ := &b.Func.Config.Types
  2727  	_ = typ
  2728  	// match: (Lsh32x8 <t> x y)
  2729  	// cond:
  2730  	// result: (CMOVZ (SLL <t> x (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
  2731  	for {
  2732  		t := v.Type
  2733  		_ = v.Args[1]
  2734  		x := v.Args[0]
  2735  		y := v.Args[1]
  2736  		v.reset(OpMIPSCMOVZ)
  2737  		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2738  		v0.AddArg(x)
  2739  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2740  		v1.AddArg(y)
  2741  		v0.AddArg(v1)
  2742  		v.AddArg(v0)
  2743  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  2744  		v2.AuxInt = 0
  2745  		v.AddArg(v2)
  2746  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  2747  		v3.AuxInt = 32
  2748  		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2749  		v4.AddArg(y)
  2750  		v3.AddArg(v4)
  2751  		v.AddArg(v3)
  2752  		return true
  2753  	}
  2754  }
  2755  func rewriteValueMIPS_OpLsh8x16_0(v *Value) bool {
  2756  	b := v.Block
  2757  	_ = b
  2758  	typ := &b.Func.Config.Types
  2759  	_ = typ
  2760  	// match: (Lsh8x16 <t> x y)
  2761  	// cond:
  2762  	// result: (CMOVZ (SLL <t> x (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
  2763  	for {
  2764  		t := v.Type
  2765  		_ = v.Args[1]
  2766  		x := v.Args[0]
  2767  		y := v.Args[1]
  2768  		v.reset(OpMIPSCMOVZ)
  2769  		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2770  		v0.AddArg(x)
  2771  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2772  		v1.AddArg(y)
  2773  		v0.AddArg(v1)
  2774  		v.AddArg(v0)
  2775  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  2776  		v2.AuxInt = 0
  2777  		v.AddArg(v2)
  2778  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  2779  		v3.AuxInt = 32
  2780  		v4 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2781  		v4.AddArg(y)
  2782  		v3.AddArg(v4)
  2783  		v.AddArg(v3)
  2784  		return true
  2785  	}
  2786  }
  2787  func rewriteValueMIPS_OpLsh8x32_0(v *Value) bool {
  2788  	b := v.Block
  2789  	_ = b
  2790  	typ := &b.Func.Config.Types
  2791  	_ = typ
  2792  	// match: (Lsh8x32 <t> x y)
  2793  	// cond:
  2794  	// result: (CMOVZ (SLL <t> x y) (MOVWconst [0]) (SGTUconst [32] y))
  2795  	for {
  2796  		t := v.Type
  2797  		_ = v.Args[1]
  2798  		x := v.Args[0]
  2799  		y := v.Args[1]
  2800  		v.reset(OpMIPSCMOVZ)
  2801  		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2802  		v0.AddArg(x)
  2803  		v0.AddArg(y)
  2804  		v.AddArg(v0)
  2805  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  2806  		v1.AuxInt = 0
  2807  		v.AddArg(v1)
  2808  		v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  2809  		v2.AuxInt = 32
  2810  		v2.AddArg(y)
  2811  		v.AddArg(v2)
  2812  		return true
  2813  	}
  2814  }
  2815  func rewriteValueMIPS_OpLsh8x64_0(v *Value) bool {
  2816  	// match: (Lsh8x64 x (Const64 [c]))
  2817  	// cond: uint32(c) < 8
  2818  	// result: (SLLconst x [c])
  2819  	for {
  2820  		_ = v.Args[1]
  2821  		x := v.Args[0]
  2822  		v_1 := v.Args[1]
  2823  		if v_1.Op != OpConst64 {
  2824  			break
  2825  		}
  2826  		c := v_1.AuxInt
  2827  		if !(uint32(c) < 8) {
  2828  			break
  2829  		}
  2830  		v.reset(OpMIPSSLLconst)
  2831  		v.AuxInt = c
  2832  		v.AddArg(x)
  2833  		return true
  2834  	}
  2835  	// match: (Lsh8x64 _ (Const64 [c]))
  2836  	// cond: uint32(c) >= 8
  2837  	// result: (MOVWconst [0])
  2838  	for {
  2839  		_ = v.Args[1]
  2840  		v_1 := v.Args[1]
  2841  		if v_1.Op != OpConst64 {
  2842  			break
  2843  		}
  2844  		c := v_1.AuxInt
  2845  		if !(uint32(c) >= 8) {
  2846  			break
  2847  		}
  2848  		v.reset(OpMIPSMOVWconst)
  2849  		v.AuxInt = 0
  2850  		return true
  2851  	}
  2852  	return false
  2853  }
  2854  func rewriteValueMIPS_OpLsh8x8_0(v *Value) bool {
  2855  	b := v.Block
  2856  	_ = b
  2857  	typ := &b.Func.Config.Types
  2858  	_ = typ
  2859  	// match: (Lsh8x8 <t> x y)
  2860  	// cond:
  2861  	// result: (CMOVZ (SLL <t> x (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
  2862  	for {
  2863  		t := v.Type
  2864  		_ = v.Args[1]
  2865  		x := v.Args[0]
  2866  		y := v.Args[1]
  2867  		v.reset(OpMIPSCMOVZ)
  2868  		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2869  		v0.AddArg(x)
  2870  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2871  		v1.AddArg(y)
  2872  		v0.AddArg(v1)
  2873  		v.AddArg(v0)
  2874  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  2875  		v2.AuxInt = 0
  2876  		v.AddArg(v2)
  2877  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  2878  		v3.AuxInt = 32
  2879  		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2880  		v4.AddArg(y)
  2881  		v3.AddArg(v4)
  2882  		v.AddArg(v3)
  2883  		return true
  2884  	}
  2885  }
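        // The functions below rewrite the MIPS machine ops themselves rather than
        // lowering generic ops: constant operands are folded into the *const forms,
        // and algebraic simplifications such as ADD x (NEG y) -> SUB x y are applied.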
  2886  func rewriteValueMIPS_OpMIPSADD_0(v *Value) bool {
  2887  	// match: (ADD x (MOVWconst [c]))
  2888  	// cond:
  2889  	// result: (ADDconst [c] x)
  2890  	for {
  2891  		_ = v.Args[1]
  2892  		x := v.Args[0]
  2893  		v_1 := v.Args[1]
  2894  		if v_1.Op != OpMIPSMOVWconst {
  2895  			break
  2896  		}
  2897  		c := v_1.AuxInt
  2898  		v.reset(OpMIPSADDconst)
  2899  		v.AuxInt = c
  2900  		v.AddArg(x)
  2901  		return true
  2902  	}
  2903  	// match: (ADD (MOVWconst [c]) x)
  2904  	// cond:
  2905  	// result: (ADDconst [c] x)
  2906  	for {
  2907  		_ = v.Args[1]
  2908  		v_0 := v.Args[0]
  2909  		if v_0.Op != OpMIPSMOVWconst {
  2910  			break
  2911  		}
  2912  		c := v_0.AuxInt
  2913  		x := v.Args[1]
  2914  		v.reset(OpMIPSADDconst)
  2915  		v.AuxInt = c
  2916  		v.AddArg(x)
  2917  		return true
  2918  	}
  2919  	// match: (ADD x (NEG y))
  2920  	// cond:
  2921  	// result: (SUB x y)
  2922  	for {
  2923  		_ = v.Args[1]
  2924  		x := v.Args[0]
  2925  		v_1 := v.Args[1]
  2926  		if v_1.Op != OpMIPSNEG {
  2927  			break
  2928  		}
  2929  		y := v_1.Args[0]
  2930  		v.reset(OpMIPSSUB)
  2931  		v.AddArg(x)
  2932  		v.AddArg(y)
  2933  		return true
  2934  	}
  2935  	// match: (ADD (NEG y) x)
  2936  	// cond:
  2937  	// result: (SUB x y)
  2938  	for {
  2939  		_ = v.Args[1]
  2940  		v_0 := v.Args[0]
  2941  		if v_0.Op != OpMIPSNEG {
  2942  			break
  2943  		}
  2944  		y := v_0.Args[0]
  2945  		x := v.Args[1]
  2946  		v.reset(OpMIPSSUB)
  2947  		v.AddArg(x)
  2948  		v.AddArg(y)
  2949  		return true
  2950  	}
  2951  	return false
  2952  }
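        // ADDconst folds into MOVWaddr offsets, drops an add of zero, and
        // constant-folds chains of ADDconst/SUBconst. Constant arithmetic is
        // truncated to 32 bits and sign-extended back (int64(int32(...))),
        // presumably to keep the AuxInt in its canonical 32-bit form.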
  2953  func rewriteValueMIPS_OpMIPSADDconst_0(v *Value) bool {
  2954  	// match: (ADDconst [off1] (MOVWaddr [off2] {sym} ptr))
  2955  	// cond:
  2956  	// result: (MOVWaddr [off1+off2] {sym} ptr)
  2957  	for {
  2958  		off1 := v.AuxInt
  2959  		v_0 := v.Args[0]
  2960  		if v_0.Op != OpMIPSMOVWaddr {
  2961  			break
  2962  		}
  2963  		off2 := v_0.AuxInt
  2964  		sym := v_0.Aux
  2965  		ptr := v_0.Args[0]
  2966  		v.reset(OpMIPSMOVWaddr)
  2967  		v.AuxInt = off1 + off2
  2968  		v.Aux = sym
  2969  		v.AddArg(ptr)
  2970  		return true
  2971  	}
  2972  	// match: (ADDconst [0] x)
  2973  	// cond:
  2974  	// result: x
  2975  	for {
  2976  		if v.AuxInt != 0 {
  2977  			break
  2978  		}
  2979  		x := v.Args[0]
  2980  		v.reset(OpCopy)
  2981  		v.Type = x.Type
  2982  		v.AddArg(x)
  2983  		return true
  2984  	}
  2985  	// match: (ADDconst [c] (MOVWconst [d]))
  2986  	// cond:
  2987  	// result: (MOVWconst [int64(int32(c+d))])
  2988  	for {
  2989  		c := v.AuxInt
  2990  		v_0 := v.Args[0]
  2991  		if v_0.Op != OpMIPSMOVWconst {
  2992  			break
  2993  		}
  2994  		d := v_0.AuxInt
  2995  		v.reset(OpMIPSMOVWconst)
  2996  		v.AuxInt = int64(int32(c + d))
  2997  		return true
  2998  	}
  2999  	// match: (ADDconst [c] (ADDconst [d] x))
  3000  	// cond:
  3001  	// result: (ADDconst [int64(int32(c+d))] x)
  3002  	for {
  3003  		c := v.AuxInt
  3004  		v_0 := v.Args[0]
  3005  		if v_0.Op != OpMIPSADDconst {
  3006  			break
  3007  		}
  3008  		d := v_0.AuxInt
  3009  		x := v_0.Args[0]
  3010  		v.reset(OpMIPSADDconst)
  3011  		v.AuxInt = int64(int32(c + d))
  3012  		v.AddArg(x)
  3013  		return true
  3014  	}
  3015  	// match: (ADDconst [c] (SUBconst [d] x))
  3016  	// cond:
  3017  	// result: (ADDconst [int64(int32(c-d))] x)
  3018  	for {
  3019  		c := v.AuxInt
  3020  		v_0 := v.Args[0]
  3021  		if v_0.Op != OpMIPSSUBconst {
  3022  			break
  3023  		}
  3024  		d := v_0.AuxInt
  3025  		x := v_0.Args[0]
  3026  		v.reset(OpMIPSADDconst)
  3027  		v.AuxInt = int64(int32(c - d))
  3028  		v.AddArg(x)
  3029  		return true
  3030  	}
  3031  	return false
  3032  }
  3033  func rewriteValueMIPS_OpMIPSAND_0(v *Value) bool {
  3034  	b := v.Block
  3035  	_ = b
  3036  	// match: (AND x (MOVWconst [c]))
  3037  	// cond:
  3038  	// result: (ANDconst [c] x)
  3039  	for {
  3040  		_ = v.Args[1]
  3041  		x := v.Args[0]
  3042  		v_1 := v.Args[1]
  3043  		if v_1.Op != OpMIPSMOVWconst {
  3044  			break
  3045  		}
  3046  		c := v_1.AuxInt
  3047  		v.reset(OpMIPSANDconst)
  3048  		v.AuxInt = c
  3049  		v.AddArg(x)
  3050  		return true
  3051  	}
  3052  	// match: (AND (MOVWconst [c]) x)
  3053  	// cond:
  3054  	// result: (ANDconst [c] x)
  3055  	for {
  3056  		_ = v.Args[1]
  3057  		v_0 := v.Args[0]
  3058  		if v_0.Op != OpMIPSMOVWconst {
  3059  			break
  3060  		}
  3061  		c := v_0.AuxInt
  3062  		x := v.Args[1]
  3063  		v.reset(OpMIPSANDconst)
  3064  		v.AuxInt = c
  3065  		v.AddArg(x)
  3066  		return true
  3067  	}
  3068  	// match: (AND x x)
  3069  	// cond:
  3070  	// result: x
  3071  	for {
  3072  		_ = v.Args[1]
  3073  		x := v.Args[0]
  3074  		if x != v.Args[1] {
  3075  			break
  3076  		}
  3077  		v.reset(OpCopy)
  3078  		v.Type = x.Type
  3079  		v.AddArg(x)
  3080  		return true
  3081  	}
  3082  	// match: (AND (SGTUconst [1] x) (SGTUconst [1] y))
  3083  	// cond:
  3084  	// result: (SGTUconst [1] (OR <x.Type> x y))
  3085  	for {
  3086  		_ = v.Args[1]
  3087  		v_0 := v.Args[0]
  3088  		if v_0.Op != OpMIPSSGTUconst {
  3089  			break
  3090  		}
  3091  		if v_0.AuxInt != 1 {
  3092  			break
  3093  		}
  3094  		x := v_0.Args[0]
  3095  		v_1 := v.Args[1]
  3096  		if v_1.Op != OpMIPSSGTUconst {
  3097  			break
  3098  		}
  3099  		if v_1.AuxInt != 1 {
  3100  			break
  3101  		}
  3102  		y := v_1.Args[0]
  3103  		v.reset(OpMIPSSGTUconst)
  3104  		v.AuxInt = 1
  3105  		v0 := b.NewValue0(v.Pos, OpMIPSOR, x.Type)
  3106  		v0.AddArg(x)
  3107  		v0.AddArg(y)
  3108  		v.AddArg(v0)
  3109  		return true
  3110  	}
  3111  	// match: (AND (SGTUconst [1] y) (SGTUconst [1] x))
  3112  	// cond:
  3113  	// result: (SGTUconst [1] (OR <x.Type> x y))
  3114  	for {
  3115  		_ = v.Args[1]
  3116  		v_0 := v.Args[0]
  3117  		if v_0.Op != OpMIPSSGTUconst {
  3118  			break
  3119  		}
  3120  		if v_0.AuxInt != 1 {
  3121  			break
  3122  		}
  3123  		y := v_0.Args[0]
  3124  		v_1 := v.Args[1]
  3125  		if v_1.Op != OpMIPSSGTUconst {
  3126  			break
  3127  		}
  3128  		if v_1.AuxInt != 1 {
  3129  			break
  3130  		}
  3131  		x := v_1.Args[0]
  3132  		v.reset(OpMIPSSGTUconst)
  3133  		v.AuxInt = 1
  3134  		v0 := b.NewValue0(v.Pos, OpMIPSOR, x.Type)
  3135  		v0.AddArg(x)
  3136  		v0.AddArg(y)
  3137  		v.AddArg(v0)
  3138  		return true
  3139  	}
  3140  	return false
  3141  }
  3142  func rewriteValueMIPS_OpMIPSANDconst_0(v *Value) bool {
  3143  	// match: (ANDconst [0] _)
  3144  	// cond:
  3145  	// result: (MOVWconst [0])
  3146  	for {
  3147  		if v.AuxInt != 0 {
  3148  			break
  3149  		}
  3150  		v.reset(OpMIPSMOVWconst)
  3151  		v.AuxInt = 0
  3152  		return true
  3153  	}
  3154  	// match: (ANDconst [-1] x)
  3155  	// cond:
  3156  	// result: x
  3157  	for {
  3158  		if v.AuxInt != -1 {
  3159  			break
  3160  		}
  3161  		x := v.Args[0]
  3162  		v.reset(OpCopy)
  3163  		v.Type = x.Type
  3164  		v.AddArg(x)
  3165  		return true
  3166  	}
  3167  	// match: (ANDconst [c] (MOVWconst [d]))
  3168  	// cond:
  3169  	// result: (MOVWconst [c&d])
  3170  	for {
  3171  		c := v.AuxInt
  3172  		v_0 := v.Args[0]
  3173  		if v_0.Op != OpMIPSMOVWconst {
  3174  			break
  3175  		}
  3176  		d := v_0.AuxInt
  3177  		v.reset(OpMIPSMOVWconst)
  3178  		v.AuxInt = c & d
  3179  		return true
  3180  	}
  3181  	// match: (ANDconst [c] (ANDconst [d] x))
  3182  	// cond:
  3183  	// result: (ANDconst [c&d] x)
  3184  	for {
  3185  		c := v.AuxInt
  3186  		v_0 := v.Args[0]
  3187  		if v_0.Op != OpMIPSANDconst {
  3188  			break
  3189  		}
  3190  		d := v_0.AuxInt
  3191  		x := v_0.Args[0]
  3192  		v.reset(OpMIPSANDconst)
  3193  		v.AuxInt = c & d
  3194  		v.AddArg(x)
  3195  		return true
  3196  	}
  3197  	return false
  3198  }
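        // CMOVZ a b c yields a when c is non-zero and b when c is zero. A constant
        // selector therefore collapses the op to one of its operands, and a
        // constant-zero second operand is reduced to CMOVZzero.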
  3199  func rewriteValueMIPS_OpMIPSCMOVZ_0(v *Value) bool {
  3200  	b := v.Block
  3201  	_ = b
  3202  	// match: (CMOVZ _ b (MOVWconst [0]))
  3203  	// cond:
  3204  	// result: b
  3205  	for {
  3206  		_ = v.Args[2]
  3207  		b := v.Args[1]
  3208  		v_2 := v.Args[2]
  3209  		if v_2.Op != OpMIPSMOVWconst {
  3210  			break
  3211  		}
  3212  		if v_2.AuxInt != 0 {
  3213  			break
  3214  		}
  3215  		v.reset(OpCopy)
  3216  		v.Type = b.Type
  3217  		v.AddArg(b)
  3218  		return true
  3219  	}
  3220  	// match: (CMOVZ a _ (MOVWconst [c]))
  3221  	// cond: c!=0
  3222  	// result: a
  3223  	for {
  3224  		_ = v.Args[2]
  3225  		a := v.Args[0]
  3226  		v_2 := v.Args[2]
  3227  		if v_2.Op != OpMIPSMOVWconst {
  3228  			break
  3229  		}
  3230  		c := v_2.AuxInt
  3231  		if !(c != 0) {
  3232  			break
  3233  		}
  3234  		v.reset(OpCopy)
  3235  		v.Type = a.Type
  3236  		v.AddArg(a)
  3237  		return true
  3238  	}
  3239  	// match: (CMOVZ a (MOVWconst [0]) c)
  3240  	// cond:
  3241  	// result: (CMOVZzero a c)
  3242  	for {
  3243  		_ = v.Args[2]
  3244  		a := v.Args[0]
  3245  		v_1 := v.Args[1]
  3246  		if v_1.Op != OpMIPSMOVWconst {
  3247  			break
  3248  		}
  3249  		if v_1.AuxInt != 0 {
  3250  			break
  3251  		}
  3252  		c := v.Args[2]
  3253  		v.reset(OpMIPSCMOVZzero)
  3254  		v.AddArg(a)
  3255  		v.AddArg(c)
  3256  		return true
  3257  	}
  3258  	return false
  3259  }
  3260  func rewriteValueMIPS_OpMIPSCMOVZzero_0(v *Value) bool {
  3261  	// match: (CMOVZzero _ (MOVWconst [0]))
  3262  	// cond:
  3263  	// result: (MOVWconst [0])
  3264  	for {
  3265  		_ = v.Args[1]
  3266  		v_1 := v.Args[1]
  3267  		if v_1.Op != OpMIPSMOVWconst {
  3268  			break
  3269  		}
  3270  		if v_1.AuxInt != 0 {
  3271  			break
  3272  		}
  3273  		v.reset(OpMIPSMOVWconst)
  3274  		v.AuxInt = 0
  3275  		return true
  3276  	}
  3277  	// match: (CMOVZzero a (MOVWconst [c]))
  3278  	// cond: c!=0
  3279  	// result: a
  3280  	for {
  3281  		_ = v.Args[1]
  3282  		a := v.Args[0]
  3283  		v_1 := v.Args[1]
  3284  		if v_1.Op != OpMIPSMOVWconst {
  3285  			break
  3286  		}
  3287  		c := v_1.AuxInt
  3288  		if !(c != 0) {
  3289  			break
  3290  		}
  3291  		v.reset(OpCopy)
  3292  		v.Type = a.Type
  3293  		v.AddArg(a)
  3294  		return true
  3295  	}
  3296  	return false
  3297  }
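        // Atomic ops with constant operands use the immediate forms: an atomic add
        // of a constant that fits in 16 bits becomes LoweredAtomicAddconst, and an
        // atomic store of zero becomes LoweredAtomicStorezero.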
  3298  func rewriteValueMIPS_OpMIPSLoweredAtomicAdd_0(v *Value) bool {
  3299  	// match: (LoweredAtomicAdd ptr (MOVWconst [c]) mem)
  3300  	// cond: is16Bit(c)
  3301  	// result: (LoweredAtomicAddconst [c] ptr mem)
  3302  	for {
  3303  		_ = v.Args[2]
  3304  		ptr := v.Args[0]
  3305  		v_1 := v.Args[1]
  3306  		if v_1.Op != OpMIPSMOVWconst {
  3307  			break
  3308  		}
  3309  		c := v_1.AuxInt
  3310  		mem := v.Args[2]
  3311  		if !(is16Bit(c)) {
  3312  			break
  3313  		}
  3314  		v.reset(OpMIPSLoweredAtomicAddconst)
  3315  		v.AuxInt = c
  3316  		v.AddArg(ptr)
  3317  		v.AddArg(mem)
  3318  		return true
  3319  	}
  3320  	return false
  3321  }
  3322  func rewriteValueMIPS_OpMIPSLoweredAtomicStore_0(v *Value) bool {
  3323  	// match: (LoweredAtomicStore ptr (MOVWconst [0]) mem)
  3324  	// cond:
  3325  	// result: (LoweredAtomicStorezero ptr mem)
  3326  	for {
  3327  		_ = v.Args[2]
  3328  		ptr := v.Args[0]
  3329  		v_1 := v.Args[1]
  3330  		if v_1.Op != OpMIPSMOVWconst {
  3331  			break
  3332  		}
  3333  		if v_1.AuxInt != 0 {
  3334  			break
  3335  		}
  3336  		mem := v.Args[2]
  3337  		v.reset(OpMIPSLoweredAtomicStorezero)
  3338  		v.AddArg(ptr)
  3339  		v.AddArg(mem)
  3340  		return true
  3341  	}
  3342  	return false
  3343  }
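        // The MOV*load/MOV*store rules below fold addressing arithmetic into the
        // instruction: an ADDconst offset is merged into AuxInt (when the sum still
        // fits a 16-bit immediate or the add has no other use), a MOVWaddr base
        // merges its symbol and offset, and a load that reads back a value just
        // stored at the same address is replaced by an extension of that value.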
  3344  func rewriteValueMIPS_OpMIPSMOVBUload_0(v *Value) bool {
  3345  	// match: (MOVBUload [off1] {sym} x:(ADDconst [off2] ptr) mem)
  3346  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  3347  	// result: (MOVBUload [off1+off2] {sym} ptr mem)
  3348  	for {
  3349  		off1 := v.AuxInt
  3350  		sym := v.Aux
  3351  		_ = v.Args[1]
  3352  		x := v.Args[0]
  3353  		if x.Op != OpMIPSADDconst {
  3354  			break
  3355  		}
  3356  		off2 := x.AuxInt
  3357  		ptr := x.Args[0]
  3358  		mem := v.Args[1]
  3359  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  3360  			break
  3361  		}
  3362  		v.reset(OpMIPSMOVBUload)
  3363  		v.AuxInt = off1 + off2
  3364  		v.Aux = sym
  3365  		v.AddArg(ptr)
  3366  		v.AddArg(mem)
  3367  		return true
  3368  	}
  3369  	// match: (MOVBUload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  3370  	// cond: canMergeSym(sym1,sym2)
  3371  	// result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  3372  	for {
  3373  		off1 := v.AuxInt
  3374  		sym1 := v.Aux
  3375  		_ = v.Args[1]
  3376  		v_0 := v.Args[0]
  3377  		if v_0.Op != OpMIPSMOVWaddr {
  3378  			break
  3379  		}
  3380  		off2 := v_0.AuxInt
  3381  		sym2 := v_0.Aux
  3382  		ptr := v_0.Args[0]
  3383  		mem := v.Args[1]
  3384  		if !(canMergeSym(sym1, sym2)) {
  3385  			break
  3386  		}
  3387  		v.reset(OpMIPSMOVBUload)
  3388  		v.AuxInt = off1 + off2
  3389  		v.Aux = mergeSym(sym1, sym2)
  3390  		v.AddArg(ptr)
  3391  		v.AddArg(mem)
  3392  		return true
  3393  	}
  3394  	// match: (MOVBUload [off] {sym} ptr (MOVBstore [off2] {sym2} ptr2 x _))
  3395  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  3396  	// result: (MOVBUreg x)
  3397  	for {
  3398  		off := v.AuxInt
  3399  		sym := v.Aux
  3400  		_ = v.Args[1]
  3401  		ptr := v.Args[0]
  3402  		v_1 := v.Args[1]
  3403  		if v_1.Op != OpMIPSMOVBstore {
  3404  			break
  3405  		}
  3406  		off2 := v_1.AuxInt
  3407  		sym2 := v_1.Aux
  3408  		_ = v_1.Args[2]
  3409  		ptr2 := v_1.Args[0]
  3410  		x := v_1.Args[1]
  3411  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  3412  			break
  3413  		}
  3414  		v.reset(OpMIPSMOVBUreg)
  3415  		v.AddArg(x)
  3416  		return true
  3417  	}
  3418  	return false
  3419  }
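        // MOVBUreg replaces a zero-extension of an already zero-extended value
        // (from MOVBUload or another MOVBUreg) with a plain MOVWreg, turns a signed
        // byte load feeding it into an unsigned load in the load's own block (when
        // the load has no other use), folds into ANDconst with an 0xff mask, and
        // constant-folds MOVWconst.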
  3420  func rewriteValueMIPS_OpMIPSMOVBUreg_0(v *Value) bool {
  3421  	b := v.Block
  3422  	_ = b
  3423  	// match: (MOVBUreg x:(MOVBUload _ _))
  3424  	// cond:
  3425  	// result: (MOVWreg x)
  3426  	for {
  3427  		x := v.Args[0]
  3428  		if x.Op != OpMIPSMOVBUload {
  3429  			break
  3430  		}
  3431  		_ = x.Args[1]
  3432  		v.reset(OpMIPSMOVWreg)
  3433  		v.AddArg(x)
  3434  		return true
  3435  	}
  3436  	// match: (MOVBUreg x:(MOVBUreg _))
  3437  	// cond:
  3438  	// result: (MOVWreg x)
  3439  	for {
  3440  		x := v.Args[0]
  3441  		if x.Op != OpMIPSMOVBUreg {
  3442  			break
  3443  		}
  3444  		v.reset(OpMIPSMOVWreg)
  3445  		v.AddArg(x)
  3446  		return true
  3447  	}
  3448  	// match: (MOVBUreg <t> x:(MOVBload [off] {sym} ptr mem))
  3449  	// cond: x.Uses == 1 && clobber(x)
  3450  	// result: @x.Block (MOVBUload <t> [off] {sym} ptr mem)
  3451  	for {
  3452  		t := v.Type
  3453  		x := v.Args[0]
  3454  		if x.Op != OpMIPSMOVBload {
  3455  			break
  3456  		}
  3457  		off := x.AuxInt
  3458  		sym := x.Aux
  3459  		_ = x.Args[1]
  3460  		ptr := x.Args[0]
  3461  		mem := x.Args[1]
  3462  		if !(x.Uses == 1 && clobber(x)) {
  3463  			break
  3464  		}
  3465  		b = x.Block
  3466  		v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, t)
  3467  		v.reset(OpCopy)
  3468  		v.AddArg(v0)
  3469  		v0.AuxInt = off
  3470  		v0.Aux = sym
  3471  		v0.AddArg(ptr)
  3472  		v0.AddArg(mem)
  3473  		return true
  3474  	}
  3475  	// match: (MOVBUreg (ANDconst [c] x))
  3476  	// cond:
  3477  	// result: (ANDconst [c&0xff] x)
  3478  	for {
  3479  		v_0 := v.Args[0]
  3480  		if v_0.Op != OpMIPSANDconst {
  3481  			break
  3482  		}
  3483  		c := v_0.AuxInt
  3484  		x := v_0.Args[0]
  3485  		v.reset(OpMIPSANDconst)
  3486  		v.AuxInt = c & 0xff
  3487  		v.AddArg(x)
  3488  		return true
  3489  	}
  3490  	// match: (MOVBUreg (MOVWconst [c]))
  3491  	// cond:
  3492  	// result: (MOVWconst [int64(uint8(c))])
  3493  	for {
  3494  		v_0 := v.Args[0]
  3495  		if v_0.Op != OpMIPSMOVWconst {
  3496  			break
  3497  		}
  3498  		c := v_0.AuxInt
  3499  		v.reset(OpMIPSMOVWconst)
  3500  		v.AuxInt = int64(uint8(c))
  3501  		return true
  3502  	}
  3503  	return false
  3504  }
  3505  func rewriteValueMIPS_OpMIPSMOVBload_0(v *Value) bool {
  3506  	// match: (MOVBload [off1] {sym} x:(ADDconst [off2] ptr) mem)
  3507  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  3508  	// result: (MOVBload  [off1+off2] {sym} ptr mem)
  3509  	for {
  3510  		off1 := v.AuxInt
  3511  		sym := v.Aux
  3512  		_ = v.Args[1]
  3513  		x := v.Args[0]
  3514  		if x.Op != OpMIPSADDconst {
  3515  			break
  3516  		}
  3517  		off2 := x.AuxInt
  3518  		ptr := x.Args[0]
  3519  		mem := v.Args[1]
  3520  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  3521  			break
  3522  		}
  3523  		v.reset(OpMIPSMOVBload)
  3524  		v.AuxInt = off1 + off2
  3525  		v.Aux = sym
  3526  		v.AddArg(ptr)
  3527  		v.AddArg(mem)
  3528  		return true
  3529  	}
  3530  	// match: (MOVBload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  3531  	// cond: canMergeSym(sym1,sym2)
  3532  	// result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  3533  	for {
  3534  		off1 := v.AuxInt
  3535  		sym1 := v.Aux
  3536  		_ = v.Args[1]
  3537  		v_0 := v.Args[0]
  3538  		if v_0.Op != OpMIPSMOVWaddr {
  3539  			break
  3540  		}
  3541  		off2 := v_0.AuxInt
  3542  		sym2 := v_0.Aux
  3543  		ptr := v_0.Args[0]
  3544  		mem := v.Args[1]
  3545  		if !(canMergeSym(sym1, sym2)) {
  3546  			break
  3547  		}
  3548  		v.reset(OpMIPSMOVBload)
  3549  		v.AuxInt = off1 + off2
  3550  		v.Aux = mergeSym(sym1, sym2)
  3551  		v.AddArg(ptr)
  3552  		v.AddArg(mem)
  3553  		return true
  3554  	}
  3555  	// match: (MOVBload [off] {sym} ptr (MOVBstore [off2] {sym2} ptr2 x _))
  3556  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  3557  	// result: (MOVBreg x)
  3558  	for {
  3559  		off := v.AuxInt
  3560  		sym := v.Aux
  3561  		_ = v.Args[1]
  3562  		ptr := v.Args[0]
  3563  		v_1 := v.Args[1]
  3564  		if v_1.Op != OpMIPSMOVBstore {
  3565  			break
  3566  		}
  3567  		off2 := v_1.AuxInt
  3568  		sym2 := v_1.Aux
  3569  		_ = v_1.Args[2]
  3570  		ptr2 := v_1.Args[0]
  3571  		x := v_1.Args[1]
  3572  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  3573  			break
  3574  		}
  3575  		v.reset(OpMIPSMOVBreg)
  3576  		v.AddArg(x)
  3577  		return true
  3578  	}
  3579  	return false
  3580  }
  3581  func rewriteValueMIPS_OpMIPSMOVBreg_0(v *Value) bool {
  3582  	b := v.Block
  3583  	_ = b
  3584  	// match: (MOVBreg x:(MOVBload _ _))
  3585  	// cond:
  3586  	// result: (MOVWreg x)
  3587  	for {
  3588  		x := v.Args[0]
  3589  		if x.Op != OpMIPSMOVBload {
  3590  			break
  3591  		}
  3592  		_ = x.Args[1]
  3593  		v.reset(OpMIPSMOVWreg)
  3594  		v.AddArg(x)
  3595  		return true
  3596  	}
  3597  	// match: (MOVBreg x:(MOVBreg _))
  3598  	// cond:
  3599  	// result: (MOVWreg x)
  3600  	for {
  3601  		x := v.Args[0]
  3602  		if x.Op != OpMIPSMOVBreg {
  3603  			break
  3604  		}
  3605  		v.reset(OpMIPSMOVWreg)
  3606  		v.AddArg(x)
  3607  		return true
  3608  	}
  3609  	// match: (MOVBreg <t> x:(MOVBUload [off] {sym} ptr mem))
  3610  	// cond: x.Uses == 1 && clobber(x)
  3611  	// result: @x.Block (MOVBload <t> [off] {sym} ptr mem)
  3612  	for {
  3613  		t := v.Type
  3614  		x := v.Args[0]
  3615  		if x.Op != OpMIPSMOVBUload {
  3616  			break
  3617  		}
  3618  		off := x.AuxInt
  3619  		sym := x.Aux
  3620  		_ = x.Args[1]
  3621  		ptr := x.Args[0]
  3622  		mem := x.Args[1]
  3623  		if !(x.Uses == 1 && clobber(x)) {
  3624  			break
  3625  		}
  3626  		b = x.Block
  3627  		v0 := b.NewValue0(v.Pos, OpMIPSMOVBload, t)
  3628  		v.reset(OpCopy)
  3629  		v.AddArg(v0)
  3630  		v0.AuxInt = off
  3631  		v0.Aux = sym
  3632  		v0.AddArg(ptr)
  3633  		v0.AddArg(mem)
  3634  		return true
  3635  	}
  3636  	// match: (MOVBreg (ANDconst [c] x))
  3637  	// cond: c & 0x80 == 0
  3638  	// result: (ANDconst [c&0x7f] x)
  3639  	for {
  3640  		v_0 := v.Args[0]
  3641  		if v_0.Op != OpMIPSANDconst {
  3642  			break
  3643  		}
  3644  		c := v_0.AuxInt
  3645  		x := v_0.Args[0]
  3646  		if !(c&0x80 == 0) {
  3647  			break
  3648  		}
  3649  		v.reset(OpMIPSANDconst)
  3650  		v.AuxInt = c & 0x7f
  3651  		v.AddArg(x)
  3652  		return true
  3653  	}
  3654  	// match: (MOVBreg (MOVWconst [c]))
  3655  	// cond:
  3656  	// result: (MOVWconst [int64(int8(c))])
  3657  	for {
  3658  		v_0 := v.Args[0]
  3659  		if v_0.Op != OpMIPSMOVWconst {
  3660  			break
  3661  		}
  3662  		c := v_0.AuxInt
  3663  		v.reset(OpMIPSMOVWconst)
  3664  		v.AuxInt = int64(int8(c))
  3665  		return true
  3666  	}
  3667  	return false
  3668  }
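        // MOVBstore, besides the usual offset/symbol folding, turns a store of
        // constant zero into MOVBstorezero and drops any MOV*reg extension of the
        // stored value, since only the low 8 bits reach memory.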
  3669  func rewriteValueMIPS_OpMIPSMOVBstore_0(v *Value) bool {
  3670  	// match: (MOVBstore [off1] {sym} x:(ADDconst [off2] ptr) val mem)
  3671  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  3672  	// result: (MOVBstore [off1+off2] {sym} ptr val mem)
  3673  	for {
  3674  		off1 := v.AuxInt
  3675  		sym := v.Aux
  3676  		_ = v.Args[2]
  3677  		x := v.Args[0]
  3678  		if x.Op != OpMIPSADDconst {
  3679  			break
  3680  		}
  3681  		off2 := x.AuxInt
  3682  		ptr := x.Args[0]
  3683  		val := v.Args[1]
  3684  		mem := v.Args[2]
  3685  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  3686  			break
  3687  		}
  3688  		v.reset(OpMIPSMOVBstore)
  3689  		v.AuxInt = off1 + off2
  3690  		v.Aux = sym
  3691  		v.AddArg(ptr)
  3692  		v.AddArg(val)
  3693  		v.AddArg(mem)
  3694  		return true
  3695  	}
  3696  	// match: (MOVBstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem)
  3697  	// cond: canMergeSym(sym1,sym2)
  3698  	// result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
  3699  	for {
  3700  		off1 := v.AuxInt
  3701  		sym1 := v.Aux
  3702  		_ = v.Args[2]
  3703  		v_0 := v.Args[0]
  3704  		if v_0.Op != OpMIPSMOVWaddr {
  3705  			break
  3706  		}
  3707  		off2 := v_0.AuxInt
  3708  		sym2 := v_0.Aux
  3709  		ptr := v_0.Args[0]
  3710  		val := v.Args[1]
  3711  		mem := v.Args[2]
  3712  		if !(canMergeSym(sym1, sym2)) {
  3713  			break
  3714  		}
  3715  		v.reset(OpMIPSMOVBstore)
  3716  		v.AuxInt = off1 + off2
  3717  		v.Aux = mergeSym(sym1, sym2)
  3718  		v.AddArg(ptr)
  3719  		v.AddArg(val)
  3720  		v.AddArg(mem)
  3721  		return true
  3722  	}
  3723  	// match: (MOVBstore [off] {sym} ptr (MOVWconst [0]) mem)
  3724  	// cond:
  3725  	// result: (MOVBstorezero [off] {sym} ptr mem)
  3726  	for {
  3727  		off := v.AuxInt
  3728  		sym := v.Aux
  3729  		_ = v.Args[2]
  3730  		ptr := v.Args[0]
  3731  		v_1 := v.Args[1]
  3732  		if v_1.Op != OpMIPSMOVWconst {
  3733  			break
  3734  		}
  3735  		if v_1.AuxInt != 0 {
  3736  			break
  3737  		}
  3738  		mem := v.Args[2]
  3739  		v.reset(OpMIPSMOVBstorezero)
  3740  		v.AuxInt = off
  3741  		v.Aux = sym
  3742  		v.AddArg(ptr)
  3743  		v.AddArg(mem)
  3744  		return true
  3745  	}
  3746  	// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
  3747  	// cond:
  3748  	// result: (MOVBstore [off] {sym} ptr x mem)
  3749  	for {
  3750  		off := v.AuxInt
  3751  		sym := v.Aux
  3752  		_ = v.Args[2]
  3753  		ptr := v.Args[0]
  3754  		v_1 := v.Args[1]
  3755  		if v_1.Op != OpMIPSMOVBreg {
  3756  			break
  3757  		}
  3758  		x := v_1.Args[0]
  3759  		mem := v.Args[2]
  3760  		v.reset(OpMIPSMOVBstore)
  3761  		v.AuxInt = off
  3762  		v.Aux = sym
  3763  		v.AddArg(ptr)
  3764  		v.AddArg(x)
  3765  		v.AddArg(mem)
  3766  		return true
  3767  	}
  3768  	// match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
  3769  	// cond:
  3770  	// result: (MOVBstore [off] {sym} ptr x mem)
  3771  	for {
  3772  		off := v.AuxInt
  3773  		sym := v.Aux
  3774  		_ = v.Args[2]
  3775  		ptr := v.Args[0]
  3776  		v_1 := v.Args[1]
  3777  		if v_1.Op != OpMIPSMOVBUreg {
  3778  			break
  3779  		}
  3780  		x := v_1.Args[0]
  3781  		mem := v.Args[2]
  3782  		v.reset(OpMIPSMOVBstore)
  3783  		v.AuxInt = off
  3784  		v.Aux = sym
  3785  		v.AddArg(ptr)
  3786  		v.AddArg(x)
  3787  		v.AddArg(mem)
  3788  		return true
  3789  	}
  3790  	// match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
  3791  	// cond:
  3792  	// result: (MOVBstore [off] {sym} ptr x mem)
  3793  	for {
  3794  		off := v.AuxInt
  3795  		sym := v.Aux
  3796  		_ = v.Args[2]
  3797  		ptr := v.Args[0]
  3798  		v_1 := v.Args[1]
  3799  		if v_1.Op != OpMIPSMOVHreg {
  3800  			break
  3801  		}
  3802  		x := v_1.Args[0]
  3803  		mem := v.Args[2]
  3804  		v.reset(OpMIPSMOVBstore)
  3805  		v.AuxInt = off
  3806  		v.Aux = sym
  3807  		v.AddArg(ptr)
  3808  		v.AddArg(x)
  3809  		v.AddArg(mem)
  3810  		return true
  3811  	}
  3812  	// match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
  3813  	// cond:
  3814  	// result: (MOVBstore [off] {sym} ptr x mem)
  3815  	for {
  3816  		off := v.AuxInt
  3817  		sym := v.Aux
  3818  		_ = v.Args[2]
  3819  		ptr := v.Args[0]
  3820  		v_1 := v.Args[1]
  3821  		if v_1.Op != OpMIPSMOVHUreg {
  3822  			break
  3823  		}
  3824  		x := v_1.Args[0]
  3825  		mem := v.Args[2]
  3826  		v.reset(OpMIPSMOVBstore)
  3827  		v.AuxInt = off
  3828  		v.Aux = sym
  3829  		v.AddArg(ptr)
  3830  		v.AddArg(x)
  3831  		v.AddArg(mem)
  3832  		return true
  3833  	}
  3834  	// match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
  3835  	// cond:
  3836  	// result: (MOVBstore [off] {sym} ptr x mem)
  3837  	for {
  3838  		off := v.AuxInt
  3839  		sym := v.Aux
  3840  		_ = v.Args[2]
  3841  		ptr := v.Args[0]
  3842  		v_1 := v.Args[1]
  3843  		if v_1.Op != OpMIPSMOVWreg {
  3844  			break
  3845  		}
  3846  		x := v_1.Args[0]
  3847  		mem := v.Args[2]
  3848  		v.reset(OpMIPSMOVBstore)
  3849  		v.AuxInt = off
  3850  		v.Aux = sym
  3851  		v.AddArg(ptr)
  3852  		v.AddArg(x)
  3853  		v.AddArg(mem)
  3854  		return true
  3855  	}
  3856  	return false
  3857  }
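// rewriteValueMIPS_OpMIPSMOVBstorezero_0 folds addressing arithmetic into the
// zero-store: a constant offset from ADDconst and a symbol+offset from MOVWaddr
// are merged into the store's AuxInt/Aux when the combined offset fits in 16 bits
// (or the ADDconst has no other uses) and the symbols can be merged.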
  3858  func rewriteValueMIPS_OpMIPSMOVBstorezero_0(v *Value) bool {
  3859  	// match: (MOVBstorezero [off1] {sym} x:(ADDconst [off2] ptr) mem)
  3860  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  3861  	// result: (MOVBstorezero [off1+off2] {sym} ptr mem)
  3862  	for {
  3863  		off1 := v.AuxInt
  3864  		sym := v.Aux
  3865  		_ = v.Args[1]
  3866  		x := v.Args[0]
  3867  		if x.Op != OpMIPSADDconst {
  3868  			break
  3869  		}
  3870  		off2 := x.AuxInt
  3871  		ptr := x.Args[0]
  3872  		mem := v.Args[1]
  3873  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  3874  			break
  3875  		}
  3876  		v.reset(OpMIPSMOVBstorezero)
  3877  		v.AuxInt = off1 + off2
  3878  		v.Aux = sym
  3879  		v.AddArg(ptr)
  3880  		v.AddArg(mem)
  3881  		return true
  3882  	}
  3883  	// match: (MOVBstorezero [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  3884  	// cond: canMergeSym(sym1,sym2)
  3885  	// result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  3886  	for {
  3887  		off1 := v.AuxInt
  3888  		sym1 := v.Aux
  3889  		_ = v.Args[1]
  3890  		v_0 := v.Args[0]
  3891  		if v_0.Op != OpMIPSMOVWaddr {
  3892  			break
  3893  		}
  3894  		off2 := v_0.AuxInt
  3895  		sym2 := v_0.Aux
  3896  		ptr := v_0.Args[0]
  3897  		mem := v.Args[1]
  3898  		if !(canMergeSym(sym1, sym2)) {
  3899  			break
  3900  		}
  3901  		v.reset(OpMIPSMOVBstorezero)
  3902  		v.AuxInt = off1 + off2
  3903  		v.Aux = mergeSym(sym1, sym2)
  3904  		v.AddArg(ptr)
  3905  		v.AddArg(mem)
  3906  		return true
  3907  	}
  3908  	return false
  3909  }
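// rewriteValueMIPS_OpMIPSMOVDload_0 folds an ADDconst offset or a MOVWaddr
// symbol+offset into the load's AuxInt/Aux, and forwards stores to loads: a
// MOVDload whose memory comes directly from a MOVDstore to the same symbol,
// offset, and pointer is replaced by the stored value.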
  3910  func rewriteValueMIPS_OpMIPSMOVDload_0(v *Value) bool {
  3911  	// match: (MOVDload [off1] {sym} x:(ADDconst [off2] ptr) mem)
  3912  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  3913  	// result: (MOVDload  [off1+off2] {sym} ptr mem)
  3914  	for {
  3915  		off1 := v.AuxInt
  3916  		sym := v.Aux
  3917  		_ = v.Args[1]
  3918  		x := v.Args[0]
  3919  		if x.Op != OpMIPSADDconst {
  3920  			break
  3921  		}
  3922  		off2 := x.AuxInt
  3923  		ptr := x.Args[0]
  3924  		mem := v.Args[1]
  3925  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  3926  			break
  3927  		}
  3928  		v.reset(OpMIPSMOVDload)
  3929  		v.AuxInt = off1 + off2
  3930  		v.Aux = sym
  3931  		v.AddArg(ptr)
  3932  		v.AddArg(mem)
  3933  		return true
  3934  	}
  3935  	// match: (MOVDload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  3936  	// cond: canMergeSym(sym1,sym2)
  3937  	// result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  3938  	for {
  3939  		off1 := v.AuxInt
  3940  		sym1 := v.Aux
  3941  		_ = v.Args[1]
  3942  		v_0 := v.Args[0]
  3943  		if v_0.Op != OpMIPSMOVWaddr {
  3944  			break
  3945  		}
  3946  		off2 := v_0.AuxInt
  3947  		sym2 := v_0.Aux
  3948  		ptr := v_0.Args[0]
  3949  		mem := v.Args[1]
  3950  		if !(canMergeSym(sym1, sym2)) {
  3951  			break
  3952  		}
  3953  		v.reset(OpMIPSMOVDload)
  3954  		v.AuxInt = off1 + off2
  3955  		v.Aux = mergeSym(sym1, sym2)
  3956  		v.AddArg(ptr)
  3957  		v.AddArg(mem)
  3958  		return true
  3959  	}
  3960  	// match: (MOVDload [off] {sym} ptr (MOVDstore [off2] {sym2} ptr2 x _))
  3961  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  3962  	// result: x
  3963  	for {
  3964  		off := v.AuxInt
  3965  		sym := v.Aux
  3966  		_ = v.Args[1]
  3967  		ptr := v.Args[0]
  3968  		v_1 := v.Args[1]
  3969  		if v_1.Op != OpMIPSMOVDstore {
  3970  			break
  3971  		}
  3972  		off2 := v_1.AuxInt
  3973  		sym2 := v_1.Aux
  3974  		_ = v_1.Args[2]
  3975  		ptr2 := v_1.Args[0]
  3976  		x := v_1.Args[1]
  3977  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  3978  			break
  3979  		}
  3980  		v.reset(OpCopy)
  3981  		v.Type = x.Type
  3982  		v.AddArg(x)
  3983  		return true
  3984  	}
  3985  	return false
  3986  }
  3987  func rewriteValueMIPS_OpMIPSMOVDstore_0(v *Value) bool {
  3988  	// match: (MOVDstore [off1] {sym} x:(ADDconst [off2] ptr) val mem)
  3989  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  3990  	// result: (MOVDstore [off1+off2] {sym} ptr val mem)
  3991  	for {
  3992  		off1 := v.AuxInt
  3993  		sym := v.Aux
  3994  		_ = v.Args[2]
  3995  		x := v.Args[0]
  3996  		if x.Op != OpMIPSADDconst {
  3997  			break
  3998  		}
  3999  		off2 := x.AuxInt
  4000  		ptr := x.Args[0]
  4001  		val := v.Args[1]
  4002  		mem := v.Args[2]
  4003  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4004  			break
  4005  		}
  4006  		v.reset(OpMIPSMOVDstore)
  4007  		v.AuxInt = off1 + off2
  4008  		v.Aux = sym
  4009  		v.AddArg(ptr)
  4010  		v.AddArg(val)
  4011  		v.AddArg(mem)
  4012  		return true
  4013  	}
  4014  	// match: (MOVDstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem)
  4015  	// cond: canMergeSym(sym1,sym2)
  4016  	// result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
  4017  	for {
  4018  		off1 := v.AuxInt
  4019  		sym1 := v.Aux
  4020  		_ = v.Args[2]
  4021  		v_0 := v.Args[0]
  4022  		if v_0.Op != OpMIPSMOVWaddr {
  4023  			break
  4024  		}
  4025  		off2 := v_0.AuxInt
  4026  		sym2 := v_0.Aux
  4027  		ptr := v_0.Args[0]
  4028  		val := v.Args[1]
  4029  		mem := v.Args[2]
  4030  		if !(canMergeSym(sym1, sym2)) {
  4031  			break
  4032  		}
  4033  		v.reset(OpMIPSMOVDstore)
  4034  		v.AuxInt = off1 + off2
  4035  		v.Aux = mergeSym(sym1, sym2)
  4036  		v.AddArg(ptr)
  4037  		v.AddArg(val)
  4038  		v.AddArg(mem)
  4039  		return true
  4040  	}
  4041  	return false
  4042  }
  4043  func rewriteValueMIPS_OpMIPSMOVFload_0(v *Value) bool {
  4044  	// match: (MOVFload [off1] {sym} x:(ADDconst [off2] ptr) mem)
  4045  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4046  	// result: (MOVFload  [off1+off2] {sym} ptr mem)
  4047  	for {
  4048  		off1 := v.AuxInt
  4049  		sym := v.Aux
  4050  		_ = v.Args[1]
  4051  		x := v.Args[0]
  4052  		if x.Op != OpMIPSADDconst {
  4053  			break
  4054  		}
  4055  		off2 := x.AuxInt
  4056  		ptr := x.Args[0]
  4057  		mem := v.Args[1]
  4058  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4059  			break
  4060  		}
  4061  		v.reset(OpMIPSMOVFload)
  4062  		v.AuxInt = off1 + off2
  4063  		v.Aux = sym
  4064  		v.AddArg(ptr)
  4065  		v.AddArg(mem)
  4066  		return true
  4067  	}
  4068  	// match: (MOVFload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  4069  	// cond: canMergeSym(sym1,sym2)
  4070  	// result: (MOVFload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4071  	for {
  4072  		off1 := v.AuxInt
  4073  		sym1 := v.Aux
  4074  		_ = v.Args[1]
  4075  		v_0 := v.Args[0]
  4076  		if v_0.Op != OpMIPSMOVWaddr {
  4077  			break
  4078  		}
  4079  		off2 := v_0.AuxInt
  4080  		sym2 := v_0.Aux
  4081  		ptr := v_0.Args[0]
  4082  		mem := v.Args[1]
  4083  		if !(canMergeSym(sym1, sym2)) {
  4084  			break
  4085  		}
  4086  		v.reset(OpMIPSMOVFload)
  4087  		v.AuxInt = off1 + off2
  4088  		v.Aux = mergeSym(sym1, sym2)
  4089  		v.AddArg(ptr)
  4090  		v.AddArg(mem)
  4091  		return true
  4092  	}
  4093  	// match: (MOVFload [off] {sym} ptr (MOVFstore [off2] {sym2} ptr2 x _))
  4094  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  4095  	// result: x
  4096  	for {
  4097  		off := v.AuxInt
  4098  		sym := v.Aux
  4099  		_ = v.Args[1]
  4100  		ptr := v.Args[0]
  4101  		v_1 := v.Args[1]
  4102  		if v_1.Op != OpMIPSMOVFstore {
  4103  			break
  4104  		}
  4105  		off2 := v_1.AuxInt
  4106  		sym2 := v_1.Aux
  4107  		_ = v_1.Args[2]
  4108  		ptr2 := v_1.Args[0]
  4109  		x := v_1.Args[1]
  4110  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  4111  			break
  4112  		}
  4113  		v.reset(OpCopy)
  4114  		v.Type = x.Type
  4115  		v.AddArg(x)
  4116  		return true
  4117  	}
  4118  	return false
  4119  }
  4120  func rewriteValueMIPS_OpMIPSMOVFstore_0(v *Value) bool {
  4121  	// match: (MOVFstore [off1] {sym} x:(ADDconst [off2] ptr) val mem)
  4122  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4123  	// result: (MOVFstore [off1+off2] {sym} ptr val mem)
  4124  	for {
  4125  		off1 := v.AuxInt
  4126  		sym := v.Aux
  4127  		_ = v.Args[2]
  4128  		x := v.Args[0]
  4129  		if x.Op != OpMIPSADDconst {
  4130  			break
  4131  		}
  4132  		off2 := x.AuxInt
  4133  		ptr := x.Args[0]
  4134  		val := v.Args[1]
  4135  		mem := v.Args[2]
  4136  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4137  			break
  4138  		}
  4139  		v.reset(OpMIPSMOVFstore)
  4140  		v.AuxInt = off1 + off2
  4141  		v.Aux = sym
  4142  		v.AddArg(ptr)
  4143  		v.AddArg(val)
  4144  		v.AddArg(mem)
  4145  		return true
  4146  	}
  4147  	// match: (MOVFstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem)
  4148  	// cond: canMergeSym(sym1,sym2)
  4149  	// result: (MOVFstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
  4150  	for {
  4151  		off1 := v.AuxInt
  4152  		sym1 := v.Aux
  4153  		_ = v.Args[2]
  4154  		v_0 := v.Args[0]
  4155  		if v_0.Op != OpMIPSMOVWaddr {
  4156  			break
  4157  		}
  4158  		off2 := v_0.AuxInt
  4159  		sym2 := v_0.Aux
  4160  		ptr := v_0.Args[0]
  4161  		val := v.Args[1]
  4162  		mem := v.Args[2]
  4163  		if !(canMergeSym(sym1, sym2)) {
  4164  			break
  4165  		}
  4166  		v.reset(OpMIPSMOVFstore)
  4167  		v.AuxInt = off1 + off2
  4168  		v.Aux = mergeSym(sym1, sym2)
  4169  		v.AddArg(ptr)
  4170  		v.AddArg(val)
  4171  		v.AddArg(mem)
  4172  		return true
  4173  	}
  4174  	return false
  4175  }
  4176  func rewriteValueMIPS_OpMIPSMOVHUload_0(v *Value) bool {
  4177  	// match: (MOVHUload [off1] {sym} x:(ADDconst [off2] ptr) mem)
  4178  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4179  	// result: (MOVHUload [off1+off2] {sym} ptr mem)
  4180  	for {
  4181  		off1 := v.AuxInt
  4182  		sym := v.Aux
  4183  		_ = v.Args[1]
  4184  		x := v.Args[0]
  4185  		if x.Op != OpMIPSADDconst {
  4186  			break
  4187  		}
  4188  		off2 := x.AuxInt
  4189  		ptr := x.Args[0]
  4190  		mem := v.Args[1]
  4191  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4192  			break
  4193  		}
  4194  		v.reset(OpMIPSMOVHUload)
  4195  		v.AuxInt = off1 + off2
  4196  		v.Aux = sym
  4197  		v.AddArg(ptr)
  4198  		v.AddArg(mem)
  4199  		return true
  4200  	}
  4201  	// match: (MOVHUload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  4202  	// cond: canMergeSym(sym1,sym2)
  4203  	// result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4204  	for {
  4205  		off1 := v.AuxInt
  4206  		sym1 := v.Aux
  4207  		_ = v.Args[1]
  4208  		v_0 := v.Args[0]
  4209  		if v_0.Op != OpMIPSMOVWaddr {
  4210  			break
  4211  		}
  4212  		off2 := v_0.AuxInt
  4213  		sym2 := v_0.Aux
  4214  		ptr := v_0.Args[0]
  4215  		mem := v.Args[1]
  4216  		if !(canMergeSym(sym1, sym2)) {
  4217  			break
  4218  		}
  4219  		v.reset(OpMIPSMOVHUload)
  4220  		v.AuxInt = off1 + off2
  4221  		v.Aux = mergeSym(sym1, sym2)
  4222  		v.AddArg(ptr)
  4223  		v.AddArg(mem)
  4224  		return true
  4225  	}
  4226  	// match: (MOVHUload [off] {sym} ptr (MOVHstore [off2] {sym2} ptr2 x _))
  4227  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  4228  	// result: (MOVHUreg x)
  4229  	for {
  4230  		off := v.AuxInt
  4231  		sym := v.Aux
  4232  		_ = v.Args[1]
  4233  		ptr := v.Args[0]
  4234  		v_1 := v.Args[1]
  4235  		if v_1.Op != OpMIPSMOVHstore {
  4236  			break
  4237  		}
  4238  		off2 := v_1.AuxInt
  4239  		sym2 := v_1.Aux
  4240  		_ = v_1.Args[2]
  4241  		ptr2 := v_1.Args[0]
  4242  		x := v_1.Args[1]
  4243  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  4244  			break
  4245  		}
  4246  		v.reset(OpMIPSMOVHUreg)
  4247  		v.AddArg(x)
  4248  		return true
  4249  	}
  4250  	return false
  4251  }
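// rewriteValueMIPS_OpMIPSMOVHUreg_0 drops the zero-extension when the operand is a
// narrower (or equal-width) unsigned load or unsigned extension that cannot be
// changed by it, converts MOVHUreg of a single-use signed MOVHload into a MOVHUload
// issued in the load's block, and constant-folds the ANDconst and MOVWconst cases
// (e.g. AuxInt c = -1 folds to int64(uint16(c)) = 0xffff).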
  4252  func rewriteValueMIPS_OpMIPSMOVHUreg_0(v *Value) bool {
  4253  	b := v.Block
  4254  	_ = b
  4255  	// match: (MOVHUreg x:(MOVBUload _ _))
  4256  	// cond:
  4257  	// result: (MOVWreg x)
  4258  	for {
  4259  		x := v.Args[0]
  4260  		if x.Op != OpMIPSMOVBUload {
  4261  			break
  4262  		}
  4263  		_ = x.Args[1]
  4264  		v.reset(OpMIPSMOVWreg)
  4265  		v.AddArg(x)
  4266  		return true
  4267  	}
  4268  	// match: (MOVHUreg x:(MOVHUload _ _))
  4269  	// cond:
  4270  	// result: (MOVWreg x)
  4271  	for {
  4272  		x := v.Args[0]
  4273  		if x.Op != OpMIPSMOVHUload {
  4274  			break
  4275  		}
  4276  		_ = x.Args[1]
  4277  		v.reset(OpMIPSMOVWreg)
  4278  		v.AddArg(x)
  4279  		return true
  4280  	}
  4281  	// match: (MOVHUreg x:(MOVBUreg _))
  4282  	// cond:
  4283  	// result: (MOVWreg x)
  4284  	for {
  4285  		x := v.Args[0]
  4286  		if x.Op != OpMIPSMOVBUreg {
  4287  			break
  4288  		}
  4289  		v.reset(OpMIPSMOVWreg)
  4290  		v.AddArg(x)
  4291  		return true
  4292  	}
  4293  	// match: (MOVHUreg x:(MOVHUreg _))
  4294  	// cond:
  4295  	// result: (MOVWreg x)
  4296  	for {
  4297  		x := v.Args[0]
  4298  		if x.Op != OpMIPSMOVHUreg {
  4299  			break
  4300  		}
  4301  		v.reset(OpMIPSMOVWreg)
  4302  		v.AddArg(x)
  4303  		return true
  4304  	}
  4305  	// match: (MOVHUreg <t> x:(MOVHload [off] {sym} ptr mem))
  4306  	// cond: x.Uses == 1 && clobber(x)
  4307  	// result: @x.Block (MOVHUload <t> [off] {sym} ptr mem)
  4308  	for {
  4309  		t := v.Type
  4310  		x := v.Args[0]
  4311  		if x.Op != OpMIPSMOVHload {
  4312  			break
  4313  		}
  4314  		off := x.AuxInt
  4315  		sym := x.Aux
  4316  		_ = x.Args[1]
  4317  		ptr := x.Args[0]
  4318  		mem := x.Args[1]
  4319  		if !(x.Uses == 1 && clobber(x)) {
  4320  			break
  4321  		}
  4322  		b = x.Block
  4323  		v0 := b.NewValue0(v.Pos, OpMIPSMOVHUload, t)
  4324  		v.reset(OpCopy)
  4325  		v.AddArg(v0)
  4326  		v0.AuxInt = off
  4327  		v0.Aux = sym
  4328  		v0.AddArg(ptr)
  4329  		v0.AddArg(mem)
  4330  		return true
  4331  	}
  4332  	// match: (MOVHUreg (ANDconst [c] x))
  4333  	// cond:
  4334  	// result: (ANDconst [c&0xffff] x)
  4335  	for {
  4336  		v_0 := v.Args[0]
  4337  		if v_0.Op != OpMIPSANDconst {
  4338  			break
  4339  		}
  4340  		c := v_0.AuxInt
  4341  		x := v_0.Args[0]
  4342  		v.reset(OpMIPSANDconst)
  4343  		v.AuxInt = c & 0xffff
  4344  		v.AddArg(x)
  4345  		return true
  4346  	}
  4347  	// match: (MOVHUreg (MOVWconst [c]))
  4348  	// cond:
  4349  	// result: (MOVWconst [int64(uint16(c))])
  4350  	for {
  4351  		v_0 := v.Args[0]
  4352  		if v_0.Op != OpMIPSMOVWconst {
  4353  			break
  4354  		}
  4355  		c := v_0.AuxInt
  4356  		v.reset(OpMIPSMOVWconst)
  4357  		v.AuxInt = int64(uint16(c))
  4358  		return true
  4359  	}
  4360  	return false
  4361  }
  4362  func rewriteValueMIPS_OpMIPSMOVHload_0(v *Value) bool {
  4363  	// match: (MOVHload [off1] {sym} x:(ADDconst [off2] ptr) mem)
  4364  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4365  	// result: (MOVHload  [off1+off2] {sym} ptr mem)
  4366  	for {
  4367  		off1 := v.AuxInt
  4368  		sym := v.Aux
  4369  		_ = v.Args[1]
  4370  		x := v.Args[0]
  4371  		if x.Op != OpMIPSADDconst {
  4372  			break
  4373  		}
  4374  		off2 := x.AuxInt
  4375  		ptr := x.Args[0]
  4376  		mem := v.Args[1]
  4377  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4378  			break
  4379  		}
  4380  		v.reset(OpMIPSMOVHload)
  4381  		v.AuxInt = off1 + off2
  4382  		v.Aux = sym
  4383  		v.AddArg(ptr)
  4384  		v.AddArg(mem)
  4385  		return true
  4386  	}
  4387  	// match: (MOVHload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  4388  	// cond: canMergeSym(sym1,sym2)
  4389  	// result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4390  	for {
  4391  		off1 := v.AuxInt
  4392  		sym1 := v.Aux
  4393  		_ = v.Args[1]
  4394  		v_0 := v.Args[0]
  4395  		if v_0.Op != OpMIPSMOVWaddr {
  4396  			break
  4397  		}
  4398  		off2 := v_0.AuxInt
  4399  		sym2 := v_0.Aux
  4400  		ptr := v_0.Args[0]
  4401  		mem := v.Args[1]
  4402  		if !(canMergeSym(sym1, sym2)) {
  4403  			break
  4404  		}
  4405  		v.reset(OpMIPSMOVHload)
  4406  		v.AuxInt = off1 + off2
  4407  		v.Aux = mergeSym(sym1, sym2)
  4408  		v.AddArg(ptr)
  4409  		v.AddArg(mem)
  4410  		return true
  4411  	}
  4412  	// match: (MOVHload [off] {sym} ptr (MOVHstore [off2] {sym2} ptr2 x _))
  4413  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  4414  	// result: (MOVHreg x)
  4415  	for {
  4416  		off := v.AuxInt
  4417  		sym := v.Aux
  4418  		_ = v.Args[1]
  4419  		ptr := v.Args[0]
  4420  		v_1 := v.Args[1]
  4421  		if v_1.Op != OpMIPSMOVHstore {
  4422  			break
  4423  		}
  4424  		off2 := v_1.AuxInt
  4425  		sym2 := v_1.Aux
  4426  		_ = v_1.Args[2]
  4427  		ptr2 := v_1.Args[0]
  4428  		x := v_1.Args[1]
  4429  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  4430  			break
  4431  		}
  4432  		v.reset(OpMIPSMOVHreg)
  4433  		v.AddArg(x)
  4434  		return true
  4435  	}
  4436  	return false
  4437  }
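// rewriteValueMIPS_OpMIPSMOVHreg_0 drops the sign-extension when the operand is a
// narrower load or register extension whose result it cannot change, converts
// MOVHreg of a single-use MOVHUload into a MOVHload in the load's block, simplifies
// MOVHreg of ANDconst when the mask leaves bit 15 clear (the extension is then a
// no-op), and constant-folds MOVWconst via int64(int16(c)).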
  4438  func rewriteValueMIPS_OpMIPSMOVHreg_0(v *Value) bool {
  4439  	b := v.Block
  4440  	_ = b
  4441  	// match: (MOVHreg x:(MOVBload _ _))
  4442  	// cond:
  4443  	// result: (MOVWreg x)
  4444  	for {
  4445  		x := v.Args[0]
  4446  		if x.Op != OpMIPSMOVBload {
  4447  			break
  4448  		}
  4449  		_ = x.Args[1]
  4450  		v.reset(OpMIPSMOVWreg)
  4451  		v.AddArg(x)
  4452  		return true
  4453  	}
  4454  	// match: (MOVHreg x:(MOVBUload _ _))
  4455  	// cond:
  4456  	// result: (MOVWreg x)
  4457  	for {
  4458  		x := v.Args[0]
  4459  		if x.Op != OpMIPSMOVBUload {
  4460  			break
  4461  		}
  4462  		_ = x.Args[1]
  4463  		v.reset(OpMIPSMOVWreg)
  4464  		v.AddArg(x)
  4465  		return true
  4466  	}
  4467  	// match: (MOVHreg x:(MOVHload _ _))
  4468  	// cond:
  4469  	// result: (MOVWreg x)
  4470  	for {
  4471  		x := v.Args[0]
  4472  		if x.Op != OpMIPSMOVHload {
  4473  			break
  4474  		}
  4475  		_ = x.Args[1]
  4476  		v.reset(OpMIPSMOVWreg)
  4477  		v.AddArg(x)
  4478  		return true
  4479  	}
  4480  	// match: (MOVHreg x:(MOVBreg _))
  4481  	// cond:
  4482  	// result: (MOVWreg x)
  4483  	for {
  4484  		x := v.Args[0]
  4485  		if x.Op != OpMIPSMOVBreg {
  4486  			break
  4487  		}
  4488  		v.reset(OpMIPSMOVWreg)
  4489  		v.AddArg(x)
  4490  		return true
  4491  	}
  4492  	// match: (MOVHreg x:(MOVBUreg _))
  4493  	// cond:
  4494  	// result: (MOVWreg x)
  4495  	for {
  4496  		x := v.Args[0]
  4497  		if x.Op != OpMIPSMOVBUreg {
  4498  			break
  4499  		}
  4500  		v.reset(OpMIPSMOVWreg)
  4501  		v.AddArg(x)
  4502  		return true
  4503  	}
  4504  	// match: (MOVHreg x:(MOVHreg _))
  4505  	// cond:
  4506  	// result: (MOVWreg x)
  4507  	for {
  4508  		x := v.Args[0]
  4509  		if x.Op != OpMIPSMOVHreg {
  4510  			break
  4511  		}
  4512  		v.reset(OpMIPSMOVWreg)
  4513  		v.AddArg(x)
  4514  		return true
  4515  	}
  4516  	// match: (MOVHreg <t> x:(MOVHUload [off] {sym} ptr mem))
  4517  	// cond: x.Uses == 1 && clobber(x)
  4518  	// result: @x.Block (MOVHload <t> [off] {sym} ptr mem)
  4519  	for {
  4520  		t := v.Type
  4521  		x := v.Args[0]
  4522  		if x.Op != OpMIPSMOVHUload {
  4523  			break
  4524  		}
  4525  		off := x.AuxInt
  4526  		sym := x.Aux
  4527  		_ = x.Args[1]
  4528  		ptr := x.Args[0]
  4529  		mem := x.Args[1]
  4530  		if !(x.Uses == 1 && clobber(x)) {
  4531  			break
  4532  		}
  4533  		b = x.Block
  4534  		v0 := b.NewValue0(v.Pos, OpMIPSMOVHload, t)
  4535  		v.reset(OpCopy)
  4536  		v.AddArg(v0)
  4537  		v0.AuxInt = off
  4538  		v0.Aux = sym
  4539  		v0.AddArg(ptr)
  4540  		v0.AddArg(mem)
  4541  		return true
  4542  	}
  4543  	// match: (MOVHreg (ANDconst [c] x))
  4544  	// cond: c & 0x8000 == 0
  4545  	// result: (ANDconst [c&0x7fff] x)
  4546  	for {
  4547  		v_0 := v.Args[0]
  4548  		if v_0.Op != OpMIPSANDconst {
  4549  			break
  4550  		}
  4551  		c := v_0.AuxInt
  4552  		x := v_0.Args[0]
  4553  		if !(c&0x8000 == 0) {
  4554  			break
  4555  		}
  4556  		v.reset(OpMIPSANDconst)
  4557  		v.AuxInt = c & 0x7fff
  4558  		v.AddArg(x)
  4559  		return true
  4560  	}
  4561  	// match: (MOVHreg (MOVWconst [c]))
  4562  	// cond:
  4563  	// result: (MOVWconst [int64(int16(c))])
  4564  	for {
  4565  		v_0 := v.Args[0]
  4566  		if v_0.Op != OpMIPSMOVWconst {
  4567  			break
  4568  		}
  4569  		c := v_0.AuxInt
  4570  		v.reset(OpMIPSMOVWconst)
  4571  		v.AuxInt = int64(int16(c))
  4572  		return true
  4573  	}
  4574  	return false
  4575  }
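// rewriteValueMIPS_OpMIPSMOVHstore_0 folds addressing arithmetic like the load
// rules, turns a store of constant zero into MOVHstorezero, and strips MOVHreg,
// MOVHUreg, and MOVWreg extensions from the stored value, since only the low
// 16 bits reach memory anyway.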
  4576  func rewriteValueMIPS_OpMIPSMOVHstore_0(v *Value) bool {
  4577  	// match: (MOVHstore [off1] {sym} x:(ADDconst [off2] ptr) val mem)
  4578  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4579  	// result: (MOVHstore [off1+off2] {sym} ptr val mem)
  4580  	for {
  4581  		off1 := v.AuxInt
  4582  		sym := v.Aux
  4583  		_ = v.Args[2]
  4584  		x := v.Args[0]
  4585  		if x.Op != OpMIPSADDconst {
  4586  			break
  4587  		}
  4588  		off2 := x.AuxInt
  4589  		ptr := x.Args[0]
  4590  		val := v.Args[1]
  4591  		mem := v.Args[2]
  4592  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4593  			break
  4594  		}
  4595  		v.reset(OpMIPSMOVHstore)
  4596  		v.AuxInt = off1 + off2
  4597  		v.Aux = sym
  4598  		v.AddArg(ptr)
  4599  		v.AddArg(val)
  4600  		v.AddArg(mem)
  4601  		return true
  4602  	}
  4603  	// match: (MOVHstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem)
  4604  	// cond: canMergeSym(sym1,sym2)
  4605  	// result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
  4606  	for {
  4607  		off1 := v.AuxInt
  4608  		sym1 := v.Aux
  4609  		_ = v.Args[2]
  4610  		v_0 := v.Args[0]
  4611  		if v_0.Op != OpMIPSMOVWaddr {
  4612  			break
  4613  		}
  4614  		off2 := v_0.AuxInt
  4615  		sym2 := v_0.Aux
  4616  		ptr := v_0.Args[0]
  4617  		val := v.Args[1]
  4618  		mem := v.Args[2]
  4619  		if !(canMergeSym(sym1, sym2)) {
  4620  			break
  4621  		}
  4622  		v.reset(OpMIPSMOVHstore)
  4623  		v.AuxInt = off1 + off2
  4624  		v.Aux = mergeSym(sym1, sym2)
  4625  		v.AddArg(ptr)
  4626  		v.AddArg(val)
  4627  		v.AddArg(mem)
  4628  		return true
  4629  	}
  4630  	// match: (MOVHstore [off] {sym} ptr (MOVWconst [0]) mem)
  4631  	// cond:
  4632  	// result: (MOVHstorezero [off] {sym} ptr mem)
  4633  	for {
  4634  		off := v.AuxInt
  4635  		sym := v.Aux
  4636  		_ = v.Args[2]
  4637  		ptr := v.Args[0]
  4638  		v_1 := v.Args[1]
  4639  		if v_1.Op != OpMIPSMOVWconst {
  4640  			break
  4641  		}
  4642  		if v_1.AuxInt != 0 {
  4643  			break
  4644  		}
  4645  		mem := v.Args[2]
  4646  		v.reset(OpMIPSMOVHstorezero)
  4647  		v.AuxInt = off
  4648  		v.Aux = sym
  4649  		v.AddArg(ptr)
  4650  		v.AddArg(mem)
  4651  		return true
  4652  	}
  4653  	// match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
  4654  	// cond:
  4655  	// result: (MOVHstore [off] {sym} ptr x mem)
  4656  	for {
  4657  		off := v.AuxInt
  4658  		sym := v.Aux
  4659  		_ = v.Args[2]
  4660  		ptr := v.Args[0]
  4661  		v_1 := v.Args[1]
  4662  		if v_1.Op != OpMIPSMOVHreg {
  4663  			break
  4664  		}
  4665  		x := v_1.Args[0]
  4666  		mem := v.Args[2]
  4667  		v.reset(OpMIPSMOVHstore)
  4668  		v.AuxInt = off
  4669  		v.Aux = sym
  4670  		v.AddArg(ptr)
  4671  		v.AddArg(x)
  4672  		v.AddArg(mem)
  4673  		return true
  4674  	}
  4675  	// match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
  4676  	// cond:
  4677  	// result: (MOVHstore [off] {sym} ptr x mem)
  4678  	for {
  4679  		off := v.AuxInt
  4680  		sym := v.Aux
  4681  		_ = v.Args[2]
  4682  		ptr := v.Args[0]
  4683  		v_1 := v.Args[1]
  4684  		if v_1.Op != OpMIPSMOVHUreg {
  4685  			break
  4686  		}
  4687  		x := v_1.Args[0]
  4688  		mem := v.Args[2]
  4689  		v.reset(OpMIPSMOVHstore)
  4690  		v.AuxInt = off
  4691  		v.Aux = sym
  4692  		v.AddArg(ptr)
  4693  		v.AddArg(x)
  4694  		v.AddArg(mem)
  4695  		return true
  4696  	}
  4697  	// match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
  4698  	// cond:
  4699  	// result: (MOVHstore [off] {sym} ptr x mem)
  4700  	for {
  4701  		off := v.AuxInt
  4702  		sym := v.Aux
  4703  		_ = v.Args[2]
  4704  		ptr := v.Args[0]
  4705  		v_1 := v.Args[1]
  4706  		if v_1.Op != OpMIPSMOVWreg {
  4707  			break
  4708  		}
  4709  		x := v_1.Args[0]
  4710  		mem := v.Args[2]
  4711  		v.reset(OpMIPSMOVHstore)
  4712  		v.AuxInt = off
  4713  		v.Aux = sym
  4714  		v.AddArg(ptr)
  4715  		v.AddArg(x)
  4716  		v.AddArg(mem)
  4717  		return true
  4718  	}
  4719  	return false
  4720  }
  4721  func rewriteValueMIPS_OpMIPSMOVHstorezero_0(v *Value) bool {
  4722  	// match: (MOVHstorezero [off1] {sym} x:(ADDconst [off2] ptr) mem)
  4723  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4724  	// result: (MOVHstorezero [off1+off2] {sym} ptr mem)
  4725  	for {
  4726  		off1 := v.AuxInt
  4727  		sym := v.Aux
  4728  		_ = v.Args[1]
  4729  		x := v.Args[0]
  4730  		if x.Op != OpMIPSADDconst {
  4731  			break
  4732  		}
  4733  		off2 := x.AuxInt
  4734  		ptr := x.Args[0]
  4735  		mem := v.Args[1]
  4736  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4737  			break
  4738  		}
  4739  		v.reset(OpMIPSMOVHstorezero)
  4740  		v.AuxInt = off1 + off2
  4741  		v.Aux = sym
  4742  		v.AddArg(ptr)
  4743  		v.AddArg(mem)
  4744  		return true
  4745  	}
  4746  	// match: (MOVHstorezero [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  4747  	// cond: canMergeSym(sym1,sym2)
  4748  	// result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4749  	for {
  4750  		off1 := v.AuxInt
  4751  		sym1 := v.Aux
  4752  		_ = v.Args[1]
  4753  		v_0 := v.Args[0]
  4754  		if v_0.Op != OpMIPSMOVWaddr {
  4755  			break
  4756  		}
  4757  		off2 := v_0.AuxInt
  4758  		sym2 := v_0.Aux
  4759  		ptr := v_0.Args[0]
  4760  		mem := v.Args[1]
  4761  		if !(canMergeSym(sym1, sym2)) {
  4762  			break
  4763  		}
  4764  		v.reset(OpMIPSMOVHstorezero)
  4765  		v.AuxInt = off1 + off2
  4766  		v.Aux = mergeSym(sym1, sym2)
  4767  		v.AddArg(ptr)
  4768  		v.AddArg(mem)
  4769  		return true
  4770  	}
  4771  	return false
  4772  }
  4773  func rewriteValueMIPS_OpMIPSMOVWload_0(v *Value) bool {
  4774  	// match: (MOVWload [off1] {sym} x:(ADDconst [off2] ptr) mem)
  4775  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4776  	// result: (MOVWload  [off1+off2] {sym} ptr mem)
  4777  	for {
  4778  		off1 := v.AuxInt
  4779  		sym := v.Aux
  4780  		_ = v.Args[1]
  4781  		x := v.Args[0]
  4782  		if x.Op != OpMIPSADDconst {
  4783  			break
  4784  		}
  4785  		off2 := x.AuxInt
  4786  		ptr := x.Args[0]
  4787  		mem := v.Args[1]
  4788  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4789  			break
  4790  		}
  4791  		v.reset(OpMIPSMOVWload)
  4792  		v.AuxInt = off1 + off2
  4793  		v.Aux = sym
  4794  		v.AddArg(ptr)
  4795  		v.AddArg(mem)
  4796  		return true
  4797  	}
  4798  	// match: (MOVWload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  4799  	// cond: canMergeSym(sym1,sym2)
  4800  	// result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4801  	for {
  4802  		off1 := v.AuxInt
  4803  		sym1 := v.Aux
  4804  		_ = v.Args[1]
  4805  		v_0 := v.Args[0]
  4806  		if v_0.Op != OpMIPSMOVWaddr {
  4807  			break
  4808  		}
  4809  		off2 := v_0.AuxInt
  4810  		sym2 := v_0.Aux
  4811  		ptr := v_0.Args[0]
  4812  		mem := v.Args[1]
  4813  		if !(canMergeSym(sym1, sym2)) {
  4814  			break
  4815  		}
  4816  		v.reset(OpMIPSMOVWload)
  4817  		v.AuxInt = off1 + off2
  4818  		v.Aux = mergeSym(sym1, sym2)
  4819  		v.AddArg(ptr)
  4820  		v.AddArg(mem)
  4821  		return true
  4822  	}
  4823  	// match: (MOVWload [off] {sym} ptr (MOVWstore [off2] {sym2} ptr2 x _))
  4824  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  4825  	// result: x
  4826  	for {
  4827  		off := v.AuxInt
  4828  		sym := v.Aux
  4829  		_ = v.Args[1]
  4830  		ptr := v.Args[0]
  4831  		v_1 := v.Args[1]
  4832  		if v_1.Op != OpMIPSMOVWstore {
  4833  			break
  4834  		}
  4835  		off2 := v_1.AuxInt
  4836  		sym2 := v_1.Aux
  4837  		_ = v_1.Args[2]
  4838  		ptr2 := v_1.Args[0]
  4839  		x := v_1.Args[1]
  4840  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  4841  			break
  4842  		}
  4843  		v.reset(OpCopy)
  4844  		v.Type = x.Type
  4845  		v.AddArg(x)
  4846  		return true
  4847  	}
  4848  	return false
  4849  }
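// rewriteValueMIPS_OpMIPSMOVWreg_0 treats the word-to-word move as a no-op:
// a single-use MOVWreg becomes MOVWnop, and MOVWreg of a constant is the constant.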
  4850  func rewriteValueMIPS_OpMIPSMOVWreg_0(v *Value) bool {
  4851  	// match: (MOVWreg x)
  4852  	// cond: x.Uses == 1
  4853  	// result: (MOVWnop x)
  4854  	for {
  4855  		x := v.Args[0]
  4856  		if !(x.Uses == 1) {
  4857  			break
  4858  		}
  4859  		v.reset(OpMIPSMOVWnop)
  4860  		v.AddArg(x)
  4861  		return true
  4862  	}
  4863  	// match: (MOVWreg (MOVWconst [c]))
  4864  	// cond:
  4865  	// result: (MOVWconst [c])
  4866  	for {
  4867  		v_0 := v.Args[0]
  4868  		if v_0.Op != OpMIPSMOVWconst {
  4869  			break
  4870  		}
  4871  		c := v_0.AuxInt
  4872  		v.reset(OpMIPSMOVWconst)
  4873  		v.AuxInt = c
  4874  		return true
  4875  	}
  4876  	return false
  4877  }
  4878  func rewriteValueMIPS_OpMIPSMOVWstore_0(v *Value) bool {
  4879  	// match: (MOVWstore [off1] {sym} x:(ADDconst [off2] ptr) val mem)
  4880  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4881  	// result: (MOVWstore [off1+off2] {sym} ptr val mem)
  4882  	for {
  4883  		off1 := v.AuxInt
  4884  		sym := v.Aux
  4885  		_ = v.Args[2]
  4886  		x := v.Args[0]
  4887  		if x.Op != OpMIPSADDconst {
  4888  			break
  4889  		}
  4890  		off2 := x.AuxInt
  4891  		ptr := x.Args[0]
  4892  		val := v.Args[1]
  4893  		mem := v.Args[2]
  4894  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4895  			break
  4896  		}
  4897  		v.reset(OpMIPSMOVWstore)
  4898  		v.AuxInt = off1 + off2
  4899  		v.Aux = sym
  4900  		v.AddArg(ptr)
  4901  		v.AddArg(val)
  4902  		v.AddArg(mem)
  4903  		return true
  4904  	}
  4905  	// match: (MOVWstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem)
  4906  	// cond: canMergeSym(sym1,sym2)
  4907  	// result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
  4908  	for {
  4909  		off1 := v.AuxInt
  4910  		sym1 := v.Aux
  4911  		_ = v.Args[2]
  4912  		v_0 := v.Args[0]
  4913  		if v_0.Op != OpMIPSMOVWaddr {
  4914  			break
  4915  		}
  4916  		off2 := v_0.AuxInt
  4917  		sym2 := v_0.Aux
  4918  		ptr := v_0.Args[0]
  4919  		val := v.Args[1]
  4920  		mem := v.Args[2]
  4921  		if !(canMergeSym(sym1, sym2)) {
  4922  			break
  4923  		}
  4924  		v.reset(OpMIPSMOVWstore)
  4925  		v.AuxInt = off1 + off2
  4926  		v.Aux = mergeSym(sym1, sym2)
  4927  		v.AddArg(ptr)
  4928  		v.AddArg(val)
  4929  		v.AddArg(mem)
  4930  		return true
  4931  	}
  4932  	// match: (MOVWstore [off] {sym} ptr (MOVWconst [0]) mem)
  4933  	// cond:
  4934  	// result: (MOVWstorezero [off] {sym} ptr mem)
  4935  	for {
  4936  		off := v.AuxInt
  4937  		sym := v.Aux
  4938  		_ = v.Args[2]
  4939  		ptr := v.Args[0]
  4940  		v_1 := v.Args[1]
  4941  		if v_1.Op != OpMIPSMOVWconst {
  4942  			break
  4943  		}
  4944  		if v_1.AuxInt != 0 {
  4945  			break
  4946  		}
  4947  		mem := v.Args[2]
  4948  		v.reset(OpMIPSMOVWstorezero)
  4949  		v.AuxInt = off
  4950  		v.Aux = sym
  4951  		v.AddArg(ptr)
  4952  		v.AddArg(mem)
  4953  		return true
  4954  	}
  4955  	// match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
  4956  	// cond:
  4957  	// result: (MOVWstore [off] {sym} ptr x mem)
  4958  	for {
  4959  		off := v.AuxInt
  4960  		sym := v.Aux
  4961  		_ = v.Args[2]
  4962  		ptr := v.Args[0]
  4963  		v_1 := v.Args[1]
  4964  		if v_1.Op != OpMIPSMOVWreg {
  4965  			break
  4966  		}
  4967  		x := v_1.Args[0]
  4968  		mem := v.Args[2]
  4969  		v.reset(OpMIPSMOVWstore)
  4970  		v.AuxInt = off
  4971  		v.Aux = sym
  4972  		v.AddArg(ptr)
  4973  		v.AddArg(x)
  4974  		v.AddArg(mem)
  4975  		return true
  4976  	}
  4977  	return false
  4978  }
  4979  func rewriteValueMIPS_OpMIPSMOVWstorezero_0(v *Value) bool {
  4980  	// match: (MOVWstorezero [off1] {sym} x:(ADDconst [off2] ptr) mem)
  4981  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4982  	// result: (MOVWstorezero [off1+off2] {sym} ptr mem)
  4983  	for {
  4984  		off1 := v.AuxInt
  4985  		sym := v.Aux
  4986  		_ = v.Args[1]
  4987  		x := v.Args[0]
  4988  		if x.Op != OpMIPSADDconst {
  4989  			break
  4990  		}
  4991  		off2 := x.AuxInt
  4992  		ptr := x.Args[0]
  4993  		mem := v.Args[1]
  4994  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4995  			break
  4996  		}
  4997  		v.reset(OpMIPSMOVWstorezero)
  4998  		v.AuxInt = off1 + off2
  4999  		v.Aux = sym
  5000  		v.AddArg(ptr)
  5001  		v.AddArg(mem)
  5002  		return true
  5003  	}
  5004  	// match: (MOVWstorezero [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  5005  	// cond: canMergeSym(sym1,sym2)
  5006  	// result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  5007  	for {
  5008  		off1 := v.AuxInt
  5009  		sym1 := v.Aux
  5010  		_ = v.Args[1]
  5011  		v_0 := v.Args[0]
  5012  		if v_0.Op != OpMIPSMOVWaddr {
  5013  			break
  5014  		}
  5015  		off2 := v_0.AuxInt
  5016  		sym2 := v_0.Aux
  5017  		ptr := v_0.Args[0]
  5018  		mem := v.Args[1]
  5019  		if !(canMergeSym(sym1, sym2)) {
  5020  			break
  5021  		}
  5022  		v.reset(OpMIPSMOVWstorezero)
  5023  		v.AuxInt = off1 + off2
  5024  		v.Aux = mergeSym(sym1, sym2)
  5025  		v.AddArg(ptr)
  5026  		v.AddArg(mem)
  5027  		return true
  5028  	}
  5029  	return false
  5030  }
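// rewriteValueMIPS_OpMIPSMUL_0 strength-reduces multiplication with a constant
// operand (matched in either argument position): multiply by 0 is 0, by 1 is the
// other operand, by -1 is NEG, by a power of two is SLLconst by its log2, and the
// product of two constants folds to int64(int32(c)*int32(d)), i.e. with 32-bit
// wraparound.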
  5031  func rewriteValueMIPS_OpMIPSMUL_0(v *Value) bool {
  5032  	// match: (MUL (MOVWconst [0]) _)
  5033  	// cond:
  5034  	// result: (MOVWconst [0])
  5035  	for {
  5036  		_ = v.Args[1]
  5037  		v_0 := v.Args[0]
  5038  		if v_0.Op != OpMIPSMOVWconst {
  5039  			break
  5040  		}
  5041  		if v_0.AuxInt != 0 {
  5042  			break
  5043  		}
  5044  		v.reset(OpMIPSMOVWconst)
  5045  		v.AuxInt = 0
  5046  		return true
  5047  	}
  5048  	// match: (MUL _ (MOVWconst [0]))
  5049  	// cond:
  5050  	// result: (MOVWconst [0])
  5051  	for {
  5052  		_ = v.Args[1]
  5053  		v_1 := v.Args[1]
  5054  		if v_1.Op != OpMIPSMOVWconst {
  5055  			break
  5056  		}
  5057  		if v_1.AuxInt != 0 {
  5058  			break
  5059  		}
  5060  		v.reset(OpMIPSMOVWconst)
  5061  		v.AuxInt = 0
  5062  		return true
  5063  	}
  5064  	// match: (MUL (MOVWconst [1]) x)
  5065  	// cond:
  5066  	// result: x
  5067  	for {
  5068  		_ = v.Args[1]
  5069  		v_0 := v.Args[0]
  5070  		if v_0.Op != OpMIPSMOVWconst {
  5071  			break
  5072  		}
  5073  		if v_0.AuxInt != 1 {
  5074  			break
  5075  		}
  5076  		x := v.Args[1]
  5077  		v.reset(OpCopy)
  5078  		v.Type = x.Type
  5079  		v.AddArg(x)
  5080  		return true
  5081  	}
  5082  	// match: (MUL x (MOVWconst [1]))
  5083  	// cond:
  5084  	// result: x
  5085  	for {
  5086  		_ = v.Args[1]
  5087  		x := v.Args[0]
  5088  		v_1 := v.Args[1]
  5089  		if v_1.Op != OpMIPSMOVWconst {
  5090  			break
  5091  		}
  5092  		if v_1.AuxInt != 1 {
  5093  			break
  5094  		}
  5095  		v.reset(OpCopy)
  5096  		v.Type = x.Type
  5097  		v.AddArg(x)
  5098  		return true
  5099  	}
  5100  	// match: (MUL (MOVWconst [-1]) x)
  5101  	// cond:
  5102  	// result: (NEG x)
  5103  	for {
  5104  		_ = v.Args[1]
  5105  		v_0 := v.Args[0]
  5106  		if v_0.Op != OpMIPSMOVWconst {
  5107  			break
  5108  		}
  5109  		if v_0.AuxInt != -1 {
  5110  			break
  5111  		}
  5112  		x := v.Args[1]
  5113  		v.reset(OpMIPSNEG)
  5114  		v.AddArg(x)
  5115  		return true
  5116  	}
  5117  	// match: (MUL x (MOVWconst [-1]))
  5118  	// cond:
  5119  	// result: (NEG x)
  5120  	for {
  5121  		_ = v.Args[1]
  5122  		x := v.Args[0]
  5123  		v_1 := v.Args[1]
  5124  		if v_1.Op != OpMIPSMOVWconst {
  5125  			break
  5126  		}
  5127  		if v_1.AuxInt != -1 {
  5128  			break
  5129  		}
  5130  		v.reset(OpMIPSNEG)
  5131  		v.AddArg(x)
  5132  		return true
  5133  	}
  5134  	// match: (MUL (MOVWconst [c]) x)
  5135  	// cond: isPowerOfTwo(int64(uint32(c)))
  5136  	// result: (SLLconst [log2(int64(uint32(c)))] x)
  5137  	for {
  5138  		_ = v.Args[1]
  5139  		v_0 := v.Args[0]
  5140  		if v_0.Op != OpMIPSMOVWconst {
  5141  			break
  5142  		}
  5143  		c := v_0.AuxInt
  5144  		x := v.Args[1]
  5145  		if !(isPowerOfTwo(int64(uint32(c)))) {
  5146  			break
  5147  		}
  5148  		v.reset(OpMIPSSLLconst)
  5149  		v.AuxInt = log2(int64(uint32(c)))
  5150  		v.AddArg(x)
  5151  		return true
  5152  	}
  5153  	// match: (MUL x (MOVWconst [c]))
  5154  	// cond: isPowerOfTwo(int64(uint32(c)))
  5155  	// result: (SLLconst [log2(int64(uint32(c)))] x)
  5156  	for {
  5157  		_ = v.Args[1]
  5158  		x := v.Args[0]
  5159  		v_1 := v.Args[1]
  5160  		if v_1.Op != OpMIPSMOVWconst {
  5161  			break
  5162  		}
  5163  		c := v_1.AuxInt
  5164  		if !(isPowerOfTwo(int64(uint32(c)))) {
  5165  			break
  5166  		}
  5167  		v.reset(OpMIPSSLLconst)
  5168  		v.AuxInt = log2(int64(uint32(c)))
  5169  		v.AddArg(x)
  5170  		return true
  5171  	}
  5172  	// match: (MUL (MOVWconst [c]) (MOVWconst [d]))
  5173  	// cond:
  5174  	// result: (MOVWconst [int64(int32(c)*int32(d))])
  5175  	for {
  5176  		_ = v.Args[1]
  5177  		v_0 := v.Args[0]
  5178  		if v_0.Op != OpMIPSMOVWconst {
  5179  			break
  5180  		}
  5181  		c := v_0.AuxInt
  5182  		v_1 := v.Args[1]
  5183  		if v_1.Op != OpMIPSMOVWconst {
  5184  			break
  5185  		}
  5186  		d := v_1.AuxInt
  5187  		v.reset(OpMIPSMOVWconst)
  5188  		v.AuxInt = int64(int32(c) * int32(d))
  5189  		return true
  5190  	}
  5191  	// match: (MUL (MOVWconst [d]) (MOVWconst [c]))
  5192  	// cond:
  5193  	// result: (MOVWconst [int64(int32(c)*int32(d))])
  5194  	for {
  5195  		_ = v.Args[1]
  5196  		v_0 := v.Args[0]
  5197  		if v_0.Op != OpMIPSMOVWconst {
  5198  			break
  5199  		}
  5200  		d := v_0.AuxInt
  5201  		v_1 := v.Args[1]
  5202  		if v_1.Op != OpMIPSMOVWconst {
  5203  			break
  5204  		}
  5205  		c := v_1.AuxInt
  5206  		v.reset(OpMIPSMOVWconst)
  5207  		v.AuxInt = int64(int32(c) * int32(d))
  5208  		return true
  5209  	}
  5210  	return false
  5211  }
  5212  func rewriteValueMIPS_OpMIPSNEG_0(v *Value) bool {
  5213  	// match: (NEG (MOVWconst [c]))
  5214  	// cond:
  5215  	// result: (MOVWconst [int64(int32(-c))])
  5216  	for {
  5217  		v_0 := v.Args[0]
  5218  		if v_0.Op != OpMIPSMOVWconst {
  5219  			break
  5220  		}
  5221  		c := v_0.AuxInt
  5222  		v.reset(OpMIPSMOVWconst)
  5223  		v.AuxInt = int64(int32(-c))
  5224  		return true
  5225  	}
  5226  	return false
  5227  }
  5228  func rewriteValueMIPS_OpMIPSNOR_0(v *Value) bool {
  5229  	// match: (NOR x (MOVWconst [c]))
  5230  	// cond:
  5231  	// result: (NORconst [c] x)
  5232  	for {
  5233  		_ = v.Args[1]
  5234  		x := v.Args[0]
  5235  		v_1 := v.Args[1]
  5236  		if v_1.Op != OpMIPSMOVWconst {
  5237  			break
  5238  		}
  5239  		c := v_1.AuxInt
  5240  		v.reset(OpMIPSNORconst)
  5241  		v.AuxInt = c
  5242  		v.AddArg(x)
  5243  		return true
  5244  	}
  5245  	// match: (NOR (MOVWconst [c]) x)
  5246  	// cond:
  5247  	// result: (NORconst [c] x)
  5248  	for {
  5249  		_ = v.Args[1]
  5250  		v_0 := v.Args[0]
  5251  		if v_0.Op != OpMIPSMOVWconst {
  5252  			break
  5253  		}
  5254  		c := v_0.AuxInt
  5255  		x := v.Args[1]
  5256  		v.reset(OpMIPSNORconst)
  5257  		v.AuxInt = c
  5258  		v.AddArg(x)
  5259  		return true
  5260  	}
  5261  	return false
  5262  }
  5263  func rewriteValueMIPS_OpMIPSNORconst_0(v *Value) bool {
  5264  	// match: (NORconst [c] (MOVWconst [d]))
  5265  	// cond:
  5266  	// result: (MOVWconst [^(c|d)])
  5267  	for {
  5268  		c := v.AuxInt
  5269  		v_0 := v.Args[0]
  5270  		if v_0.Op != OpMIPSMOVWconst {
  5271  			break
  5272  		}
  5273  		d := v_0.AuxInt
  5274  		v.reset(OpMIPSMOVWconst)
  5275  		v.AuxInt = ^(c | d)
  5276  		return true
  5277  	}
  5278  	return false
  5279  }
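// rewriteValueMIPS_OpMIPSOR_0 canonicalizes OR with a constant operand into ORconst,
// folds (OR x x) to x, and merges two SGTUzero ("!= 0") results into a single
// SGTUzero of the OR of their operands, since x != 0 || y != 0 equals (x|y) != 0.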
  5280  func rewriteValueMIPS_OpMIPSOR_0(v *Value) bool {
  5281  	b := v.Block
  5282  	_ = b
  5283  	// match: (OR x (MOVWconst [c]))
  5284  	// cond:
  5285  	// result: (ORconst  [c] x)
  5286  	for {
  5287  		_ = v.Args[1]
  5288  		x := v.Args[0]
  5289  		v_1 := v.Args[1]
  5290  		if v_1.Op != OpMIPSMOVWconst {
  5291  			break
  5292  		}
  5293  		c := v_1.AuxInt
  5294  		v.reset(OpMIPSORconst)
  5295  		v.AuxInt = c
  5296  		v.AddArg(x)
  5297  		return true
  5298  	}
  5299  	// match: (OR (MOVWconst [c]) x)
  5300  	// cond:
  5301  	// result: (ORconst  [c] x)
  5302  	for {
  5303  		_ = v.Args[1]
  5304  		v_0 := v.Args[0]
  5305  		if v_0.Op != OpMIPSMOVWconst {
  5306  			break
  5307  		}
  5308  		c := v_0.AuxInt
  5309  		x := v.Args[1]
  5310  		v.reset(OpMIPSORconst)
  5311  		v.AuxInt = c
  5312  		v.AddArg(x)
  5313  		return true
  5314  	}
  5315  	// match: (OR x x)
  5316  	// cond:
  5317  	// result: x
  5318  	for {
  5319  		_ = v.Args[1]
  5320  		x := v.Args[0]
  5321  		if x != v.Args[1] {
  5322  			break
  5323  		}
  5324  		v.reset(OpCopy)
  5325  		v.Type = x.Type
  5326  		v.AddArg(x)
  5327  		return true
  5328  	}
  5329  	// match: (OR (SGTUzero x) (SGTUzero y))
  5330  	// cond:
  5331  	// result: (SGTUzero (OR <x.Type> x y))
  5332  	for {
  5333  		_ = v.Args[1]
  5334  		v_0 := v.Args[0]
  5335  		if v_0.Op != OpMIPSSGTUzero {
  5336  			break
  5337  		}
  5338  		x := v_0.Args[0]
  5339  		v_1 := v.Args[1]
  5340  		if v_1.Op != OpMIPSSGTUzero {
  5341  			break
  5342  		}
  5343  		y := v_1.Args[0]
  5344  		v.reset(OpMIPSSGTUzero)
  5345  		v0 := b.NewValue0(v.Pos, OpMIPSOR, x.Type)
  5346  		v0.AddArg(x)
  5347  		v0.AddArg(y)
  5348  		v.AddArg(v0)
  5349  		return true
  5350  	}
  5351  	// match: (OR (SGTUzero y) (SGTUzero x))
  5352  	// cond:
  5353  	// result: (SGTUzero (OR <x.Type> x y))
  5354  	for {
  5355  		_ = v.Args[1]
  5356  		v_0 := v.Args[0]
  5357  		if v_0.Op != OpMIPSSGTUzero {
  5358  			break
  5359  		}
  5360  		y := v_0.Args[0]
  5361  		v_1 := v.Args[1]
  5362  		if v_1.Op != OpMIPSSGTUzero {
  5363  			break
  5364  		}
  5365  		x := v_1.Args[0]
  5366  		v.reset(OpMIPSSGTUzero)
  5367  		v0 := b.NewValue0(v.Pos, OpMIPSOR, x.Type)
  5368  		v0.AddArg(x)
  5369  		v0.AddArg(y)
  5370  		v.AddArg(v0)
  5371  		return true
  5372  	}
  5373  	return false
  5374  }
  5375  func rewriteValueMIPS_OpMIPSORconst_0(v *Value) bool {
  5376  	// match: (ORconst [0] x)
  5377  	// cond:
  5378  	// result: x
  5379  	for {
  5380  		if v.AuxInt != 0 {
  5381  			break
  5382  		}
  5383  		x := v.Args[0]
  5384  		v.reset(OpCopy)
  5385  		v.Type = x.Type
  5386  		v.AddArg(x)
  5387  		return true
  5388  	}
  5389  	// match: (ORconst [-1] _)
  5390  	// cond:
  5391  	// result: (MOVWconst [-1])
  5392  	for {
  5393  		if v.AuxInt != -1 {
  5394  			break
  5395  		}
  5396  		v.reset(OpMIPSMOVWconst)
  5397  		v.AuxInt = -1
  5398  		return true
  5399  	}
  5400  	// match: (ORconst [c] (MOVWconst [d]))
  5401  	// cond:
  5402  	// result: (MOVWconst [c|d])
  5403  	for {
  5404  		c := v.AuxInt
  5405  		v_0 := v.Args[0]
  5406  		if v_0.Op != OpMIPSMOVWconst {
  5407  			break
  5408  		}
  5409  		d := v_0.AuxInt
  5410  		v.reset(OpMIPSMOVWconst)
  5411  		v.AuxInt = c | d
  5412  		return true
  5413  	}
  5414  	// match: (ORconst [c] (ORconst [d] x))
  5415  	// cond:
  5416  	// result: (ORconst [c|d] x)
  5417  	for {
  5418  		c := v.AuxInt
  5419  		v_0 := v.Args[0]
  5420  		if v_0.Op != OpMIPSORconst {
  5421  			break
  5422  		}
  5423  		d := v_0.AuxInt
  5424  		x := v_0.Args[0]
  5425  		v.reset(OpMIPSORconst)
  5426  		v.AuxInt = c | d
  5427  		v.AddArg(x)
  5428  		return true
  5429  	}
  5430  	return false
  5431  }
  5432  func rewriteValueMIPS_OpMIPSSGT_0(v *Value) bool {
  5433  	// match: (SGT (MOVWconst [c]) x)
  5434  	// cond:
  5435  	// result: (SGTconst  [c] x)
  5436  	for {
  5437  		_ = v.Args[1]
  5438  		v_0 := v.Args[0]
  5439  		if v_0.Op != OpMIPSMOVWconst {
  5440  			break
  5441  		}
  5442  		c := v_0.AuxInt
  5443  		x := v.Args[1]
  5444  		v.reset(OpMIPSSGTconst)
  5445  		v.AuxInt = c
  5446  		v.AddArg(x)
  5447  		return true
  5448  	}
  5449  	// match: (SGT x (MOVWconst [0]))
  5450  	// cond:
  5451  	// result: (SGTzero x)
  5452  	for {
  5453  		_ = v.Args[1]
  5454  		x := v.Args[0]
  5455  		v_1 := v.Args[1]
  5456  		if v_1.Op != OpMIPSMOVWconst {
  5457  			break
  5458  		}
  5459  		if v_1.AuxInt != 0 {
  5460  			break
  5461  		}
  5462  		v.reset(OpMIPSSGTzero)
  5463  		v.AddArg(x)
  5464  		return true
  5465  	}
  5466  	return false
  5467  }
  5468  func rewriteValueMIPS_OpMIPSSGTU_0(v *Value) bool {
  5469  	// match: (SGTU (MOVWconst [c]) x)
  5470  	// cond:
  5471  	// result: (SGTUconst [c] x)
  5472  	for {
  5473  		_ = v.Args[1]
  5474  		v_0 := v.Args[0]
  5475  		if v_0.Op != OpMIPSMOVWconst {
  5476  			break
  5477  		}
  5478  		c := v_0.AuxInt
  5479  		x := v.Args[1]
  5480  		v.reset(OpMIPSSGTUconst)
  5481  		v.AuxInt = c
  5482  		v.AddArg(x)
  5483  		return true
  5484  	}
  5485  	// match: (SGTU x (MOVWconst [0]))
  5486  	// cond:
  5487  	// result: (SGTUzero x)
  5488  	for {
  5489  		_ = v.Args[1]
  5490  		x := v.Args[0]
  5491  		v_1 := v.Args[1]
  5492  		if v_1.Op != OpMIPSMOVWconst {
  5493  			break
  5494  		}
  5495  		if v_1.AuxInt != 0 {
  5496  			break
  5497  		}
  5498  		v.reset(OpMIPSSGTUzero)
  5499  		v.AddArg(x)
  5500  		return true
  5501  	}
  5502  	return false
  5503  }
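// rewriteValueMIPS_OpMIPSSGTUconst_0 folds the unsigned comparison c > x: two
// constants compare directly, and operands with a known upper bound (zero-extended
// bytes/halfwords, ANDconst, SRLconst) fold to 1 whenever that bound is below c,
// e.g. a MOVBUreg operand is at most 0xff, so 0xff < uint32(c) makes the result 1.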
  5504  func rewriteValueMIPS_OpMIPSSGTUconst_0(v *Value) bool {
  5505  	// match: (SGTUconst [c] (MOVWconst [d]))
  5506  	// cond: uint32(c)>uint32(d)
  5507  	// result: (MOVWconst [1])
  5508  	for {
  5509  		c := v.AuxInt
  5510  		v_0 := v.Args[0]
  5511  		if v_0.Op != OpMIPSMOVWconst {
  5512  			break
  5513  		}
  5514  		d := v_0.AuxInt
  5515  		if !(uint32(c) > uint32(d)) {
  5516  			break
  5517  		}
  5518  		v.reset(OpMIPSMOVWconst)
  5519  		v.AuxInt = 1
  5520  		return true
  5521  	}
  5522  	// match: (SGTUconst [c] (MOVWconst [d]))
  5523  	// cond: uint32(c)<=uint32(d)
  5524  	// result: (MOVWconst [0])
  5525  	for {
  5526  		c := v.AuxInt
  5527  		v_0 := v.Args[0]
  5528  		if v_0.Op != OpMIPSMOVWconst {
  5529  			break
  5530  		}
  5531  		d := v_0.AuxInt
  5532  		if !(uint32(c) <= uint32(d)) {
  5533  			break
  5534  		}
  5535  		v.reset(OpMIPSMOVWconst)
  5536  		v.AuxInt = 0
  5537  		return true
  5538  	}
  5539  	// match: (SGTUconst [c] (MOVBUreg _))
  5540  	// cond: 0xff < uint32(c)
  5541  	// result: (MOVWconst [1])
  5542  	for {
  5543  		c := v.AuxInt
  5544  		v_0 := v.Args[0]
  5545  		if v_0.Op != OpMIPSMOVBUreg {
  5546  			break
  5547  		}
  5548  		if !(0xff < uint32(c)) {
  5549  			break
  5550  		}
  5551  		v.reset(OpMIPSMOVWconst)
  5552  		v.AuxInt = 1
  5553  		return true
  5554  	}
  5555  	// match: (SGTUconst [c] (MOVHUreg _))
  5556  	// cond: 0xffff < uint32(c)
  5557  	// result: (MOVWconst [1])
  5558  	for {
  5559  		c := v.AuxInt
  5560  		v_0 := v.Args[0]
  5561  		if v_0.Op != OpMIPSMOVHUreg {
  5562  			break
  5563  		}
  5564  		if !(0xffff < uint32(c)) {
  5565  			break
  5566  		}
  5567  		v.reset(OpMIPSMOVWconst)
  5568  		v.AuxInt = 1
  5569  		return true
  5570  	}
  5571  	// match: (SGTUconst [c] (ANDconst [m] _))
  5572  	// cond: uint32(m) < uint32(c)
  5573  	// result: (MOVWconst [1])
  5574  	for {
  5575  		c := v.AuxInt
  5576  		v_0 := v.Args[0]
  5577  		if v_0.Op != OpMIPSANDconst {
  5578  			break
  5579  		}
  5580  		m := v_0.AuxInt
  5581  		if !(uint32(m) < uint32(c)) {
  5582  			break
  5583  		}
  5584  		v.reset(OpMIPSMOVWconst)
  5585  		v.AuxInt = 1
  5586  		return true
  5587  	}
  5588  	// match: (SGTUconst [c] (SRLconst _ [d]))
  5589  	// cond: uint32(d) <= 31 && 1<<(32-uint32(d)) <= uint32(c)
  5590  	// result: (MOVWconst [1])
  5591  	for {
  5592  		c := v.AuxInt
  5593  		v_0 := v.Args[0]
  5594  		if v_0.Op != OpMIPSSRLconst {
  5595  			break
  5596  		}
  5597  		d := v_0.AuxInt
  5598  		if !(uint32(d) <= 31 && 1<<(32-uint32(d)) <= uint32(c)) {
  5599  			break
  5600  		}
  5601  		v.reset(OpMIPSMOVWconst)
  5602  		v.AuxInt = 1
  5603  		return true
  5604  	}
  5605  	return false
  5606  }
  5607  func rewriteValueMIPS_OpMIPSSGTUzero_0(v *Value) bool {
  5608  	// match: (SGTUzero (MOVWconst [d]))
  5609  	// cond: uint32(d) != 0
  5610  	// result: (MOVWconst [1])
  5611  	for {
  5612  		v_0 := v.Args[0]
  5613  		if v_0.Op != OpMIPSMOVWconst {
  5614  			break
  5615  		}
  5616  		d := v_0.AuxInt
  5617  		if !(uint32(d) != 0) {
  5618  			break
  5619  		}
  5620  		v.reset(OpMIPSMOVWconst)
  5621  		v.AuxInt = 1
  5622  		return true
  5623  	}
  5624  	// match: (SGTUzero (MOVWconst [d]))
  5625  	// cond: uint32(d) == 0
  5626  	// result: (MOVWconst [0])
  5627  	for {
  5628  		v_0 := v.Args[0]
  5629  		if v_0.Op != OpMIPSMOVWconst {
  5630  			break
  5631  		}
  5632  		d := v_0.AuxInt
  5633  		if !(uint32(d) == 0) {
  5634  			break
  5635  		}
  5636  		v.reset(OpMIPSMOVWconst)
  5637  		v.AuxInt = 0
  5638  		return true
  5639  	}
  5640  	return false
  5641  }
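// rewriteValueMIPS_OpMIPSSGTconst_0 folds the signed comparison c > x: two constants
// compare directly, and sign- or zero-extended operands fold to 1 or 0 when c lies
// outside the extension's value range (e.g. 0x7f < int32(c) for MOVBreg makes the
// comparison always true, while int32(c) <= -0x80 makes it always false).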
  5642  func rewriteValueMIPS_OpMIPSSGTconst_0(v *Value) bool {
  5643  	// match: (SGTconst [c] (MOVWconst [d]))
  5644  	// cond: int32(c) > int32(d)
  5645  	// result: (MOVWconst [1])
  5646  	for {
  5647  		c := v.AuxInt
  5648  		v_0 := v.Args[0]
  5649  		if v_0.Op != OpMIPSMOVWconst {
  5650  			break
  5651  		}
  5652  		d := v_0.AuxInt
  5653  		if !(int32(c) > int32(d)) {
  5654  			break
  5655  		}
  5656  		v.reset(OpMIPSMOVWconst)
  5657  		v.AuxInt = 1
  5658  		return true
  5659  	}
  5660  	// match: (SGTconst [c] (MOVWconst [d]))
  5661  	// cond: int32(c) <= int32(d)
  5662  	// result: (MOVWconst [0])
  5663  	for {
  5664  		c := v.AuxInt
  5665  		v_0 := v.Args[0]
  5666  		if v_0.Op != OpMIPSMOVWconst {
  5667  			break
  5668  		}
  5669  		d := v_0.AuxInt
  5670  		if !(int32(c) <= int32(d)) {
  5671  			break
  5672  		}
  5673  		v.reset(OpMIPSMOVWconst)
  5674  		v.AuxInt = 0
  5675  		return true
  5676  	}
  5677  	// match: (SGTconst [c] (MOVBreg _))
  5678  	// cond: 0x7f < int32(c)
  5679  	// result: (MOVWconst [1])
  5680  	for {
  5681  		c := v.AuxInt
  5682  		v_0 := v.Args[0]
  5683  		if v_0.Op != OpMIPSMOVBreg {
  5684  			break
  5685  		}
  5686  		if !(0x7f < int32(c)) {
  5687  			break
  5688  		}
  5689  		v.reset(OpMIPSMOVWconst)
  5690  		v.AuxInt = 1
  5691  		return true
  5692  	}
  5693  	// match: (SGTconst [c] (MOVBreg _))
  5694  	// cond: int32(c) <= -0x80
  5695  	// result: (MOVWconst [0])
  5696  	for {
  5697  		c := v.AuxInt
  5698  		v_0 := v.Args[0]
  5699  		if v_0.Op != OpMIPSMOVBreg {
  5700  			break
  5701  		}
  5702  		if !(int32(c) <= -0x80) {
  5703  			break
  5704  		}
  5705  		v.reset(OpMIPSMOVWconst)
  5706  		v.AuxInt = 0
  5707  		return true
  5708  	}
  5709  	// match: (SGTconst [c] (MOVBUreg _))
  5710  	// cond: 0xff < int32(c)
  5711  	// result: (MOVWconst [1])
  5712  	for {
  5713  		c := v.AuxInt
  5714  		v_0 := v.Args[0]
  5715  		if v_0.Op != OpMIPSMOVBUreg {
  5716  			break
  5717  		}
  5718  		if !(0xff < int32(c)) {
  5719  			break
  5720  		}
  5721  		v.reset(OpMIPSMOVWconst)
  5722  		v.AuxInt = 1
  5723  		return true
  5724  	}
  5725  	// match: (SGTconst [c] (MOVBUreg _))
  5726  	// cond: int32(c) < 0
  5727  	// result: (MOVWconst [0])
  5728  	for {
  5729  		c := v.AuxInt
  5730  		v_0 := v.Args[0]
  5731  		if v_0.Op != OpMIPSMOVBUreg {
  5732  			break
  5733  		}
  5734  		if !(int32(c) < 0) {
  5735  			break
  5736  		}
  5737  		v.reset(OpMIPSMOVWconst)
  5738  		v.AuxInt = 0
  5739  		return true
  5740  	}
  5741  	// match: (SGTconst [c] (MOVHreg _))
  5742  	// cond: 0x7fff < int32(c)
  5743  	// result: (MOVWconst [1])
  5744  	for {
  5745  		c := v.AuxInt
  5746  		v_0 := v.Args[0]
  5747  		if v_0.Op != OpMIPSMOVHreg {
  5748  			break
  5749  		}
  5750  		if !(0x7fff < int32(c)) {
  5751  			break
  5752  		}
  5753  		v.reset(OpMIPSMOVWconst)
  5754  		v.AuxInt = 1
  5755  		return true
  5756  	}
  5757  	// match: (SGTconst [c] (MOVHreg _))
  5758  	// cond: int32(c) <= -0x8000
  5759  	// result: (MOVWconst [0])
  5760  	for {
  5761  		c := v.AuxInt
  5762  		v_0 := v.Args[0]
  5763  		if v_0.Op != OpMIPSMOVHreg {
  5764  			break
  5765  		}
  5766  		if !(int32(c) <= -0x8000) {
  5767  			break
  5768  		}
  5769  		v.reset(OpMIPSMOVWconst)
  5770  		v.AuxInt = 0
  5771  		return true
  5772  	}
  5773  	// match: (SGTconst [c] (MOVHUreg _))
  5774  	// cond: 0xffff < int32(c)
  5775  	// result: (MOVWconst [1])
  5776  	for {
  5777  		c := v.AuxInt
  5778  		v_0 := v.Args[0]
  5779  		if v_0.Op != OpMIPSMOVHUreg {
  5780  			break
  5781  		}
  5782  		if !(0xffff < int32(c)) {
  5783  			break
  5784  		}
  5785  		v.reset(OpMIPSMOVWconst)
  5786  		v.AuxInt = 1
  5787  		return true
  5788  	}
  5789  	// match: (SGTconst [c] (MOVHUreg _))
  5790  	// cond: int32(c) < 0
  5791  	// result: (MOVWconst [0])
  5792  	for {
  5793  		c := v.AuxInt
  5794  		v_0 := v.Args[0]
  5795  		if v_0.Op != OpMIPSMOVHUreg {
  5796  			break
  5797  		}
  5798  		if !(int32(c) < 0) {
  5799  			break
  5800  		}
  5801  		v.reset(OpMIPSMOVWconst)
  5802  		v.AuxInt = 0
  5803  		return true
  5804  	}
  5805  	return false
  5806  }
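// rewriteValueMIPS_OpMIPSSGTconst_10 carries on from rewriteValueMIPS_OpMIPSSGTconst_0
// above (the generator splits long rule lists across numbered helper functions),
// covering the ANDconst and SRLconst operand-bound cases.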
  5807  func rewriteValueMIPS_OpMIPSSGTconst_10(v *Value) bool {
  5808  	// match: (SGTconst [c] (ANDconst [m] _))
  5809  	// cond: 0 <= int32(m) && int32(m) < int32(c)
  5810  	// result: (MOVWconst [1])
  5811  	for {
  5812  		c := v.AuxInt
  5813  		v_0 := v.Args[0]
  5814  		if v_0.Op != OpMIPSANDconst {
  5815  			break
  5816  		}
  5817  		m := v_0.AuxInt
  5818  		if !(0 <= int32(m) && int32(m) < int32(c)) {
  5819  			break
  5820  		}
  5821  		v.reset(OpMIPSMOVWconst)
  5822  		v.AuxInt = 1
  5823  		return true
  5824  	}
  5825  	// match: (SGTconst [c] (SRLconst _ [d]))
  5826  	// cond: 0 <= int32(c) && uint32(d) <= 31 && 1<<(32-uint32(d)) <= int32(c)
  5827  	// result: (MOVWconst [1])
  5828  	for {
  5829  		c := v.AuxInt
  5830  		v_0 := v.Args[0]
  5831  		if v_0.Op != OpMIPSSRLconst {
  5832  			break
  5833  		}
  5834  		d := v_0.AuxInt
  5835  		if !(0 <= int32(c) && uint32(d) <= 31 && 1<<(32-uint32(d)) <= int32(c)) {
  5836  			break
  5837  		}
  5838  		v.reset(OpMIPSMOVWconst)
  5839  		v.AuxInt = 1
  5840  		return true
  5841  	}
  5842  	return false
  5843  }
  5844  func rewriteValueMIPS_OpMIPSSGTzero_0(v *Value) bool {
  5845  	// match: (SGTzero (MOVWconst [d]))
  5846  	// cond: int32(d) > 0
  5847  	// result: (MOVWconst [1])
  5848  	for {
  5849  		v_0 := v.Args[0]
  5850  		if v_0.Op != OpMIPSMOVWconst {
  5851  			break
  5852  		}
  5853  		d := v_0.AuxInt
  5854  		if !(int32(d) > 0) {
  5855  			break
  5856  		}
  5857  		v.reset(OpMIPSMOVWconst)
  5858  		v.AuxInt = 1
  5859  		return true
  5860  	}
  5861  	// match: (SGTzero (MOVWconst [d]))
  5862  	// cond: int32(d) <= 0
  5863  	// result: (MOVWconst [0])
  5864  	for {
  5865  		v_0 := v.Args[0]
  5866  		if v_0.Op != OpMIPSMOVWconst {
  5867  			break
  5868  		}
  5869  		d := v_0.AuxInt
  5870  		if !(int32(d) <= 0) {
  5871  			break
  5872  		}
  5873  		v.reset(OpMIPSMOVWconst)
  5874  		v.AuxInt = 0
  5875  		return true
  5876  	}
  5877  	return false
  5878  }
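// rewriteValueMIPS_OpMIPSSLL_0 handles left shifts by a constant count: counts of
// 32 or more fold to the constant 0, and any other constant count becomes SLLconst.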
  5879  func rewriteValueMIPS_OpMIPSSLL_0(v *Value) bool {
  5880  	// match: (SLL _ (MOVWconst [c]))
  5881  	// cond: uint32(c)>=32
  5882  	// result: (MOVWconst [0])
  5883  	for {
  5884  		_ = v.Args[1]
  5885  		v_1 := v.Args[1]
  5886  		if v_1.Op != OpMIPSMOVWconst {
  5887  			break
  5888  		}
  5889  		c := v_1.AuxInt
  5890  		if !(uint32(c) >= 32) {
  5891  			break
  5892  		}
  5893  		v.reset(OpMIPSMOVWconst)
  5894  		v.AuxInt = 0
  5895  		return true
  5896  	}
  5897  	// match: (SLL x (MOVWconst [c]))
  5898  	// cond:
  5899  	// result: (SLLconst x [c])
  5900  	for {
  5901  		_ = v.Args[1]
  5902  		x := v.Args[0]
  5903  		v_1 := v.Args[1]
  5904  		if v_1.Op != OpMIPSMOVWconst {
  5905  			break
  5906  		}
  5907  		c := v_1.AuxInt
  5908  		v.reset(OpMIPSSLLconst)
  5909  		v.AuxInt = c
  5910  		v.AddArg(x)
  5911  		return true
  5912  	}
  5913  	return false
  5914  }
  5915  func rewriteValueMIPS_OpMIPSSLLconst_0(v *Value) bool {
  5916  	// match: (SLLconst [c] (MOVWconst [d]))
  5917  	// cond:
  5918  	// result: (MOVWconst [int64(int32(uint32(d)<<uint32(c)))])
  5919  	for {
  5920  		c := v.AuxInt
  5921  		v_0 := v.Args[0]
  5922  		if v_0.Op != OpMIPSMOVWconst {
  5923  			break
  5924  		}
  5925  		d := v_0.AuxInt
  5926  		v.reset(OpMIPSMOVWconst)
  5927  		v.AuxInt = int64(int32(uint32(d) << uint32(c)))
  5928  		return true
  5929  	}
  5930  	return false
  5931  }
  5932  func rewriteValueMIPS_OpMIPSSRA_0(v *Value) bool {
  5933  	// match: (SRA x (MOVWconst [c]))
  5934  	// cond: uint32(c)>=32
  5935  	// result: (SRAconst x [31])
  5936  	for {
  5937  		_ = v.Args[1]
  5938  		x := v.Args[0]
  5939  		v_1 := v.Args[1]
  5940  		if v_1.Op != OpMIPSMOVWconst {
  5941  			break
  5942  		}
  5943  		c := v_1.AuxInt
  5944  		if !(uint32(c) >= 32) {
  5945  			break
  5946  		}
  5947  		v.reset(OpMIPSSRAconst)
  5948  		v.AuxInt = 31
  5949  		v.AddArg(x)
  5950  		return true
  5951  	}
  5952  	// match: (SRA x (MOVWconst [c]))
  5953  	// cond:
  5954  	// result: (SRAconst x [c])
  5955  	for {
  5956  		_ = v.Args[1]
  5957  		x := v.Args[0]
  5958  		v_1 := v.Args[1]
  5959  		if v_1.Op != OpMIPSMOVWconst {
  5960  			break
  5961  		}
  5962  		c := v_1.AuxInt
  5963  		v.reset(OpMIPSSRAconst)
  5964  		v.AuxInt = c
  5965  		v.AddArg(x)
  5966  		return true
  5967  	}
  5968  	return false
  5969  }
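// SRAconst of a MOVWconst folds the arithmetic shift at compile time,
// preserving the sign: for example, SRAconst [1] (MOVWconst [-8]) becomes
// MOVWconst [-4].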
  5970  func rewriteValueMIPS_OpMIPSSRAconst_0(v *Value) bool {
  5971  	// match: (SRAconst [c] (MOVWconst [d]))
  5972  	// cond:
  5973  	// result: (MOVWconst [int64(int32(d)>>uint32(c))])
  5974  	for {
  5975  		c := v.AuxInt
  5976  		v_0 := v.Args[0]
  5977  		if v_0.Op != OpMIPSMOVWconst {
  5978  			break
  5979  		}
  5980  		d := v_0.AuxInt
  5981  		v.reset(OpMIPSMOVWconst)
  5982  		v.AuxInt = int64(int32(d) >> uint32(c))
  5983  		return true
  5984  	}
  5985  	return false
  5986  }
  5987  func rewriteValueMIPS_OpMIPSSRL_0(v *Value) bool {
  5988  	// match: (SRL _ (MOVWconst [c]))
  5989  	// cond: uint32(c)>=32
  5990  	// result: (MOVWconst [0])
  5991  	for {
  5992  		_ = v.Args[1]
  5993  		v_1 := v.Args[1]
  5994  		if v_1.Op != OpMIPSMOVWconst {
  5995  			break
  5996  		}
  5997  		c := v_1.AuxInt
  5998  		if !(uint32(c) >= 32) {
  5999  			break
  6000  		}
  6001  		v.reset(OpMIPSMOVWconst)
  6002  		v.AuxInt = 0
  6003  		return true
  6004  	}
  6005  	// match: (SRL x (MOVWconst [c]))
  6006  	// cond:
  6007  	// result: (SRLconst x [c])
  6008  	for {
  6009  		_ = v.Args[1]
  6010  		x := v.Args[0]
  6011  		v_1 := v.Args[1]
  6012  		if v_1.Op != OpMIPSMOVWconst {
  6013  			break
  6014  		}
  6015  		c := v_1.AuxInt
  6016  		v.reset(OpMIPSSRLconst)
  6017  		v.AuxInt = c
  6018  		v.AddArg(x)
  6019  		return true
  6020  	}
  6021  	return false
  6022  }
  6023  func rewriteValueMIPS_OpMIPSSRLconst_0(v *Value) bool {
  6024  	// match: (SRLconst [c] (MOVWconst [d]))
  6025  	// cond:
  6026  	// result: (MOVWconst [int64(uint32(d)>>uint32(c))])
  6027  	for {
  6028  		c := v.AuxInt
  6029  		v_0 := v.Args[0]
  6030  		if v_0.Op != OpMIPSMOVWconst {
  6031  			break
  6032  		}
  6033  		d := v_0.AuxInt
  6034  		v.reset(OpMIPSMOVWconst)
  6035  		v.AuxInt = int64(uint32(d) >> uint32(c))
  6036  		return true
  6037  	}
  6038  	return false
  6039  }
  6040  func rewriteValueMIPS_OpMIPSSUB_0(v *Value) bool {
  6041  	// match: (SUB x (MOVWconst [c]))
  6042  	// cond:
  6043  	// result: (SUBconst [c] x)
  6044  	for {
  6045  		_ = v.Args[1]
  6046  		x := v.Args[0]
  6047  		v_1 := v.Args[1]
  6048  		if v_1.Op != OpMIPSMOVWconst {
  6049  			break
  6050  		}
  6051  		c := v_1.AuxInt
  6052  		v.reset(OpMIPSSUBconst)
  6053  		v.AuxInt = c
  6054  		v.AddArg(x)
  6055  		return true
  6056  	}
  6057  	// match: (SUB x x)
  6058  	// cond:
  6059  	// result: (MOVWconst [0])
  6060  	for {
  6061  		_ = v.Args[1]
  6062  		x := v.Args[0]
  6063  		if x != v.Args[1] {
  6064  			break
  6065  		}
  6066  		v.reset(OpMIPSMOVWconst)
  6067  		v.AuxInt = 0
  6068  		return true
  6069  	}
  6070  	// match: (SUB (MOVWconst [0]) x)
  6071  	// cond:
  6072  	// result: (NEG x)
  6073  	for {
  6074  		_ = v.Args[1]
  6075  		v_0 := v.Args[0]
  6076  		if v_0.Op != OpMIPSMOVWconst {
  6077  			break
  6078  		}
  6079  		if v_0.AuxInt != 0 {
  6080  			break
  6081  		}
  6082  		x := v.Args[1]
  6083  		v.reset(OpMIPSNEG)
  6084  		v.AddArg(x)
  6085  		return true
  6086  	}
  6087  	return false
  6088  }
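// SUBconst chains fold into a single constant operation using 32-bit
// arithmetic: (SUBconst [c] (SUBconst [d] x)) computes (x-d)-c = x+(-c-d),
// and (SUBconst [c] (ADDconst [d] x)) computes (x+d)-c = x+(d-c). For
// example, SUBconst [5] (SUBconst [3] x) becomes ADDconst [-8] x.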
  6089  func rewriteValueMIPS_OpMIPSSUBconst_0(v *Value) bool {
  6090  	// match: (SUBconst [0] x)
  6091  	// cond:
  6092  	// result: x
  6093  	for {
  6094  		if v.AuxInt != 0 {
  6095  			break
  6096  		}
  6097  		x := v.Args[0]
  6098  		v.reset(OpCopy)
  6099  		v.Type = x.Type
  6100  		v.AddArg(x)
  6101  		return true
  6102  	}
  6103  	// match: (SUBconst [c] (MOVWconst [d]))
  6104  	// cond:
  6105  	// result: (MOVWconst [int64(int32(d-c))])
  6106  	for {
  6107  		c := v.AuxInt
  6108  		v_0 := v.Args[0]
  6109  		if v_0.Op != OpMIPSMOVWconst {
  6110  			break
  6111  		}
  6112  		d := v_0.AuxInt
  6113  		v.reset(OpMIPSMOVWconst)
  6114  		v.AuxInt = int64(int32(d - c))
  6115  		return true
  6116  	}
  6117  	// match: (SUBconst [c] (SUBconst [d] x))
  6118  	// cond:
  6119  	// result: (ADDconst [int64(int32(-c-d))] x)
  6120  	for {
  6121  		c := v.AuxInt
  6122  		v_0 := v.Args[0]
  6123  		if v_0.Op != OpMIPSSUBconst {
  6124  			break
  6125  		}
  6126  		d := v_0.AuxInt
  6127  		x := v_0.Args[0]
  6128  		v.reset(OpMIPSADDconst)
  6129  		v.AuxInt = int64(int32(-c - d))
  6130  		v.AddArg(x)
  6131  		return true
  6132  	}
  6133  	// match: (SUBconst [c] (ADDconst [d] x))
  6134  	// cond:
  6135  	// result: (ADDconst [int64(int32(-c+d))] x)
  6136  	for {
  6137  		c := v.AuxInt
  6138  		v_0 := v.Args[0]
  6139  		if v_0.Op != OpMIPSADDconst {
  6140  			break
  6141  		}
  6142  		d := v_0.AuxInt
  6143  		x := v_0.Args[0]
  6144  		v.reset(OpMIPSADDconst)
  6145  		v.AuxInt = int64(int32(-c + d))
  6146  		v.AddArg(x)
  6147  		return true
  6148  	}
  6149  	return false
  6150  }
  6151  func rewriteValueMIPS_OpMIPSXOR_0(v *Value) bool {
  6152  	// match: (XOR x (MOVWconst [c]))
  6153  	// cond:
  6154  	// result: (XORconst [c] x)
  6155  	for {
  6156  		_ = v.Args[1]
  6157  		x := v.Args[0]
  6158  		v_1 := v.Args[1]
  6159  		if v_1.Op != OpMIPSMOVWconst {
  6160  			break
  6161  		}
  6162  		c := v_1.AuxInt
  6163  		v.reset(OpMIPSXORconst)
  6164  		v.AuxInt = c
  6165  		v.AddArg(x)
  6166  		return true
  6167  	}
  6168  	// match: (XOR (MOVWconst [c]) x)
  6169  	// cond:
  6170  	// result: (XORconst [c] x)
  6171  	for {
  6172  		_ = v.Args[1]
  6173  		v_0 := v.Args[0]
  6174  		if v_0.Op != OpMIPSMOVWconst {
  6175  			break
  6176  		}
  6177  		c := v_0.AuxInt
  6178  		x := v.Args[1]
  6179  		v.reset(OpMIPSXORconst)
  6180  		v.AuxInt = c
  6181  		v.AddArg(x)
  6182  		return true
  6183  	}
  6184  	// match: (XOR x x)
  6185  	// cond:
  6186  	// result: (MOVWconst [0])
  6187  	for {
  6188  		_ = v.Args[1]
  6189  		x := v.Args[0]
  6190  		if x != v.Args[1] {
  6191  			break
  6192  		}
  6193  		v.reset(OpMIPSMOVWconst)
  6194  		v.AuxInt = 0
  6195  		return true
  6196  	}
  6197  	return false
  6198  }
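// XORconst [-1] x is bitwise NOT, which MIPS expresses as NOR with zero
// (NORconst [0] x). The remaining rules fold XOR of two constants and
// collapse nested XORconst chains, since (x^d)^c == x^(c^d).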
  6199  func rewriteValueMIPS_OpMIPSXORconst_0(v *Value) bool {
  6200  	// match: (XORconst [0] x)
  6201  	// cond:
  6202  	// result: x
  6203  	for {
  6204  		if v.AuxInt != 0 {
  6205  			break
  6206  		}
  6207  		x := v.Args[0]
  6208  		v.reset(OpCopy)
  6209  		v.Type = x.Type
  6210  		v.AddArg(x)
  6211  		return true
  6212  	}
  6213  	// match: (XORconst [-1] x)
  6214  	// cond:
  6215  	// result: (NORconst [0] x)
  6216  	for {
  6217  		if v.AuxInt != -1 {
  6218  			break
  6219  		}
  6220  		x := v.Args[0]
  6221  		v.reset(OpMIPSNORconst)
  6222  		v.AuxInt = 0
  6223  		v.AddArg(x)
  6224  		return true
  6225  	}
  6226  	// match: (XORconst [c] (MOVWconst [d]))
  6227  	// cond:
  6228  	// result: (MOVWconst [c^d])
  6229  	for {
  6230  		c := v.AuxInt
  6231  		v_0 := v.Args[0]
  6232  		if v_0.Op != OpMIPSMOVWconst {
  6233  			break
  6234  		}
  6235  		d := v_0.AuxInt
  6236  		v.reset(OpMIPSMOVWconst)
  6237  		v.AuxInt = c ^ d
  6238  		return true
  6239  	}
  6240  	// match: (XORconst [c] (XORconst [d] x))
  6241  	// cond:
  6242  	// result: (XORconst [c^d] x)
  6243  	for {
  6244  		c := v.AuxInt
  6245  		v_0 := v.Args[0]
  6246  		if v_0.Op != OpMIPSXORconst {
  6247  			break
  6248  		}
  6249  		d := v_0.AuxInt
  6250  		x := v_0.Args[0]
  6251  		v.reset(OpMIPSXORconst)
  6252  		v.AuxInt = c ^ d
  6253  		v.AddArg(x)
  6254  		return true
  6255  	}
  6256  	return false
  6257  }
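// The Mod* rules lower to the MIPS DIV/DIVU instructions, which produce a
// quotient/remainder pair (the LO and HI registers); Select0 here extracts
// the remainder. Operands narrower than 32 bits are sign- or zero-extended
// first, matching the signedness of the operation.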
  6258  func rewriteValueMIPS_OpMod16_0(v *Value) bool {
  6259  	b := v.Block
  6260  	_ = b
  6261  	typ := &b.Func.Config.Types
  6262  	_ = typ
  6263  	// match: (Mod16 x y)
  6264  	// cond:
  6265  	// result: (Select0 (DIV (SignExt16to32 x) (SignExt16to32 y)))
  6266  	for {
  6267  		_ = v.Args[1]
  6268  		x := v.Args[0]
  6269  		y := v.Args[1]
  6270  		v.reset(OpSelect0)
  6271  		v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32))
  6272  		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  6273  		v1.AddArg(x)
  6274  		v0.AddArg(v1)
  6275  		v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  6276  		v2.AddArg(y)
  6277  		v0.AddArg(v2)
  6278  		v.AddArg(v0)
  6279  		return true
  6280  	}
  6281  }
  6282  func rewriteValueMIPS_OpMod16u_0(v *Value) bool {
  6283  	b := v.Block
  6284  	_ = b
  6285  	typ := &b.Func.Config.Types
  6286  	_ = typ
  6287  	// match: (Mod16u x y)
  6288  	// cond:
  6289  	// result: (Select0 (DIVU (ZeroExt16to32 x) (ZeroExt16to32 y)))
  6290  	for {
  6291  		_ = v.Args[1]
  6292  		x := v.Args[0]
  6293  		y := v.Args[1]
  6294  		v.reset(OpSelect0)
  6295  		v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32))
  6296  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  6297  		v1.AddArg(x)
  6298  		v0.AddArg(v1)
  6299  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  6300  		v2.AddArg(y)
  6301  		v0.AddArg(v2)
  6302  		v.AddArg(v0)
  6303  		return true
  6304  	}
  6305  }
  6306  func rewriteValueMIPS_OpMod32_0(v *Value) bool {
  6307  	b := v.Block
  6308  	_ = b
  6309  	typ := &b.Func.Config.Types
  6310  	_ = typ
  6311  	// match: (Mod32 x y)
  6312  	// cond:
  6313  	// result: (Select0 (DIV x y))
  6314  	for {
  6315  		_ = v.Args[1]
  6316  		x := v.Args[0]
  6317  		y := v.Args[1]
  6318  		v.reset(OpSelect0)
  6319  		v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32))
  6320  		v0.AddArg(x)
  6321  		v0.AddArg(y)
  6322  		v.AddArg(v0)
  6323  		return true
  6324  	}
  6325  }
  6326  func rewriteValueMIPS_OpMod32u_0(v *Value) bool {
  6327  	b := v.Block
  6328  	_ = b
  6329  	typ := &b.Func.Config.Types
  6330  	_ = typ
  6331  	// match: (Mod32u x y)
  6332  	// cond:
  6333  	// result: (Select0 (DIVU x y))
  6334  	for {
  6335  		_ = v.Args[1]
  6336  		x := v.Args[0]
  6337  		y := v.Args[1]
  6338  		v.reset(OpSelect0)
  6339  		v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32))
  6340  		v0.AddArg(x)
  6341  		v0.AddArg(y)
  6342  		v.AddArg(v0)
  6343  		return true
  6344  	}
  6345  }
  6346  func rewriteValueMIPS_OpMod8_0(v *Value) bool {
  6347  	b := v.Block
  6348  	_ = b
  6349  	typ := &b.Func.Config.Types
  6350  	_ = typ
  6351  	// match: (Mod8 x y)
  6352  	// cond:
  6353  	// result: (Select0 (DIV (SignExt8to32 x) (SignExt8to32 y)))
  6354  	for {
  6355  		_ = v.Args[1]
  6356  		x := v.Args[0]
  6357  		y := v.Args[1]
  6358  		v.reset(OpSelect0)
  6359  		v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32))
  6360  		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  6361  		v1.AddArg(x)
  6362  		v0.AddArg(v1)
  6363  		v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  6364  		v2.AddArg(y)
  6365  		v0.AddArg(v2)
  6366  		v.AddArg(v0)
  6367  		return true
  6368  	}
  6369  }
  6370  func rewriteValueMIPS_OpMod8u_0(v *Value) bool {
  6371  	b := v.Block
  6372  	_ = b
  6373  	typ := &b.Func.Config.Types
  6374  	_ = typ
  6375  	// match: (Mod8u x y)
  6376  	// cond:
  6377  	// result: (Select0 (DIVU (ZeroExt8to32 x) (ZeroExt8to32 y)))
  6378  	for {
  6379  		_ = v.Args[1]
  6380  		x := v.Args[0]
  6381  		y := v.Args[1]
  6382  		v.reset(OpSelect0)
  6383  		v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32))
  6384  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  6385  		v1.AddArg(x)
  6386  		v0.AddArg(v1)
  6387  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  6388  		v2.AddArg(y)
  6389  		v0.AddArg(v2)
  6390  		v.AddArg(v0)
  6391  		return true
  6392  	}
  6393  }
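// Move is unrolled by size and alignment: fixed sizes up to 16 bytes are
// expanded into MOVB/MOVH/MOVW load/store pairs, using the widest access
// the type's alignment permits. The final rule in rewriteValueMIPS_OpMove_10
// falls back to LoweredMove for copies the fixed-size patterns do not cover.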
  6394  func rewriteValueMIPS_OpMove_0(v *Value) bool {
  6395  	b := v.Block
  6396  	_ = b
  6397  	typ := &b.Func.Config.Types
  6398  	_ = typ
  6399  	// match: (Move [0] _ _ mem)
  6400  	// cond:
  6401  	// result: mem
  6402  	for {
  6403  		if v.AuxInt != 0 {
  6404  			break
  6405  		}
  6406  		_ = v.Args[2]
  6407  		mem := v.Args[2]
  6408  		v.reset(OpCopy)
  6409  		v.Type = mem.Type
  6410  		v.AddArg(mem)
  6411  		return true
  6412  	}
  6413  	// match: (Move [1] dst src mem)
  6414  	// cond:
  6415  	// result: (MOVBstore dst (MOVBUload src mem) mem)
  6416  	for {
  6417  		if v.AuxInt != 1 {
  6418  			break
  6419  		}
  6420  		_ = v.Args[2]
  6421  		dst := v.Args[0]
  6422  		src := v.Args[1]
  6423  		mem := v.Args[2]
  6424  		v.reset(OpMIPSMOVBstore)
  6425  		v.AddArg(dst)
  6426  		v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
  6427  		v0.AddArg(src)
  6428  		v0.AddArg(mem)
  6429  		v.AddArg(v0)
  6430  		v.AddArg(mem)
  6431  		return true
  6432  	}
  6433  	// match: (Move [2] {t} dst src mem)
  6434  	// cond: t.(*types.Type).Alignment()%2 == 0
  6435  	// result: (MOVHstore dst (MOVHUload src mem) mem)
  6436  	for {
  6437  		if v.AuxInt != 2 {
  6438  			break
  6439  		}
  6440  		t := v.Aux
  6441  		_ = v.Args[2]
  6442  		dst := v.Args[0]
  6443  		src := v.Args[1]
  6444  		mem := v.Args[2]
  6445  		if !(t.(*types.Type).Alignment()%2 == 0) {
  6446  			break
  6447  		}
  6448  		v.reset(OpMIPSMOVHstore)
  6449  		v.AddArg(dst)
  6450  		v0 := b.NewValue0(v.Pos, OpMIPSMOVHUload, typ.UInt16)
  6451  		v0.AddArg(src)
  6452  		v0.AddArg(mem)
  6453  		v.AddArg(v0)
  6454  		v.AddArg(mem)
  6455  		return true
  6456  	}
  6457  	// match: (Move [2] dst src mem)
  6458  	// cond:
  6459  	// result: (MOVBstore [1] dst (MOVBUload [1] src mem) 		(MOVBstore dst (MOVBUload src mem) mem))
  6460  	for {
  6461  		if v.AuxInt != 2 {
  6462  			break
  6463  		}
  6464  		_ = v.Args[2]
  6465  		dst := v.Args[0]
  6466  		src := v.Args[1]
  6467  		mem := v.Args[2]
  6468  		v.reset(OpMIPSMOVBstore)
  6469  		v.AuxInt = 1
  6470  		v.AddArg(dst)
  6471  		v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
  6472  		v0.AuxInt = 1
  6473  		v0.AddArg(src)
  6474  		v0.AddArg(mem)
  6475  		v.AddArg(v0)
  6476  		v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  6477  		v1.AddArg(dst)
  6478  		v2 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
  6479  		v2.AddArg(src)
  6480  		v2.AddArg(mem)
  6481  		v1.AddArg(v2)
  6482  		v1.AddArg(mem)
  6483  		v.AddArg(v1)
  6484  		return true
  6485  	}
  6486  	// match: (Move [4] {t} dst src mem)
  6487  	// cond: t.(*types.Type).Alignment()%4 == 0
  6488  	// result: (MOVWstore dst (MOVWload src mem) mem)
  6489  	for {
  6490  		if v.AuxInt != 4 {
  6491  			break
  6492  		}
  6493  		t := v.Aux
  6494  		_ = v.Args[2]
  6495  		dst := v.Args[0]
  6496  		src := v.Args[1]
  6497  		mem := v.Args[2]
  6498  		if !(t.(*types.Type).Alignment()%4 == 0) {
  6499  			break
  6500  		}
  6501  		v.reset(OpMIPSMOVWstore)
  6502  		v.AddArg(dst)
  6503  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
  6504  		v0.AddArg(src)
  6505  		v0.AddArg(mem)
  6506  		v.AddArg(v0)
  6507  		v.AddArg(mem)
  6508  		return true
  6509  	}
  6510  	// match: (Move [4] {t} dst src mem)
  6511  	// cond: t.(*types.Type).Alignment()%2 == 0
  6512  	// result: (MOVHstore [2] dst (MOVHUload [2] src mem) 		(MOVHstore dst (MOVHUload src mem) mem))
  6513  	for {
  6514  		if v.AuxInt != 4 {
  6515  			break
  6516  		}
  6517  		t := v.Aux
  6518  		_ = v.Args[2]
  6519  		dst := v.Args[0]
  6520  		src := v.Args[1]
  6521  		mem := v.Args[2]
  6522  		if !(t.(*types.Type).Alignment()%2 == 0) {
  6523  			break
  6524  		}
  6525  		v.reset(OpMIPSMOVHstore)
  6526  		v.AuxInt = 2
  6527  		v.AddArg(dst)
  6528  		v0 := b.NewValue0(v.Pos, OpMIPSMOVHUload, typ.UInt16)
  6529  		v0.AuxInt = 2
  6530  		v0.AddArg(src)
  6531  		v0.AddArg(mem)
  6532  		v.AddArg(v0)
  6533  		v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
  6534  		v1.AddArg(dst)
  6535  		v2 := b.NewValue0(v.Pos, OpMIPSMOVHUload, typ.UInt16)
  6536  		v2.AddArg(src)
  6537  		v2.AddArg(mem)
  6538  		v1.AddArg(v2)
  6539  		v1.AddArg(mem)
  6540  		v.AddArg(v1)
  6541  		return true
  6542  	}
  6543  	// match: (Move [4] dst src mem)
  6544  	// cond:
  6545  	// result: (MOVBstore [3] dst (MOVBUload [3] src mem) 		(MOVBstore [2] dst (MOVBUload [2] src mem) 			(MOVBstore [1] dst (MOVBUload [1] src mem) 				(MOVBstore dst (MOVBUload src mem) mem))))
  6546  	for {
  6547  		if v.AuxInt != 4 {
  6548  			break
  6549  		}
  6550  		_ = v.Args[2]
  6551  		dst := v.Args[0]
  6552  		src := v.Args[1]
  6553  		mem := v.Args[2]
  6554  		v.reset(OpMIPSMOVBstore)
  6555  		v.AuxInt = 3
  6556  		v.AddArg(dst)
  6557  		v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
  6558  		v0.AuxInt = 3
  6559  		v0.AddArg(src)
  6560  		v0.AddArg(mem)
  6561  		v.AddArg(v0)
  6562  		v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  6563  		v1.AuxInt = 2
  6564  		v1.AddArg(dst)
  6565  		v2 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
  6566  		v2.AuxInt = 2
  6567  		v2.AddArg(src)
  6568  		v2.AddArg(mem)
  6569  		v1.AddArg(v2)
  6570  		v3 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  6571  		v3.AuxInt = 1
  6572  		v3.AddArg(dst)
  6573  		v4 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
  6574  		v4.AuxInt = 1
  6575  		v4.AddArg(src)
  6576  		v4.AddArg(mem)
  6577  		v3.AddArg(v4)
  6578  		v5 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  6579  		v5.AddArg(dst)
  6580  		v6 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
  6581  		v6.AddArg(src)
  6582  		v6.AddArg(mem)
  6583  		v5.AddArg(v6)
  6584  		v5.AddArg(mem)
  6585  		v3.AddArg(v5)
  6586  		v1.AddArg(v3)
  6587  		v.AddArg(v1)
  6588  		return true
  6589  	}
  6590  	// match: (Move [3] dst src mem)
  6591  	// cond:
  6592  	// result: (MOVBstore [2] dst (MOVBUload [2] src mem) 		(MOVBstore [1] dst (MOVBUload [1] src mem) 			(MOVBstore dst (MOVBUload src mem) mem)))
  6593  	for {
  6594  		if v.AuxInt != 3 {
  6595  			break
  6596  		}
  6597  		_ = v.Args[2]
  6598  		dst := v.Args[0]
  6599  		src := v.Args[1]
  6600  		mem := v.Args[2]
  6601  		v.reset(OpMIPSMOVBstore)
  6602  		v.AuxInt = 2
  6603  		v.AddArg(dst)
  6604  		v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
  6605  		v0.AuxInt = 2
  6606  		v0.AddArg(src)
  6607  		v0.AddArg(mem)
  6608  		v.AddArg(v0)
  6609  		v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  6610  		v1.AuxInt = 1
  6611  		v1.AddArg(dst)
  6612  		v2 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
  6613  		v2.AuxInt = 1
  6614  		v2.AddArg(src)
  6615  		v2.AddArg(mem)
  6616  		v1.AddArg(v2)
  6617  		v3 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  6618  		v3.AddArg(dst)
  6619  		v4 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
  6620  		v4.AddArg(src)
  6621  		v4.AddArg(mem)
  6622  		v3.AddArg(v4)
  6623  		v3.AddArg(mem)
  6624  		v1.AddArg(v3)
  6625  		v.AddArg(v1)
  6626  		return true
  6627  	}
  6628  	// match: (Move [8] {t} dst src mem)
  6629  	// cond: t.(*types.Type).Alignment()%4 == 0
  6630  	// result: (MOVWstore [4] dst (MOVWload [4] src mem) 		(MOVWstore dst (MOVWload src mem) mem))
  6631  	for {
  6632  		if v.AuxInt != 8 {
  6633  			break
  6634  		}
  6635  		t := v.Aux
  6636  		_ = v.Args[2]
  6637  		dst := v.Args[0]
  6638  		src := v.Args[1]
  6639  		mem := v.Args[2]
  6640  		if !(t.(*types.Type).Alignment()%4 == 0) {
  6641  			break
  6642  		}
  6643  		v.reset(OpMIPSMOVWstore)
  6644  		v.AuxInt = 4
  6645  		v.AddArg(dst)
  6646  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
  6647  		v0.AuxInt = 4
  6648  		v0.AddArg(src)
  6649  		v0.AddArg(mem)
  6650  		v.AddArg(v0)
  6651  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  6652  		v1.AddArg(dst)
  6653  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
  6654  		v2.AddArg(src)
  6655  		v2.AddArg(mem)
  6656  		v1.AddArg(v2)
  6657  		v1.AddArg(mem)
  6658  		v.AddArg(v1)
  6659  		return true
  6660  	}
  6661  	// match: (Move [8] {t} dst src mem)
  6662  	// cond: t.(*types.Type).Alignment()%2 == 0
  6663  	// result: (MOVHstore [6] dst (MOVHload [6] src mem) 		(MOVHstore [4] dst (MOVHload [4] src mem) 			(MOVHstore [2] dst (MOVHload [2] src mem) 				(MOVHstore dst (MOVHload src mem) mem))))
  6664  	for {
  6665  		if v.AuxInt != 8 {
  6666  			break
  6667  		}
  6668  		t := v.Aux
  6669  		_ = v.Args[2]
  6670  		dst := v.Args[0]
  6671  		src := v.Args[1]
  6672  		mem := v.Args[2]
  6673  		if !(t.(*types.Type).Alignment()%2 == 0) {
  6674  			break
  6675  		}
  6676  		v.reset(OpMIPSMOVHstore)
  6677  		v.AuxInt = 6
  6678  		v.AddArg(dst)
  6679  		v0 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16)
  6680  		v0.AuxInt = 6
  6681  		v0.AddArg(src)
  6682  		v0.AddArg(mem)
  6683  		v.AddArg(v0)
  6684  		v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
  6685  		v1.AuxInt = 4
  6686  		v1.AddArg(dst)
  6687  		v2 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16)
  6688  		v2.AuxInt = 4
  6689  		v2.AddArg(src)
  6690  		v2.AddArg(mem)
  6691  		v1.AddArg(v2)
  6692  		v3 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
  6693  		v3.AuxInt = 2
  6694  		v3.AddArg(dst)
  6695  		v4 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16)
  6696  		v4.AuxInt = 2
  6697  		v4.AddArg(src)
  6698  		v4.AddArg(mem)
  6699  		v3.AddArg(v4)
  6700  		v5 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
  6701  		v5.AddArg(dst)
  6702  		v6 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16)
  6703  		v6.AddArg(src)
  6704  		v6.AddArg(mem)
  6705  		v5.AddArg(v6)
  6706  		v5.AddArg(mem)
  6707  		v3.AddArg(v5)
  6708  		v1.AddArg(v3)
  6709  		v.AddArg(v1)
  6710  		return true
  6711  	}
  6712  	return false
  6713  }
  6714  func rewriteValueMIPS_OpMove_10(v *Value) bool {
  6715  	b := v.Block
  6716  	_ = b
  6717  	config := b.Func.Config
  6718  	_ = config
  6719  	typ := &b.Func.Config.Types
  6720  	_ = typ
  6721  	// match: (Move [6] {t} dst src mem)
  6722  	// cond: t.(*types.Type).Alignment()%2 == 0
  6723  	// result: (MOVHstore [4] dst (MOVHload [4] src mem) 		(MOVHstore [2] dst (MOVHload [2] src mem) 			(MOVHstore dst (MOVHload src mem) mem)))
  6724  	for {
  6725  		if v.AuxInt != 6 {
  6726  			break
  6727  		}
  6728  		t := v.Aux
  6729  		_ = v.Args[2]
  6730  		dst := v.Args[0]
  6731  		src := v.Args[1]
  6732  		mem := v.Args[2]
  6733  		if !(t.(*types.Type).Alignment()%2 == 0) {
  6734  			break
  6735  		}
  6736  		v.reset(OpMIPSMOVHstore)
  6737  		v.AuxInt = 4
  6738  		v.AddArg(dst)
  6739  		v0 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16)
  6740  		v0.AuxInt = 4
  6741  		v0.AddArg(src)
  6742  		v0.AddArg(mem)
  6743  		v.AddArg(v0)
  6744  		v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
  6745  		v1.AuxInt = 2
  6746  		v1.AddArg(dst)
  6747  		v2 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16)
  6748  		v2.AuxInt = 2
  6749  		v2.AddArg(src)
  6750  		v2.AddArg(mem)
  6751  		v1.AddArg(v2)
  6752  		v3 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
  6753  		v3.AddArg(dst)
  6754  		v4 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16)
  6755  		v4.AddArg(src)
  6756  		v4.AddArg(mem)
  6757  		v3.AddArg(v4)
  6758  		v3.AddArg(mem)
  6759  		v1.AddArg(v3)
  6760  		v.AddArg(v1)
  6761  		return true
  6762  	}
  6763  	// match: (Move [12] {t} dst src mem)
  6764  	// cond: t.(*types.Type).Alignment()%4 == 0
  6765  	// result: (MOVWstore [8] dst (MOVWload [8] src mem) 		(MOVWstore [4] dst (MOVWload [4] src mem) 			(MOVWstore dst (MOVWload src mem) mem)))
  6766  	for {
  6767  		if v.AuxInt != 12 {
  6768  			break
  6769  		}
  6770  		t := v.Aux
  6771  		_ = v.Args[2]
  6772  		dst := v.Args[0]
  6773  		src := v.Args[1]
  6774  		mem := v.Args[2]
  6775  		if !(t.(*types.Type).Alignment()%4 == 0) {
  6776  			break
  6777  		}
  6778  		v.reset(OpMIPSMOVWstore)
  6779  		v.AuxInt = 8
  6780  		v.AddArg(dst)
  6781  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
  6782  		v0.AuxInt = 8
  6783  		v0.AddArg(src)
  6784  		v0.AddArg(mem)
  6785  		v.AddArg(v0)
  6786  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  6787  		v1.AuxInt = 4
  6788  		v1.AddArg(dst)
  6789  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
  6790  		v2.AuxInt = 4
  6791  		v2.AddArg(src)
  6792  		v2.AddArg(mem)
  6793  		v1.AddArg(v2)
  6794  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  6795  		v3.AddArg(dst)
  6796  		v4 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
  6797  		v4.AddArg(src)
  6798  		v4.AddArg(mem)
  6799  		v3.AddArg(v4)
  6800  		v3.AddArg(mem)
  6801  		v1.AddArg(v3)
  6802  		v.AddArg(v1)
  6803  		return true
  6804  	}
  6805  	// match: (Move [16] {t} dst src mem)
  6806  	// cond: t.(*types.Type).Alignment()%4 == 0
  6807  	// result: (MOVWstore [12] dst (MOVWload [12] src mem) 		(MOVWstore [8] dst (MOVWload [8] src mem) 			(MOVWstore [4] dst (MOVWload [4] src mem) 				(MOVWstore dst (MOVWload src mem) mem))))
  6808  	for {
  6809  		if v.AuxInt != 16 {
  6810  			break
  6811  		}
  6812  		t := v.Aux
  6813  		_ = v.Args[2]
  6814  		dst := v.Args[0]
  6815  		src := v.Args[1]
  6816  		mem := v.Args[2]
  6817  		if !(t.(*types.Type).Alignment()%4 == 0) {
  6818  			break
  6819  		}
  6820  		v.reset(OpMIPSMOVWstore)
  6821  		v.AuxInt = 12
  6822  		v.AddArg(dst)
  6823  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
  6824  		v0.AuxInt = 12
  6825  		v0.AddArg(src)
  6826  		v0.AddArg(mem)
  6827  		v.AddArg(v0)
  6828  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  6829  		v1.AuxInt = 8
  6830  		v1.AddArg(dst)
  6831  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
  6832  		v2.AuxInt = 8
  6833  		v2.AddArg(src)
  6834  		v2.AddArg(mem)
  6835  		v1.AddArg(v2)
  6836  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  6837  		v3.AuxInt = 4
  6838  		v3.AddArg(dst)
  6839  		v4 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
  6840  		v4.AuxInt = 4
  6841  		v4.AddArg(src)
  6842  		v4.AddArg(mem)
  6843  		v3.AddArg(v4)
  6844  		v5 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  6845  		v5.AddArg(dst)
  6846  		v6 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
  6847  		v6.AddArg(src)
  6848  		v6.AddArg(mem)
  6849  		v5.AddArg(v6)
  6850  		v5.AddArg(mem)
  6851  		v3.AddArg(v5)
  6852  		v1.AddArg(v3)
  6853  		v.AddArg(v1)
  6854  		return true
  6855  	}
  6856  	// match: (Move [s] {t} dst src mem)
  6857  	// cond: (s > 16 || t.(*types.Type).Alignment()%4 != 0)
  6858  	// result: (LoweredMove [t.(*types.Type).Alignment()] 		dst 		src 		(ADDconst <src.Type> src [s-moveSize(t.(*types.Type).Alignment(), config)]) 		mem)
  6859  	for {
  6860  		s := v.AuxInt
  6861  		t := v.Aux
  6862  		_ = v.Args[2]
  6863  		dst := v.Args[0]
  6864  		src := v.Args[1]
  6865  		mem := v.Args[2]
  6866  		if !(s > 16 || t.(*types.Type).Alignment()%4 != 0) {
  6867  			break
  6868  		}
  6869  		v.reset(OpMIPSLoweredMove)
  6870  		v.AuxInt = t.(*types.Type).Alignment()
  6871  		v.AddArg(dst)
  6872  		v.AddArg(src)
  6873  		v0 := b.NewValue0(v.Pos, OpMIPSADDconst, src.Type)
  6874  		v0.AuxInt = s - moveSize(t.(*types.Type).Alignment(), config)
  6875  		v0.AddArg(src)
  6876  		v.AddArg(v0)
  6877  		v.AddArg(mem)
  6878  		return true
  6879  	}
  6880  	return false
  6881  }
  6882  func rewriteValueMIPS_OpMul16_0(v *Value) bool {
  6883  	// match: (Mul16 x y)
  6884  	// cond:
  6885  	// result: (MUL x y)
  6886  	for {
  6887  		_ = v.Args[1]
  6888  		x := v.Args[0]
  6889  		y := v.Args[1]
  6890  		v.reset(OpMIPSMUL)
  6891  		v.AddArg(x)
  6892  		v.AddArg(y)
  6893  		return true
  6894  	}
  6895  }
  6896  func rewriteValueMIPS_OpMul32_0(v *Value) bool {
  6897  	// match: (Mul32 x y)
  6898  	// cond:
  6899  	// result: (MUL x y)
  6900  	for {
  6901  		_ = v.Args[1]
  6902  		x := v.Args[0]
  6903  		y := v.Args[1]
  6904  		v.reset(OpMIPSMUL)
  6905  		v.AddArg(x)
  6906  		v.AddArg(y)
  6907  		return true
  6908  	}
  6909  }
  6910  func rewriteValueMIPS_OpMul32F_0(v *Value) bool {
  6911  	// match: (Mul32F x y)
  6912  	// cond:
  6913  	// result: (MULF x y)
  6914  	for {
  6915  		_ = v.Args[1]
  6916  		x := v.Args[0]
  6917  		y := v.Args[1]
  6918  		v.reset(OpMIPSMULF)
  6919  		v.AddArg(x)
  6920  		v.AddArg(y)
  6921  		return true
  6922  	}
  6923  }
  6924  func rewriteValueMIPS_OpMul32uhilo_0(v *Value) bool {
  6925  	// match: (Mul32uhilo x y)
  6926  	// cond:
  6927  	// result: (MULTU x y)
  6928  	for {
  6929  		_ = v.Args[1]
  6930  		x := v.Args[0]
  6931  		y := v.Args[1]
  6932  		v.reset(OpMIPSMULTU)
  6933  		v.AddArg(x)
  6934  		v.AddArg(y)
  6935  		return true
  6936  	}
  6937  }
  6938  func rewriteValueMIPS_OpMul64F_0(v *Value) bool {
  6939  	// match: (Mul64F x y)
  6940  	// cond:
  6941  	// result: (MULD x y)
  6942  	for {
  6943  		_ = v.Args[1]
  6944  		x := v.Args[0]
  6945  		y := v.Args[1]
  6946  		v.reset(OpMIPSMULD)
  6947  		v.AddArg(x)
  6948  		v.AddArg(y)
  6949  		return true
  6950  	}
  6951  }
  6952  func rewriteValueMIPS_OpMul8_0(v *Value) bool {
  6953  	// match: (Mul8 x y)
  6954  	// cond:
  6955  	// result: (MUL x y)
  6956  	for {
  6957  		_ = v.Args[1]
  6958  		x := v.Args[0]
  6959  		y := v.Args[1]
  6960  		v.reset(OpMIPSMUL)
  6961  		v.AddArg(x)
  6962  		v.AddArg(y)
  6963  		return true
  6964  	}
  6965  }
  6966  func rewriteValueMIPS_OpNeg16_0(v *Value) bool {
  6967  	// match: (Neg16 x)
  6968  	// cond:
  6969  	// result: (NEG x)
  6970  	for {
  6971  		x := v.Args[0]
  6972  		v.reset(OpMIPSNEG)
  6973  		v.AddArg(x)
  6974  		return true
  6975  	}
  6976  }
  6977  func rewriteValueMIPS_OpNeg32_0(v *Value) bool {
  6978  	// match: (Neg32 x)
  6979  	// cond:
  6980  	// result: (NEG x)
  6981  	for {
  6982  		x := v.Args[0]
  6983  		v.reset(OpMIPSNEG)
  6984  		v.AddArg(x)
  6985  		return true
  6986  	}
  6987  }
  6988  func rewriteValueMIPS_OpNeg32F_0(v *Value) bool {
  6989  	// match: (Neg32F x)
  6990  	// cond:
  6991  	// result: (NEGF x)
  6992  	for {
  6993  		x := v.Args[0]
  6994  		v.reset(OpMIPSNEGF)
  6995  		v.AddArg(x)
  6996  		return true
  6997  	}
  6998  }
  6999  func rewriteValueMIPS_OpNeg64F_0(v *Value) bool {
  7000  	// match: (Neg64F x)
  7001  	// cond:
  7002  	// result: (NEGD x)
  7003  	for {
  7004  		x := v.Args[0]
  7005  		v.reset(OpMIPSNEGD)
  7006  		v.AddArg(x)
  7007  		return true
  7008  	}
  7009  }
  7010  func rewriteValueMIPS_OpNeg8_0(v *Value) bool {
  7011  	// match: (Neg8 x)
  7012  	// cond:
  7013  	// result: (NEG x)
  7014  	for {
  7015  		x := v.Args[0]
  7016  		v.reset(OpMIPSNEG)
  7017  		v.AddArg(x)
  7018  		return true
  7019  	}
  7020  }
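// The Neq* rules compute x != y as an unsigned comparison of x^y against
// zero: the operands are widened where necessary, XORed, and fed to SGTU
// with a zero constant, which yields 1 exactly when the values differ.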
  7021  func rewriteValueMIPS_OpNeq16_0(v *Value) bool {
  7022  	b := v.Block
  7023  	_ = b
  7024  	typ := &b.Func.Config.Types
  7025  	_ = typ
  7026  	// match: (Neq16 x y)
  7027  	// cond:
  7028  	// result: (SGTU (XOR (ZeroExt16to32 x) (ZeroExt16to32 y)) (MOVWconst [0]))
  7029  	for {
  7030  		_ = v.Args[1]
  7031  		x := v.Args[0]
  7032  		y := v.Args[1]
  7033  		v.reset(OpMIPSSGTU)
  7034  		v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32)
  7035  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7036  		v1.AddArg(x)
  7037  		v0.AddArg(v1)
  7038  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7039  		v2.AddArg(y)
  7040  		v0.AddArg(v2)
  7041  		v.AddArg(v0)
  7042  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7043  		v3.AuxInt = 0
  7044  		v.AddArg(v3)
  7045  		return true
  7046  	}
  7047  }
  7048  func rewriteValueMIPS_OpNeq32_0(v *Value) bool {
  7049  	b := v.Block
  7050  	_ = b
  7051  	typ := &b.Func.Config.Types
  7052  	_ = typ
  7053  	// match: (Neq32 x y)
  7054  	// cond:
  7055  	// result: (SGTU (XOR x y) (MOVWconst [0]))
  7056  	for {
  7057  		_ = v.Args[1]
  7058  		x := v.Args[0]
  7059  		y := v.Args[1]
  7060  		v.reset(OpMIPSSGTU)
  7061  		v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32)
  7062  		v0.AddArg(x)
  7063  		v0.AddArg(y)
  7064  		v.AddArg(v0)
  7065  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7066  		v1.AuxInt = 0
  7067  		v.AddArg(v1)
  7068  		return true
  7069  	}
  7070  }
  7071  func rewriteValueMIPS_OpNeq32F_0(v *Value) bool {
  7072  	b := v.Block
  7073  	_ = b
  7074  	// match: (Neq32F x y)
  7075  	// cond:
  7076  	// result: (FPFlagFalse (CMPEQF x y))
  7077  	for {
  7078  		_ = v.Args[1]
  7079  		x := v.Args[0]
  7080  		y := v.Args[1]
  7081  		v.reset(OpMIPSFPFlagFalse)
  7082  		v0 := b.NewValue0(v.Pos, OpMIPSCMPEQF, types.TypeFlags)
  7083  		v0.AddArg(x)
  7084  		v0.AddArg(y)
  7085  		v.AddArg(v0)
  7086  		return true
  7087  	}
  7088  }
  7089  func rewriteValueMIPS_OpNeq64F_0(v *Value) bool {
  7090  	b := v.Block
  7091  	_ = b
  7092  	// match: (Neq64F x y)
  7093  	// cond:
  7094  	// result: (FPFlagFalse (CMPEQD x y))
  7095  	for {
  7096  		_ = v.Args[1]
  7097  		x := v.Args[0]
  7098  		y := v.Args[1]
  7099  		v.reset(OpMIPSFPFlagFalse)
  7100  		v0 := b.NewValue0(v.Pos, OpMIPSCMPEQD, types.TypeFlags)
  7101  		v0.AddArg(x)
  7102  		v0.AddArg(y)
  7103  		v.AddArg(v0)
  7104  		return true
  7105  	}
  7106  }
  7107  func rewriteValueMIPS_OpNeq8_0(v *Value) bool {
  7108  	b := v.Block
  7109  	_ = b
  7110  	typ := &b.Func.Config.Types
  7111  	_ = typ
  7112  	// match: (Neq8 x y)
  7113  	// cond:
  7114  	// result: (SGTU (XOR (ZeroExt8to32 x) (ZeroExt8to32 y)) (MOVWconst [0]))
  7115  	for {
  7116  		_ = v.Args[1]
  7117  		x := v.Args[0]
  7118  		y := v.Args[1]
  7119  		v.reset(OpMIPSSGTU)
  7120  		v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32)
  7121  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7122  		v1.AddArg(x)
  7123  		v0.AddArg(v1)
  7124  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7125  		v2.AddArg(y)
  7126  		v0.AddArg(v2)
  7127  		v.AddArg(v0)
  7128  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7129  		v3.AuxInt = 0
  7130  		v.AddArg(v3)
  7131  		return true
  7132  	}
  7133  }
  7134  func rewriteValueMIPS_OpNeqB_0(v *Value) bool {
  7135  	// match: (NeqB x y)
  7136  	// cond:
  7137  	// result: (XOR x y)
  7138  	for {
  7139  		_ = v.Args[1]
  7140  		x := v.Args[0]
  7141  		y := v.Args[1]
  7142  		v.reset(OpMIPSXOR)
  7143  		v.AddArg(x)
  7144  		v.AddArg(y)
  7145  		return true
  7146  	}
  7147  }
  7148  func rewriteValueMIPS_OpNeqPtr_0(v *Value) bool {
  7149  	b := v.Block
  7150  	_ = b
  7151  	typ := &b.Func.Config.Types
  7152  	_ = typ
  7153  	// match: (NeqPtr x y)
  7154  	// cond:
  7155  	// result: (SGTU (XOR x y) (MOVWconst [0]))
  7156  	for {
  7157  		_ = v.Args[1]
  7158  		x := v.Args[0]
  7159  		y := v.Args[1]
  7160  		v.reset(OpMIPSSGTU)
  7161  		v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32)
  7162  		v0.AddArg(x)
  7163  		v0.AddArg(y)
  7164  		v.AddArg(v0)
  7165  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7166  		v1.AuxInt = 0
  7167  		v.AddArg(v1)
  7168  		return true
  7169  	}
  7170  }
  7171  func rewriteValueMIPS_OpNilCheck_0(v *Value) bool {
  7172  	// match: (NilCheck ptr mem)
  7173  	// cond:
  7174  	// result: (LoweredNilCheck ptr mem)
  7175  	for {
  7176  		_ = v.Args[1]
  7177  		ptr := v.Args[0]
  7178  		mem := v.Args[1]
  7179  		v.reset(OpMIPSLoweredNilCheck)
  7180  		v.AddArg(ptr)
  7181  		v.AddArg(mem)
  7182  		return true
  7183  	}
  7184  }
  7185  func rewriteValueMIPS_OpNot_0(v *Value) bool {
  7186  	// match: (Not x)
  7187  	// cond:
  7188  	// result: (XORconst [1] x)
  7189  	for {
  7190  		x := v.Args[0]
  7191  		v.reset(OpMIPSXORconst)
  7192  		v.AuxInt = 1
  7193  		v.AddArg(x)
  7194  		return true
  7195  	}
  7196  }
  7197  func rewriteValueMIPS_OpOffPtr_0(v *Value) bool {
  7198  	// match: (OffPtr [off] ptr:(SP))
  7199  	// cond:
  7200  	// result: (MOVWaddr [off] ptr)
  7201  	for {
  7202  		off := v.AuxInt
  7203  		ptr := v.Args[0]
  7204  		if ptr.Op != OpSP {
  7205  			break
  7206  		}
  7207  		v.reset(OpMIPSMOVWaddr)
  7208  		v.AuxInt = off
  7209  		v.AddArg(ptr)
  7210  		return true
  7211  	}
  7212  	// match: (OffPtr [off] ptr)
  7213  	// cond:
  7214  	// result: (ADDconst [off] ptr)
  7215  	for {
  7216  		off := v.AuxInt
  7217  		ptr := v.Args[0]
  7218  		v.reset(OpMIPSADDconst)
  7219  		v.AuxInt = off
  7220  		v.AddArg(ptr)
  7221  		return true
  7222  	}
  7223  }
  7224  func rewriteValueMIPS_OpOr16_0(v *Value) bool {
  7225  	// match: (Or16 x y)
  7226  	// cond:
  7227  	// result: (OR x y)
  7228  	for {
  7229  		_ = v.Args[1]
  7230  		x := v.Args[0]
  7231  		y := v.Args[1]
  7232  		v.reset(OpMIPSOR)
  7233  		v.AddArg(x)
  7234  		v.AddArg(y)
  7235  		return true
  7236  	}
  7237  }
  7238  func rewriteValueMIPS_OpOr32_0(v *Value) bool {
  7239  	// match: (Or32 x y)
  7240  	// cond:
  7241  	// result: (OR x y)
  7242  	for {
  7243  		_ = v.Args[1]
  7244  		x := v.Args[0]
  7245  		y := v.Args[1]
  7246  		v.reset(OpMIPSOR)
  7247  		v.AddArg(x)
  7248  		v.AddArg(y)
  7249  		return true
  7250  	}
  7251  }
  7252  func rewriteValueMIPS_OpOr8_0(v *Value) bool {
  7253  	// match: (Or8 x y)
  7254  	// cond:
  7255  	// result: (OR x y)
  7256  	for {
  7257  		_ = v.Args[1]
  7258  		x := v.Args[0]
  7259  		y := v.Args[1]
  7260  		v.reset(OpMIPSOR)
  7261  		v.AddArg(x)
  7262  		v.AddArg(y)
  7263  		return true
  7264  	}
  7265  }
  7266  func rewriteValueMIPS_OpOrB_0(v *Value) bool {
  7267  	// match: (OrB x y)
  7268  	// cond:
  7269  	// result: (OR x y)
  7270  	for {
  7271  		_ = v.Args[1]
  7272  		x := v.Args[0]
  7273  		y := v.Args[1]
  7274  		v.reset(OpMIPSOR)
  7275  		v.AddArg(x)
  7276  		v.AddArg(y)
  7277  		return true
  7278  	}
  7279  }
  7280  func rewriteValueMIPS_OpRound32F_0(v *Value) bool {
  7281  	// match: (Round32F x)
  7282  	// cond:
  7283  	// result: x
  7284  	for {
  7285  		x := v.Args[0]
  7286  		v.reset(OpCopy)
  7287  		v.Type = x.Type
  7288  		v.AddArg(x)
  7289  		return true
  7290  	}
  7291  }
  7292  func rewriteValueMIPS_OpRound64F_0(v *Value) bool {
  7293  	// match: (Round64F x)
  7294  	// cond:
  7295  	// result: x
  7296  	for {
  7297  		x := v.Args[0]
  7298  		v.reset(OpCopy)
  7299  		v.Type = x.Type
  7300  		v.AddArg(x)
  7301  		return true
  7302  	}
  7303  }
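// The variable-count unsigned right shifts widen the operands to 32 bits
// and guard the hardware SRL with CMOVZ: SGTUconst [32] tests whether the
// count is below 32, in which case the SRL result is kept; otherwise the
// constant 0 is selected, matching Go's requirement that shifting by the
// full register width or more yields zero.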
  7304  func rewriteValueMIPS_OpRsh16Ux16_0(v *Value) bool {
  7305  	b := v.Block
  7306  	_ = b
  7307  	typ := &b.Func.Config.Types
  7308  	_ = typ
  7309  	// match: (Rsh16Ux16 <t> x y)
  7310  	// cond:
  7311  	// result: (CMOVZ (SRL <t> (ZeroExt16to32 x) (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
  7312  	for {
  7313  		t := v.Type
  7314  		_ = v.Args[1]
  7315  		x := v.Args[0]
  7316  		y := v.Args[1]
  7317  		v.reset(OpMIPSCMOVZ)
  7318  		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7319  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7320  		v1.AddArg(x)
  7321  		v0.AddArg(v1)
  7322  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7323  		v2.AddArg(y)
  7324  		v0.AddArg(v2)
  7325  		v.AddArg(v0)
  7326  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7327  		v3.AuxInt = 0
  7328  		v.AddArg(v3)
  7329  		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7330  		v4.AuxInt = 32
  7331  		v5 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7332  		v5.AddArg(y)
  7333  		v4.AddArg(v5)
  7334  		v.AddArg(v4)
  7335  		return true
  7336  	}
  7337  }
  7338  func rewriteValueMIPS_OpRsh16Ux32_0(v *Value) bool {
  7339  	b := v.Block
  7340  	_ = b
  7341  	typ := &b.Func.Config.Types
  7342  	_ = typ
  7343  	// match: (Rsh16Ux32 <t> x y)
  7344  	// cond:
  7345  	// result: (CMOVZ (SRL <t> (ZeroExt16to32 x) y) (MOVWconst [0]) (SGTUconst [32] y))
  7346  	for {
  7347  		t := v.Type
  7348  		_ = v.Args[1]
  7349  		x := v.Args[0]
  7350  		y := v.Args[1]
  7351  		v.reset(OpMIPSCMOVZ)
  7352  		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7353  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7354  		v1.AddArg(x)
  7355  		v0.AddArg(v1)
  7356  		v0.AddArg(y)
  7357  		v.AddArg(v0)
  7358  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7359  		v2.AuxInt = 0
  7360  		v.AddArg(v2)
  7361  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7362  		v3.AuxInt = 32
  7363  		v3.AddArg(y)
  7364  		v.AddArg(v3)
  7365  		return true
  7366  	}
  7367  }
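// With a constant 64-bit shift count no CMOVZ guard is needed. For counts
// below 16, the value is first moved into the top half of the register
// (SLLconst [16]) so the following SRLconst [c+16] discards any high bits
// and shifts zeros in, avoiding a separate zero-extension; counts of 16 or
// more simply produce 0.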
  7368  func rewriteValueMIPS_OpRsh16Ux64_0(v *Value) bool {
  7369  	b := v.Block
  7370  	_ = b
  7371  	typ := &b.Func.Config.Types
  7372  	_ = typ
  7373  	// match: (Rsh16Ux64 x (Const64 [c]))
  7374  	// cond: uint32(c) < 16
  7375  	// result: (SRLconst (SLLconst <typ.UInt32> x [16]) [c+16])
  7376  	for {
  7377  		_ = v.Args[1]
  7378  		x := v.Args[0]
  7379  		v_1 := v.Args[1]
  7380  		if v_1.Op != OpConst64 {
  7381  			break
  7382  		}
  7383  		c := v_1.AuxInt
  7384  		if !(uint32(c) < 16) {
  7385  			break
  7386  		}
  7387  		v.reset(OpMIPSSRLconst)
  7388  		v.AuxInt = c + 16
  7389  		v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
  7390  		v0.AuxInt = 16
  7391  		v0.AddArg(x)
  7392  		v.AddArg(v0)
  7393  		return true
  7394  	}
  7395  	// match: (Rsh16Ux64 _ (Const64 [c]))
  7396  	// cond: uint32(c) >= 16
  7397  	// result: (MOVWconst [0])
  7398  	for {
  7399  		_ = v.Args[1]
  7400  		v_1 := v.Args[1]
  7401  		if v_1.Op != OpConst64 {
  7402  			break
  7403  		}
  7404  		c := v_1.AuxInt
  7405  		if !(uint32(c) >= 16) {
  7406  			break
  7407  		}
  7408  		v.reset(OpMIPSMOVWconst)
  7409  		v.AuxInt = 0
  7410  		return true
  7411  	}
  7412  	return false
  7413  }
  7414  func rewriteValueMIPS_OpRsh16Ux8_0(v *Value) bool {
  7415  	b := v.Block
  7416  	_ = b
  7417  	typ := &b.Func.Config.Types
  7418  	_ = typ
  7419  	// match: (Rsh16Ux8 <t> x y)
  7420  	// cond:
  7421  	// result: (CMOVZ (SRL <t> (ZeroExt16to32 x) (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
  7422  	for {
  7423  		t := v.Type
  7424  		_ = v.Args[1]
  7425  		x := v.Args[0]
  7426  		y := v.Args[1]
  7427  		v.reset(OpMIPSCMOVZ)
  7428  		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7429  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7430  		v1.AddArg(x)
  7431  		v0.AddArg(v1)
  7432  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7433  		v2.AddArg(y)
  7434  		v0.AddArg(v2)
  7435  		v.AddArg(v0)
  7436  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7437  		v3.AuxInt = 0
  7438  		v.AddArg(v3)
  7439  		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7440  		v4.AuxInt = 32
  7441  		v5 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7442  		v5.AddArg(y)
  7443  		v4.AddArg(v5)
  7444  		v.AddArg(v4)
  7445  		return true
  7446  	}
  7447  }
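// The signed right shifts clamp the count rather than zeroing the result:
// when the (zero-extended) count is 32 or more, CMOVZ substitutes -1, which
// the SRA rules earlier in this file, and the hardware (which uses only the
// low five bits of the count), treat as a shift by 31, so the result is all
// copies of the sign bit as Go requires.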
  7448  func rewriteValueMIPS_OpRsh16x16_0(v *Value) bool {
  7449  	b := v.Block
  7450  	_ = b
  7451  	typ := &b.Func.Config.Types
  7452  	_ = typ
  7453  	// match: (Rsh16x16 x y)
  7454  	// cond:
  7455  	// result: (SRA (SignExt16to32 x) ( CMOVZ <typ.UInt32> (ZeroExt16to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt16to32 y))))
  7456  	for {
  7457  		_ = v.Args[1]
  7458  		x := v.Args[0]
  7459  		y := v.Args[1]
  7460  		v.reset(OpMIPSSRA)
  7461  		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  7462  		v0.AddArg(x)
  7463  		v.AddArg(v0)
  7464  		v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
  7465  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7466  		v2.AddArg(y)
  7467  		v1.AddArg(v2)
  7468  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7469  		v3.AuxInt = -1
  7470  		v1.AddArg(v3)
  7471  		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7472  		v4.AuxInt = 32
  7473  		v5 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7474  		v5.AddArg(y)
  7475  		v4.AddArg(v5)
  7476  		v1.AddArg(v4)
  7477  		v.AddArg(v1)
  7478  		return true
  7479  	}
  7480  }
  7481  func rewriteValueMIPS_OpRsh16x32_0(v *Value) bool {
  7482  	b := v.Block
  7483  	_ = b
  7484  	typ := &b.Func.Config.Types
  7485  	_ = typ
  7486  	// match: (Rsh16x32 x y)
  7487  	// cond:
  7488  	// result: (SRA (SignExt16to32 x) ( CMOVZ <typ.UInt32> y (MOVWconst [-1]) (SGTUconst [32] y)))
  7489  	for {
  7490  		_ = v.Args[1]
  7491  		x := v.Args[0]
  7492  		y := v.Args[1]
  7493  		v.reset(OpMIPSSRA)
  7494  		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  7495  		v0.AddArg(x)
  7496  		v.AddArg(v0)
  7497  		v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
  7498  		v1.AddArg(y)
  7499  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7500  		v2.AuxInt = -1
  7501  		v1.AddArg(v2)
  7502  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7503  		v3.AuxInt = 32
  7504  		v3.AddArg(y)
  7505  		v1.AddArg(v3)
  7506  		v.AddArg(v1)
  7507  		return true
  7508  	}
  7509  }
  7510  func rewriteValueMIPS_OpRsh16x64_0(v *Value) bool {
  7511  	b := v.Block
  7512  	_ = b
  7513  	typ := &b.Func.Config.Types
  7514  	_ = typ
  7515  	// match: (Rsh16x64 x (Const64 [c]))
  7516  	// cond: uint32(c) < 16
  7517  	// result: (SRAconst (SLLconst <typ.UInt32> x [16]) [c+16])
  7518  	for {
  7519  		_ = v.Args[1]
  7520  		x := v.Args[0]
  7521  		v_1 := v.Args[1]
  7522  		if v_1.Op != OpConst64 {
  7523  			break
  7524  		}
  7525  		c := v_1.AuxInt
  7526  		if !(uint32(c) < 16) {
  7527  			break
  7528  		}
  7529  		v.reset(OpMIPSSRAconst)
  7530  		v.AuxInt = c + 16
  7531  		v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
  7532  		v0.AuxInt = 16
  7533  		v0.AddArg(x)
  7534  		v.AddArg(v0)
  7535  		return true
  7536  	}
  7537  	// match: (Rsh16x64 x (Const64 [c]))
  7538  	// cond: uint32(c) >= 16
  7539  	// result: (SRAconst (SLLconst <typ.UInt32> x [16]) [31])
  7540  	for {
  7541  		_ = v.Args[1]
  7542  		x := v.Args[0]
  7543  		v_1 := v.Args[1]
  7544  		if v_1.Op != OpConst64 {
  7545  			break
  7546  		}
  7547  		c := v_1.AuxInt
  7548  		if !(uint32(c) >= 16) {
  7549  			break
  7550  		}
  7551  		v.reset(OpMIPSSRAconst)
  7552  		v.AuxInt = 31
  7553  		v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
  7554  		v0.AuxInt = 16
  7555  		v0.AddArg(x)
  7556  		v.AddArg(v0)
  7557  		return true
  7558  	}
  7559  	return false
  7560  }
  7561  func rewriteValueMIPS_OpRsh16x8_0(v *Value) bool {
  7562  	b := v.Block
  7563  	_ = b
  7564  	typ := &b.Func.Config.Types
  7565  	_ = typ
  7566  	// match: (Rsh16x8 x y)
  7567  	// cond:
  7568  	// result: (SRA (SignExt16to32 x) ( CMOVZ <typ.UInt32> (ZeroExt8to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt8to32 y))))
  7569  	for {
  7570  		_ = v.Args[1]
  7571  		x := v.Args[0]
  7572  		y := v.Args[1]
  7573  		v.reset(OpMIPSSRA)
  7574  		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  7575  		v0.AddArg(x)
  7576  		v.AddArg(v0)
  7577  		v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
  7578  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7579  		v2.AddArg(y)
  7580  		v1.AddArg(v2)
  7581  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7582  		v3.AuxInt = -1
  7583  		v1.AddArg(v3)
  7584  		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7585  		v4.AuxInt = 32
  7586  		v5 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7587  		v5.AddArg(y)
  7588  		v4.AddArg(v5)
  7589  		v1.AddArg(v4)
  7590  		v.AddArg(v1)
  7591  		return true
  7592  	}
  7593  }
  7594  func rewriteValueMIPS_OpRsh32Ux16_0(v *Value) bool {
  7595  	b := v.Block
  7596  	_ = b
  7597  	typ := &b.Func.Config.Types
  7598  	_ = typ
  7599  	// match: (Rsh32Ux16 <t> x y)
  7600  	// cond:
  7601  	// result: (CMOVZ (SRL <t> x (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
  7602  	for {
  7603  		t := v.Type
  7604  		_ = v.Args[1]
  7605  		x := v.Args[0]
  7606  		y := v.Args[1]
  7607  		v.reset(OpMIPSCMOVZ)
  7608  		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7609  		v0.AddArg(x)
  7610  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7611  		v1.AddArg(y)
  7612  		v0.AddArg(v1)
  7613  		v.AddArg(v0)
  7614  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7615  		v2.AuxInt = 0
  7616  		v.AddArg(v2)
  7617  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7618  		v3.AuxInt = 32
  7619  		v4 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7620  		v4.AddArg(y)
  7621  		v3.AddArg(v4)
  7622  		v.AddArg(v3)
  7623  		return true
  7624  	}
  7625  }
  7626  func rewriteValueMIPS_OpRsh32Ux32_0(v *Value) bool {
  7627  	b := v.Block
  7628  	_ = b
  7629  	typ := &b.Func.Config.Types
  7630  	_ = typ
  7631  	// match: (Rsh32Ux32 <t> x y)
  7632  	// cond:
  7633  	// result: (CMOVZ (SRL <t> x y) (MOVWconst [0]) (SGTUconst [32] y))
  7634  	for {
  7635  		t := v.Type
  7636  		_ = v.Args[1]
  7637  		x := v.Args[0]
  7638  		y := v.Args[1]
  7639  		v.reset(OpMIPSCMOVZ)
  7640  		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7641  		v0.AddArg(x)
  7642  		v0.AddArg(y)
  7643  		v.AddArg(v0)
  7644  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7645  		v1.AuxInt = 0
  7646  		v.AddArg(v1)
  7647  		v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7648  		v2.AuxInt = 32
  7649  		v2.AddArg(y)
  7650  		v.AddArg(v2)
  7651  		return true
  7652  	}
  7653  }
  7654  func rewriteValueMIPS_OpRsh32Ux64_0(v *Value) bool {
  7655  	// match: (Rsh32Ux64 x (Const64 [c]))
  7656  	// cond: uint32(c) < 32
  7657  	// result: (SRLconst x [c])
  7658  	for {
  7659  		_ = v.Args[1]
  7660  		x := v.Args[0]
  7661  		v_1 := v.Args[1]
  7662  		if v_1.Op != OpConst64 {
  7663  			break
  7664  		}
  7665  		c := v_1.AuxInt
  7666  		if !(uint32(c) < 32) {
  7667  			break
  7668  		}
  7669  		v.reset(OpMIPSSRLconst)
  7670  		v.AuxInt = c
  7671  		v.AddArg(x)
  7672  		return true
  7673  	}
  7674  	// match: (Rsh32Ux64 _ (Const64 [c]))
  7675  	// cond: uint32(c) >= 32
  7676  	// result: (MOVWconst [0])
  7677  	for {
  7678  		_ = v.Args[1]
  7679  		v_1 := v.Args[1]
  7680  		if v_1.Op != OpConst64 {
  7681  			break
  7682  		}
  7683  		c := v_1.AuxInt
  7684  		if !(uint32(c) >= 32) {
  7685  			break
  7686  		}
  7687  		v.reset(OpMIPSMOVWconst)
  7688  		v.AuxInt = 0
  7689  		return true
  7690  	}
  7691  	return false
  7692  }
  7693  func rewriteValueMIPS_OpRsh32Ux8_0(v *Value) bool {
  7694  	b := v.Block
  7695  	_ = b
  7696  	typ := &b.Func.Config.Types
  7697  	_ = typ
  7698  	// match: (Rsh32Ux8 <t> x y)
  7699  	// cond:
  7700  	// result: (CMOVZ (SRL <t> x (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
  7701  	for {
  7702  		t := v.Type
  7703  		_ = v.Args[1]
  7704  		x := v.Args[0]
  7705  		y := v.Args[1]
  7706  		v.reset(OpMIPSCMOVZ)
  7707  		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7708  		v0.AddArg(x)
  7709  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7710  		v1.AddArg(y)
  7711  		v0.AddArg(v1)
  7712  		v.AddArg(v0)
  7713  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7714  		v2.AuxInt = 0
  7715  		v.AddArg(v2)
  7716  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7717  		v3.AuxInt = 32
  7718  		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7719  		v4.AddArg(y)
  7720  		v3.AddArg(v4)
  7721  		v.AddArg(v3)
  7722  		return true
  7723  	}
  7724  }
  7725  func rewriteValueMIPS_OpRsh32x16_0(v *Value) bool {
  7726  	b := v.Block
  7727  	_ = b
  7728  	typ := &b.Func.Config.Types
  7729  	_ = typ
  7730  	// match: (Rsh32x16 x y)
  7731  	// cond:
  7732  	// result: (SRA x ( CMOVZ <typ.UInt32> (ZeroExt16to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt16to32 y))))
  7733  	for {
  7734  		_ = v.Args[1]
  7735  		x := v.Args[0]
  7736  		y := v.Args[1]
  7737  		v.reset(OpMIPSSRA)
  7738  		v.AddArg(x)
  7739  		v0 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
  7740  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7741  		v1.AddArg(y)
  7742  		v0.AddArg(v1)
  7743  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7744  		v2.AuxInt = -1
  7745  		v0.AddArg(v2)
  7746  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7747  		v3.AuxInt = 32
  7748  		v4 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7749  		v4.AddArg(y)
  7750  		v3.AddArg(v4)
  7751  		v0.AddArg(v3)
  7752  		v.AddArg(v0)
  7753  		return true
  7754  	}
  7755  }
  7756  func rewriteValueMIPS_OpRsh32x32_0(v *Value) bool {
  7757  	b := v.Block
  7758  	_ = b
  7759  	typ := &b.Func.Config.Types
  7760  	_ = typ
  7761  	// match: (Rsh32x32 x y)
  7762  	// cond:
  7763  	// result: (SRA x ( CMOVZ <typ.UInt32> y (MOVWconst [-1]) (SGTUconst [32] y)))
  7764  	for {
  7765  		_ = v.Args[1]
  7766  		x := v.Args[0]
  7767  		y := v.Args[1]
  7768  		v.reset(OpMIPSSRA)
  7769  		v.AddArg(x)
  7770  		v0 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
  7771  		v0.AddArg(y)
  7772  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7773  		v1.AuxInt = -1
  7774  		v0.AddArg(v1)
  7775  		v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7776  		v2.AuxInt = 32
  7777  		v2.AddArg(y)
  7778  		v0.AddArg(v2)
  7779  		v.AddArg(v0)
  7780  		return true
  7781  	}
  7782  }
  7783  func rewriteValueMIPS_OpRsh32x64_0(v *Value) bool {
  7784  	// match: (Rsh32x64 x (Const64 [c]))
  7785  	// cond: uint32(c) < 32
  7786  	// result: (SRAconst x [c])
  7787  	for {
  7788  		_ = v.Args[1]
  7789  		x := v.Args[0]
  7790  		v_1 := v.Args[1]
  7791  		if v_1.Op != OpConst64 {
  7792  			break
  7793  		}
  7794  		c := v_1.AuxInt
  7795  		if !(uint32(c) < 32) {
  7796  			break
  7797  		}
  7798  		v.reset(OpMIPSSRAconst)
  7799  		v.AuxInt = c
  7800  		v.AddArg(x)
  7801  		return true
  7802  	}
  7803  	// match: (Rsh32x64 x (Const64 [c]))
  7804  	// cond: uint32(c) >= 32
  7805  	// result: (SRAconst x [31])
  7806  	for {
  7807  		_ = v.Args[1]
  7808  		x := v.Args[0]
  7809  		v_1 := v.Args[1]
  7810  		if v_1.Op != OpConst64 {
  7811  			break
  7812  		}
  7813  		c := v_1.AuxInt
  7814  		if !(uint32(c) >= 32) {
  7815  			break
  7816  		}
  7817  		v.reset(OpMIPSSRAconst)
  7818  		v.AuxInt = 31
  7819  		v.AddArg(x)
  7820  		return true
  7821  	}
  7822  	return false
  7823  }
  7824  func rewriteValueMIPS_OpRsh32x8_0(v *Value) bool {
  7825  	b := v.Block
  7826  	_ = b
  7827  	typ := &b.Func.Config.Types
  7828  	_ = typ
  7829  	// match: (Rsh32x8 x y)
  7830  	// cond:
  7831  	// result: (SRA x ( CMOVZ <typ.UInt32> (ZeroExt8to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt8to32 y))))
  7832  	for {
  7833  		_ = v.Args[1]
  7834  		x := v.Args[0]
  7835  		y := v.Args[1]
  7836  		v.reset(OpMIPSSRA)
  7837  		v.AddArg(x)
  7838  		v0 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
  7839  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7840  		v1.AddArg(y)
  7841  		v0.AddArg(v1)
  7842  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7843  		v2.AuxInt = -1
  7844  		v0.AddArg(v2)
  7845  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7846  		v3.AuxInt = 32
  7847  		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7848  		v4.AddArg(y)
  7849  		v3.AddArg(v4)
  7850  		v0.AddArg(v3)
  7851  		v.AddArg(v0)
  7852  		return true
  7853  	}
  7854  }
  7855  func rewriteValueMIPS_OpRsh8Ux16_0(v *Value) bool {
  7856  	b := v.Block
  7857  	_ = b
  7858  	typ := &b.Func.Config.Types
  7859  	_ = typ
  7860  	// match: (Rsh8Ux16 <t> x y)
  7861  	// cond:
  7862  	// result: (CMOVZ (SRL <t> (ZeroExt8to32 x) (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
  7863  	for {
  7864  		t := v.Type
  7865  		_ = v.Args[1]
  7866  		x := v.Args[0]
  7867  		y := v.Args[1]
  7868  		v.reset(OpMIPSCMOVZ)
  7869  		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7870  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7871  		v1.AddArg(x)
  7872  		v0.AddArg(v1)
  7873  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7874  		v2.AddArg(y)
  7875  		v0.AddArg(v2)
  7876  		v.AddArg(v0)
  7877  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7878  		v3.AuxInt = 0
  7879  		v.AddArg(v3)
  7880  		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7881  		v4.AuxInt = 32
  7882  		v5 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7883  		v5.AddArg(y)
  7884  		v4.AddArg(v5)
  7885  		v.AddArg(v4)
  7886  		return true
  7887  	}
  7888  }
  7889  func rewriteValueMIPS_OpRsh8Ux32_0(v *Value) bool {
  7890  	b := v.Block
  7891  	_ = b
  7892  	typ := &b.Func.Config.Types
  7893  	_ = typ
  7894  	// match: (Rsh8Ux32 <t> x y)
  7895  	// cond:
  7896  	// result: (CMOVZ (SRL <t> (ZeroExt8to32 x) y) (MOVWconst [0]) (SGTUconst [32] y))
  7897  	for {
  7898  		t := v.Type
  7899  		_ = v.Args[1]
  7900  		x := v.Args[0]
  7901  		y := v.Args[1]
  7902  		v.reset(OpMIPSCMOVZ)
  7903  		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7904  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7905  		v1.AddArg(x)
  7906  		v0.AddArg(v1)
  7907  		v0.AddArg(y)
  7908  		v.AddArg(v0)
  7909  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7910  		v2.AuxInt = 0
  7911  		v.AddArg(v2)
  7912  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7913  		v3.AuxInt = 32
  7914  		v3.AddArg(y)
  7915  		v.AddArg(v3)
  7916  		return true
  7917  	}
  7918  }
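        // For constant unsigned shifts of an 8-bit value, the rules below first
        // shift the operand left by 24 so that the junk above the low byte is
        // discarded, then shift right logically by c+24; counts of 8 or more
        // simply produce zero.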
  7919  func rewriteValueMIPS_OpRsh8Ux64_0(v *Value) bool {
  7920  	b := v.Block
  7921  	_ = b
  7922  	typ := &b.Func.Config.Types
  7923  	_ = typ
  7924  	// match: (Rsh8Ux64 x (Const64 [c]))
  7925  	// cond: uint32(c) < 8
  7926  	// result: (SRLconst (SLLconst <typ.UInt32> x [24]) [c+24])
  7927  	for {
  7928  		_ = v.Args[1]
  7929  		x := v.Args[0]
  7930  		v_1 := v.Args[1]
  7931  		if v_1.Op != OpConst64 {
  7932  			break
  7933  		}
  7934  		c := v_1.AuxInt
  7935  		if !(uint32(c) < 8) {
  7936  			break
  7937  		}
  7938  		v.reset(OpMIPSSRLconst)
  7939  		v.AuxInt = c + 24
  7940  		v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
  7941  		v0.AuxInt = 24
  7942  		v0.AddArg(x)
  7943  		v.AddArg(v0)
  7944  		return true
  7945  	}
  7946  	// match: (Rsh8Ux64 _ (Const64 [c]))
  7947  	// cond: uint32(c) >= 8
  7948  	// result: (MOVWconst [0])
  7949  	for {
  7950  		_ = v.Args[1]
  7951  		v_1 := v.Args[1]
  7952  		if v_1.Op != OpConst64 {
  7953  			break
  7954  		}
  7955  		c := v_1.AuxInt
  7956  		if !(uint32(c) >= 8) {
  7957  			break
  7958  		}
  7959  		v.reset(OpMIPSMOVWconst)
  7960  		v.AuxInt = 0
  7961  		return true
  7962  	}
  7963  	return false
  7964  }
  7965  func rewriteValueMIPS_OpRsh8Ux8_0(v *Value) bool {
  7966  	b := v.Block
  7967  	_ = b
  7968  	typ := &b.Func.Config.Types
  7969  	_ = typ
  7970  	// match: (Rsh8Ux8 <t> x y)
  7971  	// cond:
  7972  	// result: (CMOVZ (SRL <t> (ZeroExt8to32 x) (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
  7973  	for {
  7974  		t := v.Type
  7975  		_ = v.Args[1]
  7976  		x := v.Args[0]
  7977  		y := v.Args[1]
  7978  		v.reset(OpMIPSCMOVZ)
  7979  		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7980  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7981  		v1.AddArg(x)
  7982  		v0.AddArg(v1)
  7983  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7984  		v2.AddArg(y)
  7985  		v0.AddArg(v2)
  7986  		v.AddArg(v0)
  7987  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7988  		v3.AuxInt = 0
  7989  		v.AddArg(v3)
  7990  		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7991  		v4.AuxInt = 32
  7992  		v5 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7993  		v5.AddArg(y)
  7994  		v4.AddArg(v5)
  7995  		v.AddArg(v4)
  7996  		return true
  7997  	}
  7998  }
  7999  func rewriteValueMIPS_OpRsh8x16_0(v *Value) bool {
  8000  	b := v.Block
  8001  	_ = b
  8002  	typ := &b.Func.Config.Types
  8003  	_ = typ
  8004  	// match: (Rsh8x16 x y)
  8005  	// cond:
  8006  	// result: (SRA (SignExt16to32 x) ( CMOVZ <typ.UInt32> (ZeroExt16to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt16to32 y))))
  8007  	for {
  8008  		_ = v.Args[1]
  8009  		x := v.Args[0]
  8010  		y := v.Args[1]
  8011  		v.reset(OpMIPSSRA)
  8012  		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  8013  		v0.AddArg(x)
  8014  		v.AddArg(v0)
  8015  		v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
  8016  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  8017  		v2.AddArg(y)
  8018  		v1.AddArg(v2)
  8019  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  8020  		v3.AuxInt = -1
  8021  		v1.AddArg(v3)
  8022  		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  8023  		v4.AuxInt = 32
  8024  		v5 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  8025  		v5.AddArg(y)
  8026  		v4.AddArg(v5)
  8027  		v1.AddArg(v4)
  8028  		v.AddArg(v1)
  8029  		return true
  8030  	}
  8031  }
  8032  func rewriteValueMIPS_OpRsh8x32_0(v *Value) bool {
  8033  	b := v.Block
  8034  	_ = b
  8035  	typ := &b.Func.Config.Types
  8036  	_ = typ
  8037  	// match: (Rsh8x32 x y)
  8038  	// cond:
  8039  	// result: (SRA (SignExt16to32 x) ( CMOVZ <typ.UInt32> y (MOVWconst [-1]) (SGTUconst [32] y)))
  8040  	for {
  8041  		_ = v.Args[1]
  8042  		x := v.Args[0]
  8043  		y := v.Args[1]
  8044  		v.reset(OpMIPSSRA)
  8045  		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  8046  		v0.AddArg(x)
  8047  		v.AddArg(v0)
  8048  		v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
  8049  		v1.AddArg(y)
  8050  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  8051  		v2.AuxInt = -1
  8052  		v1.AddArg(v2)
  8053  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  8054  		v3.AuxInt = 32
  8055  		v3.AddArg(y)
  8056  		v1.AddArg(v3)
  8057  		v.AddArg(v1)
  8058  		return true
  8059  	}
  8060  }
  8061  func rewriteValueMIPS_OpRsh8x64_0(v *Value) bool {
  8062  	b := v.Block
  8063  	_ = b
  8064  	typ := &b.Func.Config.Types
  8065  	_ = typ
  8066  	// match: (Rsh8x64 x (Const64 [c]))
  8067  	// cond: uint32(c) < 8
  8068  	// result: (SRAconst (SLLconst <typ.UInt32> x [24]) [c+24])
  8069  	for {
  8070  		_ = v.Args[1]
  8071  		x := v.Args[0]
  8072  		v_1 := v.Args[1]
  8073  		if v_1.Op != OpConst64 {
  8074  			break
  8075  		}
  8076  		c := v_1.AuxInt
  8077  		if !(uint32(c) < 8) {
  8078  			break
  8079  		}
  8080  		v.reset(OpMIPSSRAconst)
  8081  		v.AuxInt = c + 24
  8082  		v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
  8083  		v0.AuxInt = 24
  8084  		v0.AddArg(x)
  8085  		v.AddArg(v0)
  8086  		return true
  8087  	}
  8088  	// match: (Rsh8x64 x (Const64 [c]))
  8089  	// cond: uint32(c) >= 8
  8090  	// result: (SRAconst (SLLconst <typ.UInt32> x [24]) [31])
  8091  	for {
  8092  		_ = v.Args[1]
  8093  		x := v.Args[0]
  8094  		v_1 := v.Args[1]
  8095  		if v_1.Op != OpConst64 {
  8096  			break
  8097  		}
  8098  		c := v_1.AuxInt
  8099  		if !(uint32(c) >= 8) {
  8100  			break
  8101  		}
  8102  		v.reset(OpMIPSSRAconst)
  8103  		v.AuxInt = 31
  8104  		v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
  8105  		v0.AuxInt = 24
  8106  		v0.AddArg(x)
  8107  		v.AddArg(v0)
  8108  		return true
  8109  	}
  8110  	return false
  8111  }
  8112  func rewriteValueMIPS_OpRsh8x8_0(v *Value) bool {
  8113  	b := v.Block
  8114  	_ = b
  8115  	typ := &b.Func.Config.Types
  8116  	_ = typ
  8117  	// match: (Rsh8x8 x y)
  8118  	// cond:
  8119  	// result: (SRA (SignExt16to32 x) ( CMOVZ <typ.UInt32> (ZeroExt8to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt8to32 y))))
  8120  	for {
  8121  		_ = v.Args[1]
  8122  		x := v.Args[0]
  8123  		y := v.Args[1]
  8124  		v.reset(OpMIPSSRA)
  8125  		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  8126  		v0.AddArg(x)
  8127  		v.AddArg(v0)
  8128  		v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
  8129  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  8130  		v2.AddArg(y)
  8131  		v1.AddArg(v2)
  8132  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  8133  		v3.AuxInt = -1
  8134  		v1.AddArg(v3)
  8135  		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  8136  		v4.AuxInt = 32
  8137  		v5 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  8138  		v5.AddArg(y)
  8139  		v4.AddArg(v5)
  8140  		v1.AddArg(v4)
  8141  		v.AddArg(v1)
  8142  		return true
  8143  	}
  8144  }
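        // Select0 and Select1 pick apart the two-result MIPS operations: for MULTU,
        // Select0 is the high 32 bits of the 64-bit unsigned product and Select1 the
        // low 32 bits; for DIV and DIVU, Select0 is the remainder and Select1 the
        // quotient (the HI and LO registers, respectively).  The rules strength-reduce
        // multiplications by 0, 1, -1 and powers of two, and constant-fold the fully
        // constant cases.  The rule generator appears to split each op's rules into
        // chunks of ten, hence the _0 and _10 suffixes on these functions.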
  8145  func rewriteValueMIPS_OpSelect0_0(v *Value) bool {
  8146  	b := v.Block
  8147  	_ = b
  8148  	typ := &b.Func.Config.Types
  8149  	_ = typ
  8150  	// match: (Select0 (Add32carry <t> x y))
  8151  	// cond:
  8152  	// result: (ADD <t.FieldType(0)> x y)
  8153  	for {
  8154  		v_0 := v.Args[0]
  8155  		if v_0.Op != OpAdd32carry {
  8156  			break
  8157  		}
  8158  		t := v_0.Type
  8159  		_ = v_0.Args[1]
  8160  		x := v_0.Args[0]
  8161  		y := v_0.Args[1]
  8162  		v.reset(OpMIPSADD)
  8163  		v.Type = t.FieldType(0)
  8164  		v.AddArg(x)
  8165  		v.AddArg(y)
  8166  		return true
  8167  	}
  8168  	// match: (Select0 (Sub32carry <t> x y))
  8169  	// cond:
  8170  	// result: (SUB <t.FieldType(0)> x y)
  8171  	for {
  8172  		v_0 := v.Args[0]
  8173  		if v_0.Op != OpSub32carry {
  8174  			break
  8175  		}
  8176  		t := v_0.Type
  8177  		_ = v_0.Args[1]
  8178  		x := v_0.Args[0]
  8179  		y := v_0.Args[1]
  8180  		v.reset(OpMIPSSUB)
  8181  		v.Type = t.FieldType(0)
  8182  		v.AddArg(x)
  8183  		v.AddArg(y)
  8184  		return true
  8185  	}
  8186  	// match: (Select0 (MULTU (MOVWconst [0]) _))
  8187  	// cond:
  8188  	// result: (MOVWconst [0])
  8189  	for {
  8190  		v_0 := v.Args[0]
  8191  		if v_0.Op != OpMIPSMULTU {
  8192  			break
  8193  		}
  8194  		_ = v_0.Args[1]
  8195  		v_0_0 := v_0.Args[0]
  8196  		if v_0_0.Op != OpMIPSMOVWconst {
  8197  			break
  8198  		}
  8199  		if v_0_0.AuxInt != 0 {
  8200  			break
  8201  		}
  8202  		v.reset(OpMIPSMOVWconst)
  8203  		v.AuxInt = 0
  8204  		return true
  8205  	}
  8206  	// match: (Select0 (MULTU _ (MOVWconst [0])))
  8207  	// cond:
  8208  	// result: (MOVWconst [0])
  8209  	for {
  8210  		v_0 := v.Args[0]
  8211  		if v_0.Op != OpMIPSMULTU {
  8212  			break
  8213  		}
  8214  		_ = v_0.Args[1]
  8215  		v_0_1 := v_0.Args[1]
  8216  		if v_0_1.Op != OpMIPSMOVWconst {
  8217  			break
  8218  		}
  8219  		if v_0_1.AuxInt != 0 {
  8220  			break
  8221  		}
  8222  		v.reset(OpMIPSMOVWconst)
  8223  		v.AuxInt = 0
  8224  		return true
  8225  	}
  8226  	// match: (Select0 (MULTU (MOVWconst [1]) _))
  8227  	// cond:
  8228  	// result: (MOVWconst [0])
  8229  	for {
  8230  		v_0 := v.Args[0]
  8231  		if v_0.Op != OpMIPSMULTU {
  8232  			break
  8233  		}
  8234  		_ = v_0.Args[1]
  8235  		v_0_0 := v_0.Args[0]
  8236  		if v_0_0.Op != OpMIPSMOVWconst {
  8237  			break
  8238  		}
  8239  		if v_0_0.AuxInt != 1 {
  8240  			break
  8241  		}
  8242  		v.reset(OpMIPSMOVWconst)
  8243  		v.AuxInt = 0
  8244  		return true
  8245  	}
  8246  	// match: (Select0 (MULTU _ (MOVWconst [1])))
  8247  	// cond:
  8248  	// result: (MOVWconst [0])
  8249  	for {
  8250  		v_0 := v.Args[0]
  8251  		if v_0.Op != OpMIPSMULTU {
  8252  			break
  8253  		}
  8254  		_ = v_0.Args[1]
  8255  		v_0_1 := v_0.Args[1]
  8256  		if v_0_1.Op != OpMIPSMOVWconst {
  8257  			break
  8258  		}
  8259  		if v_0_1.AuxInt != 1 {
  8260  			break
  8261  		}
  8262  		v.reset(OpMIPSMOVWconst)
  8263  		v.AuxInt = 0
  8264  		return true
  8265  	}
  8266  	// match: (Select0 (MULTU (MOVWconst [-1]) x))
  8267  	// cond:
  8268  	// result: (CMOVZ (ADDconst <x.Type> [-1] x) (MOVWconst [0]) x)
  8269  	for {
  8270  		v_0 := v.Args[0]
  8271  		if v_0.Op != OpMIPSMULTU {
  8272  			break
  8273  		}
  8274  		_ = v_0.Args[1]
  8275  		v_0_0 := v_0.Args[0]
  8276  		if v_0_0.Op != OpMIPSMOVWconst {
  8277  			break
  8278  		}
  8279  		if v_0_0.AuxInt != -1 {
  8280  			break
  8281  		}
  8282  		x := v_0.Args[1]
  8283  		v.reset(OpMIPSCMOVZ)
  8284  		v0 := b.NewValue0(v.Pos, OpMIPSADDconst, x.Type)
  8285  		v0.AuxInt = -1
  8286  		v0.AddArg(x)
  8287  		v.AddArg(v0)
  8288  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  8289  		v1.AuxInt = 0
  8290  		v.AddArg(v1)
  8291  		v.AddArg(x)
  8292  		return true
  8293  	}
  8294  	// match: (Select0 (MULTU x (MOVWconst [-1])))
  8295  	// cond:
  8296  	// result: (CMOVZ (ADDconst <x.Type> [-1] x) (MOVWconst [0]) x)
  8297  	for {
  8298  		v_0 := v.Args[0]
  8299  		if v_0.Op != OpMIPSMULTU {
  8300  			break
  8301  		}
  8302  		_ = v_0.Args[1]
  8303  		x := v_0.Args[0]
  8304  		v_0_1 := v_0.Args[1]
  8305  		if v_0_1.Op != OpMIPSMOVWconst {
  8306  			break
  8307  		}
  8308  		if v_0_1.AuxInt != -1 {
  8309  			break
  8310  		}
  8311  		v.reset(OpMIPSCMOVZ)
  8312  		v0 := b.NewValue0(v.Pos, OpMIPSADDconst, x.Type)
  8313  		v0.AuxInt = -1
  8314  		v0.AddArg(x)
  8315  		v.AddArg(v0)
  8316  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  8317  		v1.AuxInt = 0
  8318  		v.AddArg(v1)
  8319  		v.AddArg(x)
  8320  		return true
  8321  	}
  8322  	// match: (Select0 (MULTU (MOVWconst [c]) x))
  8323  	// cond: isPowerOfTwo(int64(uint32(c)))
  8324  	// result: (SRLconst [32-log2(int64(uint32(c)))] x)
  8325  	for {
  8326  		v_0 := v.Args[0]
  8327  		if v_0.Op != OpMIPSMULTU {
  8328  			break
  8329  		}
  8330  		_ = v_0.Args[1]
  8331  		v_0_0 := v_0.Args[0]
  8332  		if v_0_0.Op != OpMIPSMOVWconst {
  8333  			break
  8334  		}
  8335  		c := v_0_0.AuxInt
  8336  		x := v_0.Args[1]
  8337  		if !(isPowerOfTwo(int64(uint32(c)))) {
  8338  			break
  8339  		}
  8340  		v.reset(OpMIPSSRLconst)
  8341  		v.AuxInt = 32 - log2(int64(uint32(c)))
  8342  		v.AddArg(x)
  8343  		return true
  8344  	}
  8345  	// match: (Select0 (MULTU x (MOVWconst [c])))
  8346  	// cond: isPowerOfTwo(int64(uint32(c)))
  8347  	// result: (SRLconst [32-log2(int64(uint32(c)))] x)
  8348  	for {
  8349  		v_0 := v.Args[0]
  8350  		if v_0.Op != OpMIPSMULTU {
  8351  			break
  8352  		}
  8353  		_ = v_0.Args[1]
  8354  		x := v_0.Args[0]
  8355  		v_0_1 := v_0.Args[1]
  8356  		if v_0_1.Op != OpMIPSMOVWconst {
  8357  			break
  8358  		}
  8359  		c := v_0_1.AuxInt
  8360  		if !(isPowerOfTwo(int64(uint32(c)))) {
  8361  			break
  8362  		}
  8363  		v.reset(OpMIPSSRLconst)
  8364  		v.AuxInt = 32 - log2(int64(uint32(c)))
  8365  		v.AddArg(x)
  8366  		return true
  8367  	}
  8368  	return false
  8369  }
  8370  func rewriteValueMIPS_OpSelect0_10(v *Value) bool {
  8371  	// match: (Select0 (MULTU (MOVWconst [c]) (MOVWconst [d])))
  8372  	// cond:
  8373  	// result: (MOVWconst [(c*d)>>32])
  8374  	for {
  8375  		v_0 := v.Args[0]
  8376  		if v_0.Op != OpMIPSMULTU {
  8377  			break
  8378  		}
  8379  		_ = v_0.Args[1]
  8380  		v_0_0 := v_0.Args[0]
  8381  		if v_0_0.Op != OpMIPSMOVWconst {
  8382  			break
  8383  		}
  8384  		c := v_0_0.AuxInt
  8385  		v_0_1 := v_0.Args[1]
  8386  		if v_0_1.Op != OpMIPSMOVWconst {
  8387  			break
  8388  		}
  8389  		d := v_0_1.AuxInt
  8390  		v.reset(OpMIPSMOVWconst)
  8391  		v.AuxInt = (c * d) >> 32
  8392  		return true
  8393  	}
  8394  	// match: (Select0 (MULTU (MOVWconst [d]) (MOVWconst [c])))
  8395  	// cond:
  8396  	// result: (MOVWconst [(c*d)>>32])
  8397  	for {
  8398  		v_0 := v.Args[0]
  8399  		if v_0.Op != OpMIPSMULTU {
  8400  			break
  8401  		}
  8402  		_ = v_0.Args[1]
  8403  		v_0_0 := v_0.Args[0]
  8404  		if v_0_0.Op != OpMIPSMOVWconst {
  8405  			break
  8406  		}
  8407  		d := v_0_0.AuxInt
  8408  		v_0_1 := v_0.Args[1]
  8409  		if v_0_1.Op != OpMIPSMOVWconst {
  8410  			break
  8411  		}
  8412  		c := v_0_1.AuxInt
  8413  		v.reset(OpMIPSMOVWconst)
  8414  		v.AuxInt = (c * d) >> 32
  8415  		return true
  8416  	}
  8417  	// match: (Select0 (DIV (MOVWconst [c]) (MOVWconst [d])))
  8418  	// cond:
  8419  	// result: (MOVWconst [int64(int32(c)%int32(d))])
  8420  	for {
  8421  		v_0 := v.Args[0]
  8422  		if v_0.Op != OpMIPSDIV {
  8423  			break
  8424  		}
  8425  		_ = v_0.Args[1]
  8426  		v_0_0 := v_0.Args[0]
  8427  		if v_0_0.Op != OpMIPSMOVWconst {
  8428  			break
  8429  		}
  8430  		c := v_0_0.AuxInt
  8431  		v_0_1 := v_0.Args[1]
  8432  		if v_0_1.Op != OpMIPSMOVWconst {
  8433  			break
  8434  		}
  8435  		d := v_0_1.AuxInt
  8436  		v.reset(OpMIPSMOVWconst)
  8437  		v.AuxInt = int64(int32(c) % int32(d))
  8438  		return true
  8439  	}
  8440  	// match: (Select0 (DIVU (MOVWconst [c]) (MOVWconst [d])))
  8441  	// cond:
  8442  	// result: (MOVWconst [int64(int32(uint32(c)%uint32(d)))])
  8443  	for {
  8444  		v_0 := v.Args[0]
  8445  		if v_0.Op != OpMIPSDIVU {
  8446  			break
  8447  		}
  8448  		_ = v_0.Args[1]
  8449  		v_0_0 := v_0.Args[0]
  8450  		if v_0_0.Op != OpMIPSMOVWconst {
  8451  			break
  8452  		}
  8453  		c := v_0_0.AuxInt
  8454  		v_0_1 := v_0.Args[1]
  8455  		if v_0_1.Op != OpMIPSMOVWconst {
  8456  			break
  8457  		}
  8458  		d := v_0_1.AuxInt
  8459  		v.reset(OpMIPSMOVWconst)
  8460  		v.AuxInt = int64(int32(uint32(c) % uint32(d)))
  8461  		return true
  8462  	}
  8463  	return false
  8464  }
  8465  func rewriteValueMIPS_OpSelect1_0(v *Value) bool {
  8466  	b := v.Block
  8467  	_ = b
  8468  	typ := &b.Func.Config.Types
  8469  	_ = typ
  8470  	// match: (Select1 (Add32carry <t> x y))
  8471  	// cond:
  8472  	// result: (SGTU <typ.Bool> x (ADD <t.FieldType(0)> x y))
  8473  	for {
  8474  		v_0 := v.Args[0]
  8475  		if v_0.Op != OpAdd32carry {
  8476  			break
  8477  		}
  8478  		t := v_0.Type
  8479  		_ = v_0.Args[1]
  8480  		x := v_0.Args[0]
  8481  		y := v_0.Args[1]
  8482  		v.reset(OpMIPSSGTU)
  8483  		v.Type = typ.Bool
  8484  		v.AddArg(x)
  8485  		v0 := b.NewValue0(v.Pos, OpMIPSADD, t.FieldType(0))
  8486  		v0.AddArg(x)
  8487  		v0.AddArg(y)
  8488  		v.AddArg(v0)
  8489  		return true
  8490  	}
  8491  	// match: (Select1 (Sub32carry <t> x y))
  8492  	// cond:
  8493  	// result: (SGTU <typ.Bool> (SUB <t.FieldType(0)> x y) x)
  8494  	for {
  8495  		v_0 := v.Args[0]
  8496  		if v_0.Op != OpSub32carry {
  8497  			break
  8498  		}
  8499  		t := v_0.Type
  8500  		_ = v_0.Args[1]
  8501  		x := v_0.Args[0]
  8502  		y := v_0.Args[1]
  8503  		v.reset(OpMIPSSGTU)
  8504  		v.Type = typ.Bool
  8505  		v0 := b.NewValue0(v.Pos, OpMIPSSUB, t.FieldType(0))
  8506  		v0.AddArg(x)
  8507  		v0.AddArg(y)
  8508  		v.AddArg(v0)
  8509  		v.AddArg(x)
  8510  		return true
  8511  	}
  8512  	// match: (Select1 (MULTU (MOVWconst [0]) _))
  8513  	// cond:
  8514  	// result: (MOVWconst [0])
  8515  	for {
  8516  		v_0 := v.Args[0]
  8517  		if v_0.Op != OpMIPSMULTU {
  8518  			break
  8519  		}
  8520  		_ = v_0.Args[1]
  8521  		v_0_0 := v_0.Args[0]
  8522  		if v_0_0.Op != OpMIPSMOVWconst {
  8523  			break
  8524  		}
  8525  		if v_0_0.AuxInt != 0 {
  8526  			break
  8527  		}
  8528  		v.reset(OpMIPSMOVWconst)
  8529  		v.AuxInt = 0
  8530  		return true
  8531  	}
  8532  	// match: (Select1 (MULTU _ (MOVWconst [0])))
  8533  	// cond:
  8534  	// result: (MOVWconst [0])
  8535  	for {
  8536  		v_0 := v.Args[0]
  8537  		if v_0.Op != OpMIPSMULTU {
  8538  			break
  8539  		}
  8540  		_ = v_0.Args[1]
  8541  		v_0_1 := v_0.Args[1]
  8542  		if v_0_1.Op != OpMIPSMOVWconst {
  8543  			break
  8544  		}
  8545  		if v_0_1.AuxInt != 0 {
  8546  			break
  8547  		}
  8548  		v.reset(OpMIPSMOVWconst)
  8549  		v.AuxInt = 0
  8550  		return true
  8551  	}
  8552  	// match: (Select1 (MULTU (MOVWconst [1]) x))
  8553  	// cond:
  8554  	// result: x
  8555  	for {
  8556  		v_0 := v.Args[0]
  8557  		if v_0.Op != OpMIPSMULTU {
  8558  			break
  8559  		}
  8560  		_ = v_0.Args[1]
  8561  		v_0_0 := v_0.Args[0]
  8562  		if v_0_0.Op != OpMIPSMOVWconst {
  8563  			break
  8564  		}
  8565  		if v_0_0.AuxInt != 1 {
  8566  			break
  8567  		}
  8568  		x := v_0.Args[1]
  8569  		v.reset(OpCopy)
  8570  		v.Type = x.Type
  8571  		v.AddArg(x)
  8572  		return true
  8573  	}
  8574  	// match: (Select1 (MULTU x (MOVWconst [1])))
  8575  	// cond:
  8576  	// result: x
  8577  	for {
  8578  		v_0 := v.Args[0]
  8579  		if v_0.Op != OpMIPSMULTU {
  8580  			break
  8581  		}
  8582  		_ = v_0.Args[1]
  8583  		x := v_0.Args[0]
  8584  		v_0_1 := v_0.Args[1]
  8585  		if v_0_1.Op != OpMIPSMOVWconst {
  8586  			break
  8587  		}
  8588  		if v_0_1.AuxInt != 1 {
  8589  			break
  8590  		}
  8591  		v.reset(OpCopy)
  8592  		v.Type = x.Type
  8593  		v.AddArg(x)
  8594  		return true
  8595  	}
  8596  	// match: (Select1 (MULTU (MOVWconst [-1]) x))
  8597  	// cond:
  8598  	// result: (NEG <x.Type> x)
  8599  	for {
  8600  		v_0 := v.Args[0]
  8601  		if v_0.Op != OpMIPSMULTU {
  8602  			break
  8603  		}
  8604  		_ = v_0.Args[1]
  8605  		v_0_0 := v_0.Args[0]
  8606  		if v_0_0.Op != OpMIPSMOVWconst {
  8607  			break
  8608  		}
  8609  		if v_0_0.AuxInt != -1 {
  8610  			break
  8611  		}
  8612  		x := v_0.Args[1]
  8613  		v.reset(OpMIPSNEG)
  8614  		v.Type = x.Type
  8615  		v.AddArg(x)
  8616  		return true
  8617  	}
  8618  	// match: (Select1 (MULTU x (MOVWconst [-1])))
  8619  	// cond:
  8620  	// result: (NEG <x.Type> x)
  8621  	for {
  8622  		v_0 := v.Args[0]
  8623  		if v_0.Op != OpMIPSMULTU {
  8624  			break
  8625  		}
  8626  		_ = v_0.Args[1]
  8627  		x := v_0.Args[0]
  8628  		v_0_1 := v_0.Args[1]
  8629  		if v_0_1.Op != OpMIPSMOVWconst {
  8630  			break
  8631  		}
  8632  		if v_0_1.AuxInt != -1 {
  8633  			break
  8634  		}
  8635  		v.reset(OpMIPSNEG)
  8636  		v.Type = x.Type
  8637  		v.AddArg(x)
  8638  		return true
  8639  	}
  8640  	// match: (Select1 (MULTU (MOVWconst [c]) x))
  8641  	// cond: isPowerOfTwo(int64(uint32(c)))
  8642  	// result: (SLLconst [log2(int64(uint32(c)))] x)
  8643  	for {
  8644  		v_0 := v.Args[0]
  8645  		if v_0.Op != OpMIPSMULTU {
  8646  			break
  8647  		}
  8648  		_ = v_0.Args[1]
  8649  		v_0_0 := v_0.Args[0]
  8650  		if v_0_0.Op != OpMIPSMOVWconst {
  8651  			break
  8652  		}
  8653  		c := v_0_0.AuxInt
  8654  		x := v_0.Args[1]
  8655  		if !(isPowerOfTwo(int64(uint32(c)))) {
  8656  			break
  8657  		}
  8658  		v.reset(OpMIPSSLLconst)
  8659  		v.AuxInt = log2(int64(uint32(c)))
  8660  		v.AddArg(x)
  8661  		return true
  8662  	}
  8663  	// match: (Select1 (MULTU x (MOVWconst [c])))
  8664  	// cond: isPowerOfTwo(int64(uint32(c)))
  8665  	// result: (SLLconst [log2(int64(uint32(c)))] x)
  8666  	for {
  8667  		v_0 := v.Args[0]
  8668  		if v_0.Op != OpMIPSMULTU {
  8669  			break
  8670  		}
  8671  		_ = v_0.Args[1]
  8672  		x := v_0.Args[0]
  8673  		v_0_1 := v_0.Args[1]
  8674  		if v_0_1.Op != OpMIPSMOVWconst {
  8675  			break
  8676  		}
  8677  		c := v_0_1.AuxInt
  8678  		if !(isPowerOfTwo(int64(uint32(c)))) {
  8679  			break
  8680  		}
  8681  		v.reset(OpMIPSSLLconst)
  8682  		v.AuxInt = log2(int64(uint32(c)))
  8683  		v.AddArg(x)
  8684  		return true
  8685  	}
  8686  	return false
  8687  }
  8688  func rewriteValueMIPS_OpSelect1_10(v *Value) bool {
  8689  	// match: (Select1 (MULTU (MOVWconst [c]) (MOVWconst [d])))
  8690  	// cond:
  8691  	// result: (MOVWconst [int64(int32(uint32(c)*uint32(d)))])
  8692  	for {
  8693  		v_0 := v.Args[0]
  8694  		if v_0.Op != OpMIPSMULTU {
  8695  			break
  8696  		}
  8697  		_ = v_0.Args[1]
  8698  		v_0_0 := v_0.Args[0]
  8699  		if v_0_0.Op != OpMIPSMOVWconst {
  8700  			break
  8701  		}
  8702  		c := v_0_0.AuxInt
  8703  		v_0_1 := v_0.Args[1]
  8704  		if v_0_1.Op != OpMIPSMOVWconst {
  8705  			break
  8706  		}
  8707  		d := v_0_1.AuxInt
  8708  		v.reset(OpMIPSMOVWconst)
  8709  		v.AuxInt = int64(int32(uint32(c) * uint32(d)))
  8710  		return true
  8711  	}
  8712  	// match: (Select1 (MULTU (MOVWconst [d]) (MOVWconst [c])))
  8713  	// cond:
  8714  	// result: (MOVWconst [int64(int32(uint32(c)*uint32(d)))])
  8715  	for {
  8716  		v_0 := v.Args[0]
  8717  		if v_0.Op != OpMIPSMULTU {
  8718  			break
  8719  		}
  8720  		_ = v_0.Args[1]
  8721  		v_0_0 := v_0.Args[0]
  8722  		if v_0_0.Op != OpMIPSMOVWconst {
  8723  			break
  8724  		}
  8725  		d := v_0_0.AuxInt
  8726  		v_0_1 := v_0.Args[1]
  8727  		if v_0_1.Op != OpMIPSMOVWconst {
  8728  			break
  8729  		}
  8730  		c := v_0_1.AuxInt
  8731  		v.reset(OpMIPSMOVWconst)
  8732  		v.AuxInt = int64(int32(uint32(c) * uint32(d)))
  8733  		return true
  8734  	}
  8735  	// match: (Select1 (DIV (MOVWconst [c]) (MOVWconst [d])))
  8736  	// cond:
  8737  	// result: (MOVWconst [int64(int32(c)/int32(d))])
  8738  	for {
  8739  		v_0 := v.Args[0]
  8740  		if v_0.Op != OpMIPSDIV {
  8741  			break
  8742  		}
  8743  		_ = v_0.Args[1]
  8744  		v_0_0 := v_0.Args[0]
  8745  		if v_0_0.Op != OpMIPSMOVWconst {
  8746  			break
  8747  		}
  8748  		c := v_0_0.AuxInt
  8749  		v_0_1 := v_0.Args[1]
  8750  		if v_0_1.Op != OpMIPSMOVWconst {
  8751  			break
  8752  		}
  8753  		d := v_0_1.AuxInt
  8754  		v.reset(OpMIPSMOVWconst)
  8755  		v.AuxInt = int64(int32(c) / int32(d))
  8756  		return true
  8757  	}
  8758  	// match: (Select1 (DIVU (MOVWconst [c]) (MOVWconst [d])))
  8759  	// cond:
  8760  	// result: (MOVWconst [int64(int32(uint32(c)/uint32(d)))])
  8761  	for {
  8762  		v_0 := v.Args[0]
  8763  		if v_0.Op != OpMIPSDIVU {
  8764  			break
  8765  		}
  8766  		_ = v_0.Args[1]
  8767  		v_0_0 := v_0.Args[0]
  8768  		if v_0_0.Op != OpMIPSMOVWconst {
  8769  			break
  8770  		}
  8771  		c := v_0_0.AuxInt
  8772  		v_0_1 := v_0.Args[1]
  8773  		if v_0_1.Op != OpMIPSMOVWconst {
  8774  			break
  8775  		}
  8776  		d := v_0_1.AuxInt
  8777  		v.reset(OpMIPSMOVWconst)
  8778  		v.AuxInt = int64(int32(uint32(c) / uint32(d)))
  8779  		return true
  8780  	}
  8781  	return false
  8782  }
  8783  func rewriteValueMIPS_OpSignExt16to32_0(v *Value) bool {
  8784  	// match: (SignExt16to32 x)
  8785  	// cond:
  8786  	// result: (MOVHreg x)
  8787  	for {
  8788  		x := v.Args[0]
  8789  		v.reset(OpMIPSMOVHreg)
  8790  		v.AddArg(x)
  8791  		return true
  8792  	}
  8793  }
  8794  func rewriteValueMIPS_OpSignExt8to16_0(v *Value) bool {
  8795  	// match: (SignExt8to16 x)
  8796  	// cond:
  8797  	// result: (MOVBreg x)
  8798  	for {
  8799  		x := v.Args[0]
  8800  		v.reset(OpMIPSMOVBreg)
  8801  		v.AddArg(x)
  8802  		return true
  8803  	}
  8804  }
  8805  func rewriteValueMIPS_OpSignExt8to32_0(v *Value) bool {
  8806  	// match: (SignExt8to32 x)
  8807  	// cond:
  8808  	// result: (MOVBreg x)
  8809  	for {
  8810  		x := v.Args[0]
  8811  		v.reset(OpMIPSMOVBreg)
  8812  		v.AddArg(x)
  8813  		return true
  8814  	}
  8815  }
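        // Signmask and Slicemask are both computed with a 31-bit arithmetic shift:
        // Signmask x is x>>31 (all ones if x is negative, zero otherwise), while
        // Slicemask negates its operand first, so a positive length yields all ones
        // and a zero length yields zero.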
  8816  func rewriteValueMIPS_OpSignmask_0(v *Value) bool {
  8817  	// match: (Signmask x)
  8818  	// cond:
  8819  	// result: (SRAconst x [31])
  8820  	for {
  8821  		x := v.Args[0]
  8822  		v.reset(OpMIPSSRAconst)
  8823  		v.AuxInt = 31
  8824  		v.AddArg(x)
  8825  		return true
  8826  	}
  8827  }
  8828  func rewriteValueMIPS_OpSlicemask_0(v *Value) bool {
  8829  	b := v.Block
  8830  	_ = b
  8831  	// match: (Slicemask <t> x)
  8832  	// cond:
  8833  	// result: (SRAconst (NEG <t> x) [31])
  8834  	for {
  8835  		t := v.Type
  8836  		x := v.Args[0]
  8837  		v.reset(OpMIPSSRAconst)
  8838  		v.AuxInt = 31
  8839  		v0 := b.NewValue0(v.Pos, OpMIPSNEG, t)
  8840  		v0.AddArg(x)
  8841  		v.AddArg(v0)
  8842  		return true
  8843  	}
  8844  }
  8845  func rewriteValueMIPS_OpSqrt_0(v *Value) bool {
  8846  	// match: (Sqrt x)
  8847  	// cond:
  8848  	// result: (SQRTD x)
  8849  	for {
  8850  		x := v.Args[0]
  8851  		v.reset(OpMIPSSQRTD)
  8852  		v.AddArg(x)
  8853  		return true
  8854  	}
  8855  }
  8856  func rewriteValueMIPS_OpStaticCall_0(v *Value) bool {
  8857  	// match: (StaticCall [argwid] {target} mem)
  8858  	// cond:
  8859  	// result: (CALLstatic [argwid] {target} mem)
  8860  	for {
  8861  		argwid := v.AuxInt
  8862  		target := v.Aux
  8863  		mem := v.Args[0]
  8864  		v.reset(OpMIPSCALLstatic)
  8865  		v.AuxInt = argwid
  8866  		v.Aux = target
  8867  		v.AddArg(mem)
  8868  		return true
  8869  	}
  8870  }
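        // Store is lowered by dispatching on the size of the stored type, using the
        // floating-point stores (MOVFstore, MOVDstore) when the stored value is a
        // float of the matching width.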
  8871  func rewriteValueMIPS_OpStore_0(v *Value) bool {
  8872  	// match: (Store {t} ptr val mem)
  8873  	// cond: t.(*types.Type).Size() == 1
  8874  	// result: (MOVBstore ptr val mem)
  8875  	for {
  8876  		t := v.Aux
  8877  		_ = v.Args[2]
  8878  		ptr := v.Args[0]
  8879  		val := v.Args[1]
  8880  		mem := v.Args[2]
  8881  		if !(t.(*types.Type).Size() == 1) {
  8882  			break
  8883  		}
  8884  		v.reset(OpMIPSMOVBstore)
  8885  		v.AddArg(ptr)
  8886  		v.AddArg(val)
  8887  		v.AddArg(mem)
  8888  		return true
  8889  	}
  8890  	// match: (Store {t} ptr val mem)
  8891  	// cond: t.(*types.Type).Size() == 2
  8892  	// result: (MOVHstore ptr val mem)
  8893  	for {
  8894  		t := v.Aux
  8895  		_ = v.Args[2]
  8896  		ptr := v.Args[0]
  8897  		val := v.Args[1]
  8898  		mem := v.Args[2]
  8899  		if !(t.(*types.Type).Size() == 2) {
  8900  			break
  8901  		}
  8902  		v.reset(OpMIPSMOVHstore)
  8903  		v.AddArg(ptr)
  8904  		v.AddArg(val)
  8905  		v.AddArg(mem)
  8906  		return true
  8907  	}
  8908  	// match: (Store {t} ptr val mem)
  8909  	// cond: t.(*types.Type).Size() == 4 && !is32BitFloat(val.Type)
  8910  	// result: (MOVWstore ptr val mem)
  8911  	for {
  8912  		t := v.Aux
  8913  		_ = v.Args[2]
  8914  		ptr := v.Args[0]
  8915  		val := v.Args[1]
  8916  		mem := v.Args[2]
  8917  		if !(t.(*types.Type).Size() == 4 && !is32BitFloat(val.Type)) {
  8918  			break
  8919  		}
  8920  		v.reset(OpMIPSMOVWstore)
  8921  		v.AddArg(ptr)
  8922  		v.AddArg(val)
  8923  		v.AddArg(mem)
  8924  		return true
  8925  	}
  8926  	// match: (Store {t} ptr val mem)
  8927  	// cond: t.(*types.Type).Size() == 4 && is32BitFloat(val.Type)
  8928  	// result: (MOVFstore ptr val mem)
  8929  	for {
  8930  		t := v.Aux
  8931  		_ = v.Args[2]
  8932  		ptr := v.Args[0]
  8933  		val := v.Args[1]
  8934  		mem := v.Args[2]
  8935  		if !(t.(*types.Type).Size() == 4 && is32BitFloat(val.Type)) {
  8936  			break
  8937  		}
  8938  		v.reset(OpMIPSMOVFstore)
  8939  		v.AddArg(ptr)
  8940  		v.AddArg(val)
  8941  		v.AddArg(mem)
  8942  		return true
  8943  	}
  8944  	// match: (Store {t} ptr val mem)
  8945  	// cond: t.(*types.Type).Size() == 8 && is64BitFloat(val.Type)
  8946  	// result: (MOVDstore ptr val mem)
  8947  	for {
  8948  		t := v.Aux
  8949  		_ = v.Args[2]
  8950  		ptr := v.Args[0]
  8951  		val := v.Args[1]
  8952  		mem := v.Args[2]
  8953  		if !(t.(*types.Type).Size() == 8 && is64BitFloat(val.Type)) {
  8954  			break
  8955  		}
  8956  		v.reset(OpMIPSMOVDstore)
  8957  		v.AddArg(ptr)
  8958  		v.AddArg(val)
  8959  		v.AddArg(mem)
  8960  		return true
  8961  	}
  8962  	return false
  8963  }
  8964  func rewriteValueMIPS_OpSub16_0(v *Value) bool {
  8965  	// match: (Sub16 x y)
  8966  	// cond:
  8967  	// result: (SUB x y)
  8968  	for {
  8969  		_ = v.Args[1]
  8970  		x := v.Args[0]
  8971  		y := v.Args[1]
  8972  		v.reset(OpMIPSSUB)
  8973  		v.AddArg(x)
  8974  		v.AddArg(y)
  8975  		return true
  8976  	}
  8977  }
  8978  func rewriteValueMIPS_OpSub32_0(v *Value) bool {
  8979  	// match: (Sub32 x y)
  8980  	// cond:
  8981  	// result: (SUB x y)
  8982  	for {
  8983  		_ = v.Args[1]
  8984  		x := v.Args[0]
  8985  		y := v.Args[1]
  8986  		v.reset(OpMIPSSUB)
  8987  		v.AddArg(x)
  8988  		v.AddArg(y)
  8989  		return true
  8990  	}
  8991  }
  8992  func rewriteValueMIPS_OpSub32F_0(v *Value) bool {
  8993  	// match: (Sub32F x y)
  8994  	// cond:
  8995  	// result: (SUBF x y)
  8996  	for {
  8997  		_ = v.Args[1]
  8998  		x := v.Args[0]
  8999  		y := v.Args[1]
  9000  		v.reset(OpMIPSSUBF)
  9001  		v.AddArg(x)
  9002  		v.AddArg(y)
  9003  		return true
  9004  	}
  9005  }
  9006  func rewriteValueMIPS_OpSub32withcarry_0(v *Value) bool {
  9007  	b := v.Block
  9008  	_ = b
  9009  	// match: (Sub32withcarry <t> x y c)
  9010  	// cond:
  9011  	// result: (SUB (SUB <t> x y) c)
  9012  	for {
  9013  		t := v.Type
  9014  		_ = v.Args[2]
  9015  		x := v.Args[0]
  9016  		y := v.Args[1]
  9017  		c := v.Args[2]
  9018  		v.reset(OpMIPSSUB)
  9019  		v0 := b.NewValue0(v.Pos, OpMIPSSUB, t)
  9020  		v0.AddArg(x)
  9021  		v0.AddArg(y)
  9022  		v.AddArg(v0)
  9023  		v.AddArg(c)
  9024  		return true
  9025  	}
  9026  }
  9027  func rewriteValueMIPS_OpSub64F_0(v *Value) bool {
  9028  	// match: (Sub64F x y)
  9029  	// cond:
  9030  	// result: (SUBD x y)
  9031  	for {
  9032  		_ = v.Args[1]
  9033  		x := v.Args[0]
  9034  		y := v.Args[1]
  9035  		v.reset(OpMIPSSUBD)
  9036  		v.AddArg(x)
  9037  		v.AddArg(y)
  9038  		return true
  9039  	}
  9040  }
  9041  func rewriteValueMIPS_OpSub8_0(v *Value) bool {
  9042  	// match: (Sub8 x y)
  9043  	// cond:
  9044  	// result: (SUB x y)
  9045  	for {
  9046  		_ = v.Args[1]
  9047  		x := v.Args[0]
  9048  		y := v.Args[1]
  9049  		v.reset(OpMIPSSUB)
  9050  		v.AddArg(x)
  9051  		v.AddArg(y)
  9052  		return true
  9053  	}
  9054  }
  9055  func rewriteValueMIPS_OpSubPtr_0(v *Value) bool {
  9056  	// match: (SubPtr x y)
  9057  	// cond:
  9058  	// result: (SUB x y)
  9059  	for {
  9060  		_ = v.Args[1]
  9061  		x := v.Args[0]
  9062  		y := v.Args[1]
  9063  		v.reset(OpMIPSSUB)
  9064  		v.AddArg(x)
  9065  		v.AddArg(y)
  9066  		return true
  9067  	}
  9068  }
  9069  func rewriteValueMIPS_OpTrunc16to8_0(v *Value) bool {
  9070  	// match: (Trunc16to8 x)
  9071  	// cond:
  9072  	// result: x
  9073  	for {
  9074  		x := v.Args[0]
  9075  		v.reset(OpCopy)
  9076  		v.Type = x.Type
  9077  		v.AddArg(x)
  9078  		return true
  9079  	}
  9080  }
  9081  func rewriteValueMIPS_OpTrunc32to16_0(v *Value) bool {
  9082  	// match: (Trunc32to16 x)
  9083  	// cond:
  9084  	// result: x
  9085  	for {
  9086  		x := v.Args[0]
  9087  		v.reset(OpCopy)
  9088  		v.Type = x.Type
  9089  		v.AddArg(x)
  9090  		return true
  9091  	}
  9092  }
  9093  func rewriteValueMIPS_OpTrunc32to8_0(v *Value) bool {
  9094  	// match: (Trunc32to8 x)
  9095  	// cond:
  9096  	// result: x
  9097  	for {
  9098  		x := v.Args[0]
  9099  		v.reset(OpCopy)
  9100  		v.Type = x.Type
  9101  		v.AddArg(x)
  9102  		return true
  9103  	}
  9104  }
  9105  func rewriteValueMIPS_OpXor16_0(v *Value) bool {
  9106  	// match: (Xor16 x y)
  9107  	// cond:
  9108  	// result: (XOR x y)
  9109  	for {
  9110  		_ = v.Args[1]
  9111  		x := v.Args[0]
  9112  		y := v.Args[1]
  9113  		v.reset(OpMIPSXOR)
  9114  		v.AddArg(x)
  9115  		v.AddArg(y)
  9116  		return true
  9117  	}
  9118  }
  9119  func rewriteValueMIPS_OpXor32_0(v *Value) bool {
  9120  	// match: (Xor32 x y)
  9121  	// cond:
  9122  	// result: (XOR x y)
  9123  	for {
  9124  		_ = v.Args[1]
  9125  		x := v.Args[0]
  9126  		y := v.Args[1]
  9127  		v.reset(OpMIPSXOR)
  9128  		v.AddArg(x)
  9129  		v.AddArg(y)
  9130  		return true
  9131  	}
  9132  }
  9133  func rewriteValueMIPS_OpXor8_0(v *Value) bool {
  9134  	// match: (Xor8 x y)
  9135  	// cond:
  9136  	// result: (XOR x y)
  9137  	for {
  9138  		_ = v.Args[1]
  9139  		x := v.Args[0]
  9140  		y := v.Args[1]
  9141  		v.reset(OpMIPSXOR)
  9142  		v.AddArg(x)
  9143  		v.AddArg(y)
  9144  		return true
  9145  	}
  9146  }
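        // Zero is lowered to explicit stores of a zero constant for small blocks,
        // preferring word or halfword stores when the target alignment allows and
        // falling back to byte stores otherwise.  Blocks larger than 16 bytes, or
        // blocks whose alignment is not a multiple of 4, are handled by the
        // LoweredZero pseudo-op in the _10 chunk below.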
  9147  func rewriteValueMIPS_OpZero_0(v *Value) bool {
  9148  	b := v.Block
  9149  	_ = b
  9150  	typ := &b.Func.Config.Types
  9151  	_ = typ
  9152  	// match: (Zero [0] _ mem)
  9153  	// cond:
  9154  	// result: mem
  9155  	for {
  9156  		if v.AuxInt != 0 {
  9157  			break
  9158  		}
  9159  		_ = v.Args[1]
  9160  		mem := v.Args[1]
  9161  		v.reset(OpCopy)
  9162  		v.Type = mem.Type
  9163  		v.AddArg(mem)
  9164  		return true
  9165  	}
  9166  	// match: (Zero [1] ptr mem)
  9167  	// cond:
  9168  	// result: (MOVBstore ptr (MOVWconst [0]) mem)
  9169  	for {
  9170  		if v.AuxInt != 1 {
  9171  			break
  9172  		}
  9173  		_ = v.Args[1]
  9174  		ptr := v.Args[0]
  9175  		mem := v.Args[1]
  9176  		v.reset(OpMIPSMOVBstore)
  9177  		v.AddArg(ptr)
  9178  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9179  		v0.AuxInt = 0
  9180  		v.AddArg(v0)
  9181  		v.AddArg(mem)
  9182  		return true
  9183  	}
  9184  	// match: (Zero [2] {t} ptr mem)
  9185  	// cond: t.(*types.Type).Alignment()%2 == 0
  9186  	// result: (MOVHstore ptr (MOVWconst [0]) mem)
  9187  	for {
  9188  		if v.AuxInt != 2 {
  9189  			break
  9190  		}
  9191  		t := v.Aux
  9192  		_ = v.Args[1]
  9193  		ptr := v.Args[0]
  9194  		mem := v.Args[1]
  9195  		if !(t.(*types.Type).Alignment()%2 == 0) {
  9196  			break
  9197  		}
  9198  		v.reset(OpMIPSMOVHstore)
  9199  		v.AddArg(ptr)
  9200  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9201  		v0.AuxInt = 0
  9202  		v.AddArg(v0)
  9203  		v.AddArg(mem)
  9204  		return true
  9205  	}
  9206  	// match: (Zero [2] ptr mem)
  9207  	// cond:
  9208  	// result: (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem))
  9209  	for {
  9210  		if v.AuxInt != 2 {
  9211  			break
  9212  		}
  9213  		_ = v.Args[1]
  9214  		ptr := v.Args[0]
  9215  		mem := v.Args[1]
  9216  		v.reset(OpMIPSMOVBstore)
  9217  		v.AuxInt = 1
  9218  		v.AddArg(ptr)
  9219  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9220  		v0.AuxInt = 0
  9221  		v.AddArg(v0)
  9222  		v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  9223  		v1.AuxInt = 0
  9224  		v1.AddArg(ptr)
  9225  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9226  		v2.AuxInt = 0
  9227  		v1.AddArg(v2)
  9228  		v1.AddArg(mem)
  9229  		v.AddArg(v1)
  9230  		return true
  9231  	}
  9232  	// match: (Zero [4] {t} ptr mem)
  9233  	// cond: t.(*types.Type).Alignment()%4 == 0
  9234  	// result: (MOVWstore ptr (MOVWconst [0]) mem)
  9235  	for {
  9236  		if v.AuxInt != 4 {
  9237  			break
  9238  		}
  9239  		t := v.Aux
  9240  		_ = v.Args[1]
  9241  		ptr := v.Args[0]
  9242  		mem := v.Args[1]
  9243  		if !(t.(*types.Type).Alignment()%4 == 0) {
  9244  			break
  9245  		}
  9246  		v.reset(OpMIPSMOVWstore)
  9247  		v.AddArg(ptr)
  9248  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9249  		v0.AuxInt = 0
  9250  		v.AddArg(v0)
  9251  		v.AddArg(mem)
  9252  		return true
  9253  	}
  9254  	// match: (Zero [4] {t} ptr mem)
  9255  	// cond: t.(*types.Type).Alignment()%2 == 0
  9256  	// result: (MOVHstore [2] ptr (MOVWconst [0]) (MOVHstore [0] ptr (MOVWconst [0]) mem))
  9257  	for {
  9258  		if v.AuxInt != 4 {
  9259  			break
  9260  		}
  9261  		t := v.Aux
  9262  		_ = v.Args[1]
  9263  		ptr := v.Args[0]
  9264  		mem := v.Args[1]
  9265  		if !(t.(*types.Type).Alignment()%2 == 0) {
  9266  			break
  9267  		}
  9268  		v.reset(OpMIPSMOVHstore)
  9269  		v.AuxInt = 2
  9270  		v.AddArg(ptr)
  9271  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9272  		v0.AuxInt = 0
  9273  		v.AddArg(v0)
  9274  		v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
  9275  		v1.AuxInt = 0
  9276  		v1.AddArg(ptr)
  9277  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9278  		v2.AuxInt = 0
  9279  		v1.AddArg(v2)
  9280  		v1.AddArg(mem)
  9281  		v.AddArg(v1)
  9282  		return true
  9283  	}
  9284  	// match: (Zero [4] ptr mem)
  9285  	// cond:
  9286  	// result: (MOVBstore [3] ptr (MOVWconst [0]) (MOVBstore [2] ptr (MOVWconst [0]) (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem))))
  9287  	for {
  9288  		if v.AuxInt != 4 {
  9289  			break
  9290  		}
  9291  		_ = v.Args[1]
  9292  		ptr := v.Args[0]
  9293  		mem := v.Args[1]
  9294  		v.reset(OpMIPSMOVBstore)
  9295  		v.AuxInt = 3
  9296  		v.AddArg(ptr)
  9297  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9298  		v0.AuxInt = 0
  9299  		v.AddArg(v0)
  9300  		v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  9301  		v1.AuxInt = 2
  9302  		v1.AddArg(ptr)
  9303  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9304  		v2.AuxInt = 0
  9305  		v1.AddArg(v2)
  9306  		v3 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  9307  		v3.AuxInt = 1
  9308  		v3.AddArg(ptr)
  9309  		v4 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9310  		v4.AuxInt = 0
  9311  		v3.AddArg(v4)
  9312  		v5 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  9313  		v5.AuxInt = 0
  9314  		v5.AddArg(ptr)
  9315  		v6 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9316  		v6.AuxInt = 0
  9317  		v5.AddArg(v6)
  9318  		v5.AddArg(mem)
  9319  		v3.AddArg(v5)
  9320  		v1.AddArg(v3)
  9321  		v.AddArg(v1)
  9322  		return true
  9323  	}
  9324  	// match: (Zero [3] ptr mem)
  9325  	// cond:
  9326  	// result: (MOVBstore [2] ptr (MOVWconst [0]) (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem)))
  9327  	for {
  9328  		if v.AuxInt != 3 {
  9329  			break
  9330  		}
  9331  		_ = v.Args[1]
  9332  		ptr := v.Args[0]
  9333  		mem := v.Args[1]
  9334  		v.reset(OpMIPSMOVBstore)
  9335  		v.AuxInt = 2
  9336  		v.AddArg(ptr)
  9337  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9338  		v0.AuxInt = 0
  9339  		v.AddArg(v0)
  9340  		v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  9341  		v1.AuxInt = 1
  9342  		v1.AddArg(ptr)
  9343  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9344  		v2.AuxInt = 0
  9345  		v1.AddArg(v2)
  9346  		v3 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  9347  		v3.AuxInt = 0
  9348  		v3.AddArg(ptr)
  9349  		v4 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9350  		v4.AuxInt = 0
  9351  		v3.AddArg(v4)
  9352  		v3.AddArg(mem)
  9353  		v1.AddArg(v3)
  9354  		v.AddArg(v1)
  9355  		return true
  9356  	}
  9357  	// match: (Zero [6] {t} ptr mem)
  9358  	// cond: t.(*types.Type).Alignment()%2 == 0
  9359  	// result: (MOVHstore [4] ptr (MOVWconst [0]) (MOVHstore [2] ptr (MOVWconst [0]) (MOVHstore [0] ptr (MOVWconst [0]) mem)))
  9360  	for {
  9361  		if v.AuxInt != 6 {
  9362  			break
  9363  		}
  9364  		t := v.Aux
  9365  		_ = v.Args[1]
  9366  		ptr := v.Args[0]
  9367  		mem := v.Args[1]
  9368  		if !(t.(*types.Type).Alignment()%2 == 0) {
  9369  			break
  9370  		}
  9371  		v.reset(OpMIPSMOVHstore)
  9372  		v.AuxInt = 4
  9373  		v.AddArg(ptr)
  9374  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9375  		v0.AuxInt = 0
  9376  		v.AddArg(v0)
  9377  		v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
  9378  		v1.AuxInt = 2
  9379  		v1.AddArg(ptr)
  9380  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9381  		v2.AuxInt = 0
  9382  		v1.AddArg(v2)
  9383  		v3 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
  9384  		v3.AuxInt = 0
  9385  		v3.AddArg(ptr)
  9386  		v4 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9387  		v4.AuxInt = 0
  9388  		v3.AddArg(v4)
  9389  		v3.AddArg(mem)
  9390  		v1.AddArg(v3)
  9391  		v.AddArg(v1)
  9392  		return true
  9393  	}
  9394  	// match: (Zero [8] {t} ptr mem)
  9395  	// cond: t.(*types.Type).Alignment()%4 == 0
  9396  	// result: (MOVWstore [4] ptr (MOVWconst [0]) (MOVWstore [0] ptr (MOVWconst [0]) mem))
  9397  	for {
  9398  		if v.AuxInt != 8 {
  9399  			break
  9400  		}
  9401  		t := v.Aux
  9402  		_ = v.Args[1]
  9403  		ptr := v.Args[0]
  9404  		mem := v.Args[1]
  9405  		if !(t.(*types.Type).Alignment()%4 == 0) {
  9406  			break
  9407  		}
  9408  		v.reset(OpMIPSMOVWstore)
  9409  		v.AuxInt = 4
  9410  		v.AddArg(ptr)
  9411  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9412  		v0.AuxInt = 0
  9413  		v.AddArg(v0)
  9414  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  9415  		v1.AuxInt = 0
  9416  		v1.AddArg(ptr)
  9417  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9418  		v2.AuxInt = 0
  9419  		v1.AddArg(v2)
  9420  		v1.AddArg(mem)
  9421  		v.AddArg(v1)
  9422  		return true
  9423  	}
  9424  	return false
  9425  }
  9426  func rewriteValueMIPS_OpZero_10(v *Value) bool {
  9427  	b := v.Block
  9428  	_ = b
  9429  	config := b.Func.Config
  9430  	_ = config
  9431  	typ := &b.Func.Config.Types
  9432  	_ = typ
  9433  	// match: (Zero [12] {t} ptr mem)
  9434  	// cond: t.(*types.Type).Alignment()%4 == 0
  9435  	// result: (MOVWstore [8] ptr (MOVWconst [0]) (MOVWstore [4] ptr (MOVWconst [0]) (MOVWstore [0] ptr (MOVWconst [0]) mem)))
  9436  	for {
  9437  		if v.AuxInt != 12 {
  9438  			break
  9439  		}
  9440  		t := v.Aux
  9441  		_ = v.Args[1]
  9442  		ptr := v.Args[0]
  9443  		mem := v.Args[1]
  9444  		if !(t.(*types.Type).Alignment()%4 == 0) {
  9445  			break
  9446  		}
  9447  		v.reset(OpMIPSMOVWstore)
  9448  		v.AuxInt = 8
  9449  		v.AddArg(ptr)
  9450  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9451  		v0.AuxInt = 0
  9452  		v.AddArg(v0)
  9453  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  9454  		v1.AuxInt = 4
  9455  		v1.AddArg(ptr)
  9456  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9457  		v2.AuxInt = 0
  9458  		v1.AddArg(v2)
  9459  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  9460  		v3.AuxInt = 0
  9461  		v3.AddArg(ptr)
  9462  		v4 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9463  		v4.AuxInt = 0
  9464  		v3.AddArg(v4)
  9465  		v3.AddArg(mem)
  9466  		v1.AddArg(v3)
  9467  		v.AddArg(v1)
  9468  		return true
  9469  	}
  9470  	// match: (Zero [16] {t} ptr mem)
  9471  	// cond: t.(*types.Type).Alignment()%4 == 0
  9472  	// result: (MOVWstore [12] ptr (MOVWconst [0]) (MOVWstore [8] ptr (MOVWconst [0]) (MOVWstore [4] ptr (MOVWconst [0]) (MOVWstore [0] ptr (MOVWconst [0]) mem))))
  9473  	for {
  9474  		if v.AuxInt != 16 {
  9475  			break
  9476  		}
  9477  		t := v.Aux
  9478  		_ = v.Args[1]
  9479  		ptr := v.Args[0]
  9480  		mem := v.Args[1]
  9481  		if !(t.(*types.Type).Alignment()%4 == 0) {
  9482  			break
  9483  		}
  9484  		v.reset(OpMIPSMOVWstore)
  9485  		v.AuxInt = 12
  9486  		v.AddArg(ptr)
  9487  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9488  		v0.AuxInt = 0
  9489  		v.AddArg(v0)
  9490  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  9491  		v1.AuxInt = 8
  9492  		v1.AddArg(ptr)
  9493  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9494  		v2.AuxInt = 0
  9495  		v1.AddArg(v2)
  9496  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  9497  		v3.AuxInt = 4
  9498  		v3.AddArg(ptr)
  9499  		v4 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9500  		v4.AuxInt = 0
  9501  		v3.AddArg(v4)
  9502  		v5 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  9503  		v5.AuxInt = 0
  9504  		v5.AddArg(ptr)
  9505  		v6 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9506  		v6.AuxInt = 0
  9507  		v5.AddArg(v6)
  9508  		v5.AddArg(mem)
  9509  		v3.AddArg(v5)
  9510  		v1.AddArg(v3)
  9511  		v.AddArg(v1)
  9512  		return true
  9513  	}
  9514  	// match: (Zero [s] {t} ptr mem)
  9515  	// cond: (s > 16 || t.(*types.Type).Alignment()%4 != 0)
  9516  	// result: (LoweredZero [t.(*types.Type).Alignment()] ptr (ADDconst <ptr.Type> ptr [s-moveSize(t.(*types.Type).Alignment(), config)]) mem)
  9517  	for {
  9518  		s := v.AuxInt
  9519  		t := v.Aux
  9520  		_ = v.Args[1]
  9521  		ptr := v.Args[0]
  9522  		mem := v.Args[1]
  9523  		if !(s > 16 || t.(*types.Type).Alignment()%4 != 0) {
  9524  			break
  9525  		}
  9526  		v.reset(OpMIPSLoweredZero)
  9527  		v.AuxInt = t.(*types.Type).Alignment()
  9528  		v.AddArg(ptr)
  9529  		v0 := b.NewValue0(v.Pos, OpMIPSADDconst, ptr.Type)
  9530  		v0.AuxInt = s - moveSize(t.(*types.Type).Alignment(), config)
  9531  		v0.AddArg(ptr)
  9532  		v.AddArg(v0)
  9533  		v.AddArg(mem)
  9534  		return true
  9535  	}
  9536  	return false
  9537  }
  9538  func rewriteValueMIPS_OpZeroExt16to32_0(v *Value) bool {
  9539  	// match: (ZeroExt16to32 x)
  9540  	// cond:
  9541  	// result: (MOVHUreg x)
  9542  	for {
  9543  		x := v.Args[0]
  9544  		v.reset(OpMIPSMOVHUreg)
  9545  		v.AddArg(x)
  9546  		return true
  9547  	}
  9548  }
  9549  func rewriteValueMIPS_OpZeroExt8to16_0(v *Value) bool {
  9550  	// match: (ZeroExt8to16 x)
  9551  	// cond:
  9552  	// result: (MOVBUreg x)
  9553  	for {
  9554  		x := v.Args[0]
  9555  		v.reset(OpMIPSMOVBUreg)
  9556  		v.AddArg(x)
  9557  		return true
  9558  	}
  9559  }
  9560  func rewriteValueMIPS_OpZeroExt8to32_0(v *Value) bool {
  9561  	// match: (ZeroExt8to32 x)
  9562  	// cond:
  9563  	// result: (MOVBUreg x)
  9564  	for {
  9565  		x := v.Args[0]
  9566  		v.reset(OpMIPSMOVBUreg)
  9567  		v.AddArg(x)
  9568  		return true
  9569  	}
  9570  }
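        // Zeromask x is all ones when x is non-zero and zero otherwise: SGTU x 0
        // produces the boolean x >u 0, and NEG turns that 0/1 into 0/-1.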
  9571  func rewriteValueMIPS_OpZeromask_0(v *Value) bool {
  9572  	b := v.Block
  9573  	_ = b
  9574  	typ := &b.Func.Config.Types
  9575  	_ = typ
  9576  	// match: (Zeromask x)
  9577  	// cond:
  9578  	// result: (NEG (SGTU x (MOVWconst [0])))
  9579  	for {
  9580  		x := v.Args[0]
  9581  		v.reset(OpMIPSNEG)
  9582  		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
  9583  		v0.AddArg(x)
  9584  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9585  		v1.AuxInt = 0
  9586  		v0.AddArg(v1)
  9587  		v.AddArg(v0)
  9588  		return true
  9589  	}
  9590  }
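        // rewriteBlockMIPS lowers generic control-flow blocks and canonicalizes the
        // MIPS ones: a generic If becomes NE on its condition, floating-point flag
        // controls select the FPT/FPF block kinds, comparisons inverted with
        // XORconst [1] flip EQ and NE, comparisons against zero select the dedicated
        // sign-test blocks (GEZ, GTZ, LEZ, LTZ), and constant controls collapse to a
        // First block, swapping the successors when the branch is statically not taken.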
  9591  func rewriteBlockMIPS(b *Block) bool {
  9592  	config := b.Func.Config
  9593  	_ = config
  9594  	fe := b.Func.fe
  9595  	_ = fe
  9596  	typ := &config.Types
  9597  	_ = typ
  9598  	switch b.Kind {
  9599  	case BlockMIPSEQ:
  9600  		// match: (EQ (FPFlagTrue cmp) yes no)
  9601  		// cond:
  9602  		// result: (FPF cmp yes no)
  9603  		for {
  9604  			v := b.Control
  9605  			if v.Op != OpMIPSFPFlagTrue {
  9606  				break
  9607  			}
  9608  			cmp := v.Args[0]
  9609  			b.Kind = BlockMIPSFPF
  9610  			b.SetControl(cmp)
  9611  			return true
  9612  		}
  9613  		// match: (EQ (FPFlagFalse cmp) yes no)
  9614  		// cond:
  9615  		// result: (FPT cmp yes no)
  9616  		for {
  9617  			v := b.Control
  9618  			if v.Op != OpMIPSFPFlagFalse {
  9619  				break
  9620  			}
  9621  			cmp := v.Args[0]
  9622  			b.Kind = BlockMIPSFPT
  9623  			b.SetControl(cmp)
  9624  			return true
  9625  		}
  9626  		// match: (EQ (XORconst [1] cmp:(SGT _ _)) yes no)
  9627  		// cond:
  9628  		// result: (NE cmp yes no)
  9629  		for {
  9630  			v := b.Control
  9631  			if v.Op != OpMIPSXORconst {
  9632  				break
  9633  			}
  9634  			if v.AuxInt != 1 {
  9635  				break
  9636  			}
  9637  			cmp := v.Args[0]
  9638  			if cmp.Op != OpMIPSSGT {
  9639  				break
  9640  			}
  9641  			_ = cmp.Args[1]
  9642  			b.Kind = BlockMIPSNE
  9643  			b.SetControl(cmp)
  9644  			return true
  9645  		}
  9646  		// match: (EQ (XORconst [1] cmp:(SGTU _ _)) yes no)
  9647  		// cond:
  9648  		// result: (NE cmp yes no)
  9649  		for {
  9650  			v := b.Control
  9651  			if v.Op != OpMIPSXORconst {
  9652  				break
  9653  			}
  9654  			if v.AuxInt != 1 {
  9655  				break
  9656  			}
  9657  			cmp := v.Args[0]
  9658  			if cmp.Op != OpMIPSSGTU {
  9659  				break
  9660  			}
  9661  			_ = cmp.Args[1]
  9662  			b.Kind = BlockMIPSNE
  9663  			b.SetControl(cmp)
  9664  			return true
  9665  		}
  9666  		// match: (EQ (XORconst [1] cmp:(SGTconst _)) yes no)
  9667  		// cond:
  9668  		// result: (NE cmp yes no)
  9669  		for {
  9670  			v := b.Control
  9671  			if v.Op != OpMIPSXORconst {
  9672  				break
  9673  			}
  9674  			if v.AuxInt != 1 {
  9675  				break
  9676  			}
  9677  			cmp := v.Args[0]
  9678  			if cmp.Op != OpMIPSSGTconst {
  9679  				break
  9680  			}
  9681  			b.Kind = BlockMIPSNE
  9682  			b.SetControl(cmp)
  9683  			return true
  9684  		}
  9685  		// match: (EQ (XORconst [1] cmp:(SGTUconst _)) yes no)
  9686  		// cond:
  9687  		// result: (NE cmp yes no)
  9688  		for {
  9689  			v := b.Control
  9690  			if v.Op != OpMIPSXORconst {
  9691  				break
  9692  			}
  9693  			if v.AuxInt != 1 {
  9694  				break
  9695  			}
  9696  			cmp := v.Args[0]
  9697  			if cmp.Op != OpMIPSSGTUconst {
  9698  				break
  9699  			}
  9700  			b.Kind = BlockMIPSNE
  9701  			b.SetControl(cmp)
  9702  			return true
  9703  		}
  9704  		// match: (EQ (XORconst [1] cmp:(SGTzero _)) yes no)
  9705  		// cond:
  9706  		// result: (NE cmp yes no)
  9707  		for {
  9708  			v := b.Control
  9709  			if v.Op != OpMIPSXORconst {
  9710  				break
  9711  			}
  9712  			if v.AuxInt != 1 {
  9713  				break
  9714  			}
  9715  			cmp := v.Args[0]
  9716  			if cmp.Op != OpMIPSSGTzero {
  9717  				break
  9718  			}
  9719  			b.Kind = BlockMIPSNE
  9720  			b.SetControl(cmp)
  9721  			return true
  9722  		}
  9723  		// match: (EQ (XORconst [1] cmp:(SGTUzero _)) yes no)
  9724  		// cond:
  9725  		// result: (NE cmp yes no)
  9726  		for {
  9727  			v := b.Control
  9728  			if v.Op != OpMIPSXORconst {
  9729  				break
  9730  			}
  9731  			if v.AuxInt != 1 {
  9732  				break
  9733  			}
  9734  			cmp := v.Args[0]
  9735  			if cmp.Op != OpMIPSSGTUzero {
  9736  				break
  9737  			}
  9738  			b.Kind = BlockMIPSNE
  9739  			b.SetControl(cmp)
  9740  			return true
  9741  		}
  9742  		// match: (EQ (SGTUconst [1] x) yes no)
  9743  		// cond:
  9744  		// result: (NE x yes no)
  9745  		for {
  9746  			v := b.Control
  9747  			if v.Op != OpMIPSSGTUconst {
  9748  				break
  9749  			}
  9750  			if v.AuxInt != 1 {
  9751  				break
  9752  			}
  9753  			x := v.Args[0]
  9754  			b.Kind = BlockMIPSNE
  9755  			b.SetControl(x)
  9756  			return true
  9757  		}
  9758  		// match: (EQ (SGTUzero x) yes no)
  9759  		// cond:
  9760  		// result: (EQ x yes no)
  9761  		for {
  9762  			v := b.Control
  9763  			if v.Op != OpMIPSSGTUzero {
  9764  				break
  9765  			}
  9766  			x := v.Args[0]
  9767  			b.Kind = BlockMIPSEQ
  9768  			b.SetControl(x)
  9769  			return true
  9770  		}
  9771  		// match: (EQ (SGTconst [0] x) yes no)
  9772  		// cond:
  9773  		// result: (GEZ x yes no)
  9774  		for {
  9775  			v := b.Control
  9776  			if v.Op != OpMIPSSGTconst {
  9777  				break
  9778  			}
  9779  			if v.AuxInt != 0 {
  9780  				break
  9781  			}
  9782  			x := v.Args[0]
  9783  			b.Kind = BlockMIPSGEZ
  9784  			b.SetControl(x)
  9785  			return true
  9786  		}
  9787  		// match: (EQ (SGTzero x) yes no)
  9788  		// cond:
  9789  		// result: (LEZ x yes no)
  9790  		for {
  9791  			v := b.Control
  9792  			if v.Op != OpMIPSSGTzero {
  9793  				break
  9794  			}
  9795  			x := v.Args[0]
  9796  			b.Kind = BlockMIPSLEZ
  9797  			b.SetControl(x)
  9798  			return true
  9799  		}
  9800  		// match: (EQ (MOVWconst [0]) yes no)
  9801  		// cond:
  9802  		// result: (First nil yes no)
  9803  		for {
  9804  			v := b.Control
  9805  			if v.Op != OpMIPSMOVWconst {
  9806  				break
  9807  			}
  9808  			if v.AuxInt != 0 {
  9809  				break
  9810  			}
  9811  			b.Kind = BlockFirst
  9812  			b.SetControl(nil)
  9813  			return true
  9814  		}
  9815  		// match: (EQ (MOVWconst [c]) yes no)
  9816  		// cond: c != 0
  9817  		// result: (First nil no yes)
  9818  		for {
  9819  			v := b.Control
  9820  			if v.Op != OpMIPSMOVWconst {
  9821  				break
  9822  			}
  9823  			c := v.AuxInt
  9824  			if !(c != 0) {
  9825  				break
  9826  			}
  9827  			b.Kind = BlockFirst
  9828  			b.SetControl(nil)
  9829  			b.swapSuccessors()
  9830  			return true
  9831  		}
  9832  	case BlockMIPSGEZ:
  9833  		// match: (GEZ (MOVWconst [c]) yes no)
  9834  		// cond: int32(c) >= 0
  9835  		// result: (First nil yes no)
  9836  		for {
  9837  			v := b.Control
  9838  			if v.Op != OpMIPSMOVWconst {
  9839  				break
  9840  			}
  9841  			c := v.AuxInt
  9842  			if !(int32(c) >= 0) {
  9843  				break
  9844  			}
  9845  			b.Kind = BlockFirst
  9846  			b.SetControl(nil)
  9847  			return true
  9848  		}
  9849  		// match: (GEZ (MOVWconst [c]) yes no)
  9850  		// cond: int32(c) <  0
  9851  		// result: (First nil no yes)
  9852  		for {
  9853  			v := b.Control
  9854  			if v.Op != OpMIPSMOVWconst {
  9855  				break
  9856  			}
  9857  			c := v.AuxInt
  9858  			if !(int32(c) < 0) {
  9859  				break
  9860  			}
  9861  			b.Kind = BlockFirst
  9862  			b.SetControl(nil)
  9863  			b.swapSuccessors()
  9864  			return true
  9865  		}
  9866  	case BlockMIPSGTZ:
  9867  		// match: (GTZ (MOVWconst [c]) yes no)
  9868  		// cond: int32(c) >  0
  9869  		// result: (First nil yes no)
  9870  		for {
  9871  			v := b.Control
  9872  			if v.Op != OpMIPSMOVWconst {
  9873  				break
  9874  			}
  9875  			c := v.AuxInt
  9876  			if !(int32(c) > 0) {
  9877  				break
  9878  			}
  9879  			b.Kind = BlockFirst
  9880  			b.SetControl(nil)
  9881  			return true
  9882  		}
  9883  		// match: (GTZ (MOVWconst [c]) yes no)
  9884  		// cond: int32(c) <= 0
  9885  		// result: (First nil no yes)
  9886  		for {
  9887  			v := b.Control
  9888  			if v.Op != OpMIPSMOVWconst {
  9889  				break
  9890  			}
  9891  			c := v.AuxInt
  9892  			if !(int32(c) <= 0) {
  9893  				break
  9894  			}
  9895  			b.Kind = BlockFirst
  9896  			b.SetControl(nil)
  9897  			b.swapSuccessors()
  9898  			return true
  9899  		}
  9900  	case BlockIf:
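		// A generic If block carries a boolean (0 or 1) control, so it
		// is lowered to a MIPS NE test of that value against zero.
		// The v := b.Control / _ = v pair below is unused scaffolding
		// emitted by the rule generator and is harmless.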
  9901  		// match: (If cond yes no)
  9902  		// cond:
  9903  		// result: (NE cond yes no)
  9904  		for {
  9905  			v := b.Control
  9906  			_ = v
  9907  			cond := b.Control
  9908  			b.Kind = BlockMIPSNE
  9909  			b.SetControl(cond)
  9910  			return true
  9911  		}
  9912  	case BlockMIPSLEZ:
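		// LEZ branches when its control value is <= 0 (signed).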
  9913  		// match: (LEZ (MOVWconst [c]) yes no)
  9914  		// cond: int32(c) <= 0
  9915  		// result: (First nil yes no)
  9916  		for {
  9917  			v := b.Control
  9918  			if v.Op != OpMIPSMOVWconst {
  9919  				break
  9920  			}
  9921  			c := v.AuxInt
  9922  			if !(int32(c) <= 0) {
  9923  				break
  9924  			}
  9925  			b.Kind = BlockFirst
  9926  			b.SetControl(nil)
  9927  			return true
  9928  		}
  9929  		// match: (LEZ (MOVWconst [c]) yes no)
  9930  		// cond: int32(c) >  0
  9931  		// result: (First nil no yes)
  9932  		for {
  9933  			v := b.Control
  9934  			if v.Op != OpMIPSMOVWconst {
  9935  				break
  9936  			}
  9937  			c := v.AuxInt
  9938  			if !(int32(c) > 0) {
  9939  				break
  9940  			}
  9941  			b.Kind = BlockFirst
  9942  			b.SetControl(nil)
  9943  			b.swapSuccessors()
  9944  			return true
  9945  		}
  9946  	case BlockMIPSLTZ:
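		// LTZ branches when its control value is < 0 (signed).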
  9947  		// match: (LTZ (MOVWconst [c]) yes no)
  9948  		// cond: int32(c) <  0
  9949  		// result: (First nil yes no)
  9950  		for {
  9951  			v := b.Control
  9952  			if v.Op != OpMIPSMOVWconst {
  9953  				break
  9954  			}
  9955  			c := v.AuxInt
  9956  			if !(int32(c) < 0) {
  9957  				break
  9958  			}
  9959  			b.Kind = BlockFirst
  9960  			b.SetControl(nil)
  9961  			return true
  9962  		}
  9963  		// match: (LTZ (MOVWconst [c]) yes no)
  9964  		// cond: int32(c) >= 0
  9965  		// result: (First nil no yes)
  9966  		for {
  9967  			v := b.Control
  9968  			if v.Op != OpMIPSMOVWconst {
  9969  				break
  9970  			}
  9971  			c := v.AuxInt
  9972  			if !(int32(c) >= 0) {
  9973  				break
  9974  			}
  9975  			b.Kind = BlockFirst
  9976  			b.SetControl(nil)
  9977  			b.swapSuccessors()
  9978  			return true
  9979  		}
  9980  	case BlockMIPSNE:
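		// NE branches when its control value is non-zero. The rules in
		// this case fall into four groups: floating-point flag tests,
		// comparisons negated with XORconst [1], direct integer
		// comparisons, and constant controls.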
  9981  		// match: (NE (FPFlagTrue cmp) yes no)
  9982  		// cond:
  9983  		// result: (FPT cmp yes no)
  9984  		for {
  9985  			v := b.Control
  9986  			if v.Op != OpMIPSFPFlagTrue {
  9987  				break
  9988  			}
  9989  			cmp := v.Args[0]
  9990  			b.Kind = BlockMIPSFPT
  9991  			b.SetControl(cmp)
  9992  			return true
  9993  		}
  9994  		// match: (NE (FPFlagFalse cmp) yes no)
  9995  		// cond:
  9996  		// result: (FPF cmp yes no)
  9997  		for {
  9998  			v := b.Control
  9999  			if v.Op != OpMIPSFPFlagFalse {
 10000  				break
 10001  			}
 10002  			cmp := v.Args[0]
 10003  			b.Kind = BlockMIPSFPF
 10004  			b.SetControl(cmp)
 10005  			return true
 10006  		}
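		// XORconst [1] negates a 0/1 comparison result, so testing the
		// negated value with NE is the same as testing the original
		// comparison with EQ.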
 10007  		// match: (NE (XORconst [1] cmp:(SGT _ _)) yes no)
 10008  		// cond:
 10009  		// result: (EQ cmp yes no)
 10010  		for {
 10011  			v := b.Control
 10012  			if v.Op != OpMIPSXORconst {
 10013  				break
 10014  			}
 10015  			if v.AuxInt != 1 {
 10016  				break
 10017  			}
 10018  			cmp := v.Args[0]
 10019  			if cmp.Op != OpMIPSSGT {
 10020  				break
 10021  			}
 10022  			_ = cmp.Args[1]
 10023  			b.Kind = BlockMIPSEQ
 10024  			b.SetControl(cmp)
 10025  			return true
 10026  		}
 10027  		// match: (NE (XORconst [1] cmp:(SGTU _ _)) yes no)
 10028  		// cond:
 10029  		// result: (EQ cmp yes no)
 10030  		for {
 10031  			v := b.Control
 10032  			if v.Op != OpMIPSXORconst {
 10033  				break
 10034  			}
 10035  			if v.AuxInt != 1 {
 10036  				break
 10037  			}
 10038  			cmp := v.Args[0]
 10039  			if cmp.Op != OpMIPSSGTU {
 10040  				break
 10041  			}
 10042  			_ = cmp.Args[1]
 10043  			b.Kind = BlockMIPSEQ
 10044  			b.SetControl(cmp)
 10045  			return true
 10046  		}
 10047  		// match: (NE (XORconst [1] cmp:(SGTconst _)) yes no)
 10048  		// cond:
 10049  		// result: (EQ cmp yes no)
 10050  		for {
 10051  			v := b.Control
 10052  			if v.Op != OpMIPSXORconst {
 10053  				break
 10054  			}
 10055  			if v.AuxInt != 1 {
 10056  				break
 10057  			}
 10058  			cmp := v.Args[0]
 10059  			if cmp.Op != OpMIPSSGTconst {
 10060  				break
 10061  			}
 10062  			b.Kind = BlockMIPSEQ
 10063  			b.SetControl(cmp)
 10064  			return true
 10065  		}
 10066  		// match: (NE (XORconst [1] cmp:(SGTUconst _)) yes no)
 10067  		// cond:
 10068  		// result: (EQ cmp yes no)
 10069  		for {
 10070  			v := b.Control
 10071  			if v.Op != OpMIPSXORconst {
 10072  				break
 10073  			}
 10074  			if v.AuxInt != 1 {
 10075  				break
 10076  			}
 10077  			cmp := v.Args[0]
 10078  			if cmp.Op != OpMIPSSGTUconst {
 10079  				break
 10080  			}
 10081  			b.Kind = BlockMIPSEQ
 10082  			b.SetControl(cmp)
 10083  			return true
 10084  		}
 10085  		// match: (NE (XORconst [1] cmp:(SGTzero _)) yes no)
 10086  		// cond:
 10087  		// result: (EQ cmp yes no)
 10088  		for {
 10089  			v := b.Control
 10090  			if v.Op != OpMIPSXORconst {
 10091  				break
 10092  			}
 10093  			if v.AuxInt != 1 {
 10094  				break
 10095  			}
 10096  			cmp := v.Args[0]
 10097  			if cmp.Op != OpMIPSSGTzero {
 10098  				break
 10099  			}
 10100  			b.Kind = BlockMIPSEQ
 10101  			b.SetControl(cmp)
 10102  			return true
 10103  		}
 10104  		// match: (NE (XORconst [1] cmp:(SGTUzero _)) yes no)
 10105  		// cond:
 10106  		// result: (EQ cmp yes no)
 10107  		for {
 10108  			v := b.Control
 10109  			if v.Op != OpMIPSXORconst {
 10110  				break
 10111  			}
 10112  			if v.AuxInt != 1 {
 10113  				break
 10114  			}
 10115  			cmp := v.Args[0]
 10116  			if cmp.Op != OpMIPSSGTUzero {
 10117  				break
 10118  			}
 10119  			b.Kind = BlockMIPSEQ
 10120  			b.SetControl(cmp)
 10121  			return true
 10122  		}
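		// As in the EQ case above: SGTUconst [1] x is (1 >u x), i.e.
		// (x == 0); SGTUzero x is (x != 0); SGTconst [0] x is (x < 0);
		// SGTzero x is (x > 0). Testing these with NE maps directly
		// onto EQ, NE, LTZ and GTZ blocks on x itself.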
 10123  		// match: (NE (SGTUconst [1] x) yes no)
 10124  		// cond:
 10125  		// result: (EQ x yes no)
 10126  		for {
 10127  			v := b.Control
 10128  			if v.Op != OpMIPSSGTUconst {
 10129  				break
 10130  			}
 10131  			if v.AuxInt != 1 {
 10132  				break
 10133  			}
 10134  			x := v.Args[0]
 10135  			b.Kind = BlockMIPSEQ
 10136  			b.SetControl(x)
 10137  			return true
 10138  		}
 10139  		// match: (NE (SGTUzero x) yes no)
 10140  		// cond:
 10141  		// result: (NE x yes no)
 10142  		for {
 10143  			v := b.Control
 10144  			if v.Op != OpMIPSSGTUzero {
 10145  				break
 10146  			}
 10147  			x := v.Args[0]
 10148  			b.Kind = BlockMIPSNE
 10149  			b.SetControl(x)
 10150  			return true
 10151  		}
 10152  		// match: (NE (SGTconst [0] x) yes no)
 10153  		// cond:
 10154  		// result: (LTZ x yes no)
 10155  		for {
 10156  			v := b.Control
 10157  			if v.Op != OpMIPSSGTconst {
 10158  				break
 10159  			}
 10160  			if v.AuxInt != 0 {
 10161  				break
 10162  			}
 10163  			x := v.Args[0]
 10164  			b.Kind = BlockMIPSLTZ
 10165  			b.SetControl(x)
 10166  			return true
 10167  		}
 10168  		// match: (NE (SGTzero x) yes no)
 10169  		// cond:
 10170  		// result: (GTZ x yes no)
 10171  		for {
 10172  			v := b.Control
 10173  			if v.Op != OpMIPSSGTzero {
 10174  				break
 10175  			}
 10176  			x := v.Args[0]
 10177  			b.Kind = BlockMIPSGTZ
 10178  			b.SetControl(x)
 10179  			return true
 10180  		}
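		// Constant controls fold the NE branch: a zero constant means
		// the branch is never taken (successors are swapped), while a
		// non-zero constant means it is always taken.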
 10181  		// match: (NE (MOVWconst [0]) yes no)
 10182  		// cond:
 10183  		// result: (First nil no yes)
 10184  		for {
 10185  			v := b.Control
 10186  			if v.Op != OpMIPSMOVWconst {
 10187  				break
 10188  			}
 10189  			if v.AuxInt != 0 {
 10190  				break
 10191  			}
 10192  			b.Kind = BlockFirst
 10193  			b.SetControl(nil)
 10194  			b.swapSuccessors()
 10195  			return true
 10196  		}
 10197  		// match: (NE (MOVWconst [c]) yes no)
 10198  		// cond: c != 0
 10199  		// result: (First nil yes no)
 10200  		for {
 10201  			v := b.Control
 10202  			if v.Op != OpMIPSMOVWconst {
 10203  				break
 10204  			}
 10205  			c := v.AuxInt
 10206  			if !(c != 0) {
 10207  				break
 10208  			}
 10209  			b.Kind = BlockFirst
 10210  			b.SetControl(nil)
 10211  			return true
 10212  		}
 10213  	}
 10214  	return false
 10215  }
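// Illustrative sketch, not generated code: assuming the generic
// lowering materializes a condition such as `if x < 0` as the 0/1
// value (SGTconst [0] x), the block rules above rewrite the branch in
// two steps:
//
//	(If (SGTconst [0] x) yes no)  ->  (NE (SGTconst [0] x) yes no)
//	(NE (SGTconst [0] x) yes no)  ->  (LTZ x yes no)
//
// so the final code uses MIPS's native branch-on-less-than-zero and
// the intermediate comparison value can then be removed as dead code.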