github.com/hikaru7719/go@v0.0.0-20181025140707-c8b2ac68906a/src/cmd/compile/internal/ssa/rewriteMIPS.go

     1  // Code generated from gen/MIPS.rules; DO NOT EDIT.
     2  // generated with: cd gen; go run *.go
     3  
     4  package ssa
     5  
     6  import "math"
     7  import "cmd/internal/obj"
     8  import "cmd/internal/objabi"
     9  import "cmd/compile/internal/types"
    10  
    11  var _ = math.MinInt8  // in case not otherwise used
    12  var _ = obj.ANOP      // in case not otherwise used
    13  var _ = objabi.GOROOT // in case not otherwise used
    14  var _ = types.TypeMem // in case not otherwise used
    15  
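// rewriteValueMIPS dispatches on v.Op to the per-opcode rewrite helper below.
// It reports whether any rewrite rule from gen/MIPS.rules was applied to v.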
    16  func rewriteValueMIPS(v *Value) bool {
    17  	switch v.Op {
    18  	case OpAdd16:
    19  		return rewriteValueMIPS_OpAdd16_0(v)
    20  	case OpAdd32:
    21  		return rewriteValueMIPS_OpAdd32_0(v)
    22  	case OpAdd32F:
    23  		return rewriteValueMIPS_OpAdd32F_0(v)
    24  	case OpAdd32withcarry:
    25  		return rewriteValueMIPS_OpAdd32withcarry_0(v)
    26  	case OpAdd64F:
    27  		return rewriteValueMIPS_OpAdd64F_0(v)
    28  	case OpAdd8:
    29  		return rewriteValueMIPS_OpAdd8_0(v)
    30  	case OpAddPtr:
    31  		return rewriteValueMIPS_OpAddPtr_0(v)
    32  	case OpAddr:
    33  		return rewriteValueMIPS_OpAddr_0(v)
    34  	case OpAnd16:
    35  		return rewriteValueMIPS_OpAnd16_0(v)
    36  	case OpAnd32:
    37  		return rewriteValueMIPS_OpAnd32_0(v)
    38  	case OpAnd8:
    39  		return rewriteValueMIPS_OpAnd8_0(v)
    40  	case OpAndB:
    41  		return rewriteValueMIPS_OpAndB_0(v)
    42  	case OpAtomicAdd32:
    43  		return rewriteValueMIPS_OpAtomicAdd32_0(v)
    44  	case OpAtomicAnd8:
    45  		return rewriteValueMIPS_OpAtomicAnd8_0(v)
    46  	case OpAtomicCompareAndSwap32:
    47  		return rewriteValueMIPS_OpAtomicCompareAndSwap32_0(v)
    48  	case OpAtomicExchange32:
    49  		return rewriteValueMIPS_OpAtomicExchange32_0(v)
    50  	case OpAtomicLoad32:
    51  		return rewriteValueMIPS_OpAtomicLoad32_0(v)
    52  	case OpAtomicLoadPtr:
    53  		return rewriteValueMIPS_OpAtomicLoadPtr_0(v)
    54  	case OpAtomicOr8:
    55  		return rewriteValueMIPS_OpAtomicOr8_0(v)
    56  	case OpAtomicStore32:
    57  		return rewriteValueMIPS_OpAtomicStore32_0(v)
    58  	case OpAtomicStorePtrNoWB:
    59  		return rewriteValueMIPS_OpAtomicStorePtrNoWB_0(v)
    60  	case OpAvg32u:
    61  		return rewriteValueMIPS_OpAvg32u_0(v)
    62  	case OpBitLen32:
    63  		return rewriteValueMIPS_OpBitLen32_0(v)
    64  	case OpClosureCall:
    65  		return rewriteValueMIPS_OpClosureCall_0(v)
    66  	case OpCom16:
    67  		return rewriteValueMIPS_OpCom16_0(v)
    68  	case OpCom32:
    69  		return rewriteValueMIPS_OpCom32_0(v)
    70  	case OpCom8:
    71  		return rewriteValueMIPS_OpCom8_0(v)
    72  	case OpConst16:
    73  		return rewriteValueMIPS_OpConst16_0(v)
    74  	case OpConst32:
    75  		return rewriteValueMIPS_OpConst32_0(v)
    76  	case OpConst32F:
    77  		return rewriteValueMIPS_OpConst32F_0(v)
    78  	case OpConst64F:
    79  		return rewriteValueMIPS_OpConst64F_0(v)
    80  	case OpConst8:
    81  		return rewriteValueMIPS_OpConst8_0(v)
    82  	case OpConstBool:
    83  		return rewriteValueMIPS_OpConstBool_0(v)
    84  	case OpConstNil:
    85  		return rewriteValueMIPS_OpConstNil_0(v)
    86  	case OpCtz32:
    87  		return rewriteValueMIPS_OpCtz32_0(v)
    88  	case OpCtz32NonZero:
    89  		return rewriteValueMIPS_OpCtz32NonZero_0(v)
    90  	case OpCvt32Fto32:
    91  		return rewriteValueMIPS_OpCvt32Fto32_0(v)
    92  	case OpCvt32Fto64F:
    93  		return rewriteValueMIPS_OpCvt32Fto64F_0(v)
    94  	case OpCvt32to32F:
    95  		return rewriteValueMIPS_OpCvt32to32F_0(v)
    96  	case OpCvt32to64F:
    97  		return rewriteValueMIPS_OpCvt32to64F_0(v)
    98  	case OpCvt64Fto32:
    99  		return rewriteValueMIPS_OpCvt64Fto32_0(v)
   100  	case OpCvt64Fto32F:
   101  		return rewriteValueMIPS_OpCvt64Fto32F_0(v)
   102  	case OpDiv16:
   103  		return rewriteValueMIPS_OpDiv16_0(v)
   104  	case OpDiv16u:
   105  		return rewriteValueMIPS_OpDiv16u_0(v)
   106  	case OpDiv32:
   107  		return rewriteValueMIPS_OpDiv32_0(v)
   108  	case OpDiv32F:
   109  		return rewriteValueMIPS_OpDiv32F_0(v)
   110  	case OpDiv32u:
   111  		return rewriteValueMIPS_OpDiv32u_0(v)
   112  	case OpDiv64F:
   113  		return rewriteValueMIPS_OpDiv64F_0(v)
   114  	case OpDiv8:
   115  		return rewriteValueMIPS_OpDiv8_0(v)
   116  	case OpDiv8u:
   117  		return rewriteValueMIPS_OpDiv8u_0(v)
   118  	case OpEq16:
   119  		return rewriteValueMIPS_OpEq16_0(v)
   120  	case OpEq32:
   121  		return rewriteValueMIPS_OpEq32_0(v)
   122  	case OpEq32F:
   123  		return rewriteValueMIPS_OpEq32F_0(v)
   124  	case OpEq64F:
   125  		return rewriteValueMIPS_OpEq64F_0(v)
   126  	case OpEq8:
   127  		return rewriteValueMIPS_OpEq8_0(v)
   128  	case OpEqB:
   129  		return rewriteValueMIPS_OpEqB_0(v)
   130  	case OpEqPtr:
   131  		return rewriteValueMIPS_OpEqPtr_0(v)
   132  	case OpGeq16:
   133  		return rewriteValueMIPS_OpGeq16_0(v)
   134  	case OpGeq16U:
   135  		return rewriteValueMIPS_OpGeq16U_0(v)
   136  	case OpGeq32:
   137  		return rewriteValueMIPS_OpGeq32_0(v)
   138  	case OpGeq32F:
   139  		return rewriteValueMIPS_OpGeq32F_0(v)
   140  	case OpGeq32U:
   141  		return rewriteValueMIPS_OpGeq32U_0(v)
   142  	case OpGeq64F:
   143  		return rewriteValueMIPS_OpGeq64F_0(v)
   144  	case OpGeq8:
   145  		return rewriteValueMIPS_OpGeq8_0(v)
   146  	case OpGeq8U:
   147  		return rewriteValueMIPS_OpGeq8U_0(v)
   148  	case OpGetCallerPC:
   149  		return rewriteValueMIPS_OpGetCallerPC_0(v)
   150  	case OpGetCallerSP:
   151  		return rewriteValueMIPS_OpGetCallerSP_0(v)
   152  	case OpGetClosurePtr:
   153  		return rewriteValueMIPS_OpGetClosurePtr_0(v)
   154  	case OpGreater16:
   155  		return rewriteValueMIPS_OpGreater16_0(v)
   156  	case OpGreater16U:
   157  		return rewriteValueMIPS_OpGreater16U_0(v)
   158  	case OpGreater32:
   159  		return rewriteValueMIPS_OpGreater32_0(v)
   160  	case OpGreater32F:
   161  		return rewriteValueMIPS_OpGreater32F_0(v)
   162  	case OpGreater32U:
   163  		return rewriteValueMIPS_OpGreater32U_0(v)
   164  	case OpGreater64F:
   165  		return rewriteValueMIPS_OpGreater64F_0(v)
   166  	case OpGreater8:
   167  		return rewriteValueMIPS_OpGreater8_0(v)
   168  	case OpGreater8U:
   169  		return rewriteValueMIPS_OpGreater8U_0(v)
   170  	case OpHmul32:
   171  		return rewriteValueMIPS_OpHmul32_0(v)
   172  	case OpHmul32u:
   173  		return rewriteValueMIPS_OpHmul32u_0(v)
   174  	case OpInterCall:
   175  		return rewriteValueMIPS_OpInterCall_0(v)
   176  	case OpIsInBounds:
   177  		return rewriteValueMIPS_OpIsInBounds_0(v)
   178  	case OpIsNonNil:
   179  		return rewriteValueMIPS_OpIsNonNil_0(v)
   180  	case OpIsSliceInBounds:
   181  		return rewriteValueMIPS_OpIsSliceInBounds_0(v)
   182  	case OpLeq16:
   183  		return rewriteValueMIPS_OpLeq16_0(v)
   184  	case OpLeq16U:
   185  		return rewriteValueMIPS_OpLeq16U_0(v)
   186  	case OpLeq32:
   187  		return rewriteValueMIPS_OpLeq32_0(v)
   188  	case OpLeq32F:
   189  		return rewriteValueMIPS_OpLeq32F_0(v)
   190  	case OpLeq32U:
   191  		return rewriteValueMIPS_OpLeq32U_0(v)
   192  	case OpLeq64F:
   193  		return rewriteValueMIPS_OpLeq64F_0(v)
   194  	case OpLeq8:
   195  		return rewriteValueMIPS_OpLeq8_0(v)
   196  	case OpLeq8U:
   197  		return rewriteValueMIPS_OpLeq8U_0(v)
   198  	case OpLess16:
   199  		return rewriteValueMIPS_OpLess16_0(v)
   200  	case OpLess16U:
   201  		return rewriteValueMIPS_OpLess16U_0(v)
   202  	case OpLess32:
   203  		return rewriteValueMIPS_OpLess32_0(v)
   204  	case OpLess32F:
   205  		return rewriteValueMIPS_OpLess32F_0(v)
   206  	case OpLess32U:
   207  		return rewriteValueMIPS_OpLess32U_0(v)
   208  	case OpLess64F:
   209  		return rewriteValueMIPS_OpLess64F_0(v)
   210  	case OpLess8:
   211  		return rewriteValueMIPS_OpLess8_0(v)
   212  	case OpLess8U:
   213  		return rewriteValueMIPS_OpLess8U_0(v)
   214  	case OpLoad:
   215  		return rewriteValueMIPS_OpLoad_0(v)
   216  	case OpLocalAddr:
   217  		return rewriteValueMIPS_OpLocalAddr_0(v)
   218  	case OpLsh16x16:
   219  		return rewriteValueMIPS_OpLsh16x16_0(v)
   220  	case OpLsh16x32:
   221  		return rewriteValueMIPS_OpLsh16x32_0(v)
   222  	case OpLsh16x64:
   223  		return rewriteValueMIPS_OpLsh16x64_0(v)
   224  	case OpLsh16x8:
   225  		return rewriteValueMIPS_OpLsh16x8_0(v)
   226  	case OpLsh32x16:
   227  		return rewriteValueMIPS_OpLsh32x16_0(v)
   228  	case OpLsh32x32:
   229  		return rewriteValueMIPS_OpLsh32x32_0(v)
   230  	case OpLsh32x64:
   231  		return rewriteValueMIPS_OpLsh32x64_0(v)
   232  	case OpLsh32x8:
   233  		return rewriteValueMIPS_OpLsh32x8_0(v)
   234  	case OpLsh8x16:
   235  		return rewriteValueMIPS_OpLsh8x16_0(v)
   236  	case OpLsh8x32:
   237  		return rewriteValueMIPS_OpLsh8x32_0(v)
   238  	case OpLsh8x64:
   239  		return rewriteValueMIPS_OpLsh8x64_0(v)
   240  	case OpLsh8x8:
   241  		return rewriteValueMIPS_OpLsh8x8_0(v)
   242  	case OpMIPSADD:
   243  		return rewriteValueMIPS_OpMIPSADD_0(v)
   244  	case OpMIPSADDconst:
   245  		return rewriteValueMIPS_OpMIPSADDconst_0(v)
   246  	case OpMIPSAND:
   247  		return rewriteValueMIPS_OpMIPSAND_0(v)
   248  	case OpMIPSANDconst:
   249  		return rewriteValueMIPS_OpMIPSANDconst_0(v)
   250  	case OpMIPSCMOVZ:
   251  		return rewriteValueMIPS_OpMIPSCMOVZ_0(v)
   252  	case OpMIPSCMOVZzero:
   253  		return rewriteValueMIPS_OpMIPSCMOVZzero_0(v)
   254  	case OpMIPSLoweredAtomicAdd:
   255  		return rewriteValueMIPS_OpMIPSLoweredAtomicAdd_0(v)
   256  	case OpMIPSLoweredAtomicStore:
   257  		return rewriteValueMIPS_OpMIPSLoweredAtomicStore_0(v)
   258  	case OpMIPSMOVBUload:
   259  		return rewriteValueMIPS_OpMIPSMOVBUload_0(v)
   260  	case OpMIPSMOVBUreg:
   261  		return rewriteValueMIPS_OpMIPSMOVBUreg_0(v)
   262  	case OpMIPSMOVBload:
   263  		return rewriteValueMIPS_OpMIPSMOVBload_0(v)
   264  	case OpMIPSMOVBreg:
   265  		return rewriteValueMIPS_OpMIPSMOVBreg_0(v)
   266  	case OpMIPSMOVBstore:
   267  		return rewriteValueMIPS_OpMIPSMOVBstore_0(v)
   268  	case OpMIPSMOVBstorezero:
   269  		return rewriteValueMIPS_OpMIPSMOVBstorezero_0(v)
   270  	case OpMIPSMOVDload:
   271  		return rewriteValueMIPS_OpMIPSMOVDload_0(v)
   272  	case OpMIPSMOVDstore:
   273  		return rewriteValueMIPS_OpMIPSMOVDstore_0(v)
   274  	case OpMIPSMOVFload:
   275  		return rewriteValueMIPS_OpMIPSMOVFload_0(v)
   276  	case OpMIPSMOVFstore:
   277  		return rewriteValueMIPS_OpMIPSMOVFstore_0(v)
   278  	case OpMIPSMOVHUload:
   279  		return rewriteValueMIPS_OpMIPSMOVHUload_0(v)
   280  	case OpMIPSMOVHUreg:
   281  		return rewriteValueMIPS_OpMIPSMOVHUreg_0(v)
   282  	case OpMIPSMOVHload:
   283  		return rewriteValueMIPS_OpMIPSMOVHload_0(v)
   284  	case OpMIPSMOVHreg:
   285  		return rewriteValueMIPS_OpMIPSMOVHreg_0(v)
   286  	case OpMIPSMOVHstore:
   287  		return rewriteValueMIPS_OpMIPSMOVHstore_0(v)
   288  	case OpMIPSMOVHstorezero:
   289  		return rewriteValueMIPS_OpMIPSMOVHstorezero_0(v)
   290  	case OpMIPSMOVWload:
   291  		return rewriteValueMIPS_OpMIPSMOVWload_0(v)
   292  	case OpMIPSMOVWreg:
   293  		return rewriteValueMIPS_OpMIPSMOVWreg_0(v)
   294  	case OpMIPSMOVWstore:
   295  		return rewriteValueMIPS_OpMIPSMOVWstore_0(v)
   296  	case OpMIPSMOVWstorezero:
   297  		return rewriteValueMIPS_OpMIPSMOVWstorezero_0(v)
   298  	case OpMIPSMUL:
   299  		return rewriteValueMIPS_OpMIPSMUL_0(v)
   300  	case OpMIPSNEG:
   301  		return rewriteValueMIPS_OpMIPSNEG_0(v)
   302  	case OpMIPSNOR:
   303  		return rewriteValueMIPS_OpMIPSNOR_0(v)
   304  	case OpMIPSNORconst:
   305  		return rewriteValueMIPS_OpMIPSNORconst_0(v)
   306  	case OpMIPSOR:
   307  		return rewriteValueMIPS_OpMIPSOR_0(v)
   308  	case OpMIPSORconst:
   309  		return rewriteValueMIPS_OpMIPSORconst_0(v)
   310  	case OpMIPSSGT:
   311  		return rewriteValueMIPS_OpMIPSSGT_0(v)
   312  	case OpMIPSSGTU:
   313  		return rewriteValueMIPS_OpMIPSSGTU_0(v)
   314  	case OpMIPSSGTUconst:
   315  		return rewriteValueMIPS_OpMIPSSGTUconst_0(v)
   316  	case OpMIPSSGTUzero:
   317  		return rewriteValueMIPS_OpMIPSSGTUzero_0(v)
   318  	case OpMIPSSGTconst:
   319  		return rewriteValueMIPS_OpMIPSSGTconst_0(v) || rewriteValueMIPS_OpMIPSSGTconst_10(v)
   320  	case OpMIPSSGTzero:
   321  		return rewriteValueMIPS_OpMIPSSGTzero_0(v)
   322  	case OpMIPSSLL:
   323  		return rewriteValueMIPS_OpMIPSSLL_0(v)
   324  	case OpMIPSSLLconst:
   325  		return rewriteValueMIPS_OpMIPSSLLconst_0(v)
   326  	case OpMIPSSRA:
   327  		return rewriteValueMIPS_OpMIPSSRA_0(v)
   328  	case OpMIPSSRAconst:
   329  		return rewriteValueMIPS_OpMIPSSRAconst_0(v)
   330  	case OpMIPSSRL:
   331  		return rewriteValueMIPS_OpMIPSSRL_0(v)
   332  	case OpMIPSSRLconst:
   333  		return rewriteValueMIPS_OpMIPSSRLconst_0(v)
   334  	case OpMIPSSUB:
   335  		return rewriteValueMIPS_OpMIPSSUB_0(v)
   336  	case OpMIPSSUBconst:
   337  		return rewriteValueMIPS_OpMIPSSUBconst_0(v)
   338  	case OpMIPSXOR:
   339  		return rewriteValueMIPS_OpMIPSXOR_0(v)
   340  	case OpMIPSXORconst:
   341  		return rewriteValueMIPS_OpMIPSXORconst_0(v)
   342  	case OpMod16:
   343  		return rewriteValueMIPS_OpMod16_0(v)
   344  	case OpMod16u:
   345  		return rewriteValueMIPS_OpMod16u_0(v)
   346  	case OpMod32:
   347  		return rewriteValueMIPS_OpMod32_0(v)
   348  	case OpMod32u:
   349  		return rewriteValueMIPS_OpMod32u_0(v)
   350  	case OpMod8:
   351  		return rewriteValueMIPS_OpMod8_0(v)
   352  	case OpMod8u:
   353  		return rewriteValueMIPS_OpMod8u_0(v)
   354  	case OpMove:
   355  		return rewriteValueMIPS_OpMove_0(v) || rewriteValueMIPS_OpMove_10(v)
   356  	case OpMul16:
   357  		return rewriteValueMIPS_OpMul16_0(v)
   358  	case OpMul32:
   359  		return rewriteValueMIPS_OpMul32_0(v)
   360  	case OpMul32F:
   361  		return rewriteValueMIPS_OpMul32F_0(v)
   362  	case OpMul32uhilo:
   363  		return rewriteValueMIPS_OpMul32uhilo_0(v)
   364  	case OpMul64F:
   365  		return rewriteValueMIPS_OpMul64F_0(v)
   366  	case OpMul8:
   367  		return rewriteValueMIPS_OpMul8_0(v)
   368  	case OpNeg16:
   369  		return rewriteValueMIPS_OpNeg16_0(v)
   370  	case OpNeg32:
   371  		return rewriteValueMIPS_OpNeg32_0(v)
   372  	case OpNeg32F:
   373  		return rewriteValueMIPS_OpNeg32F_0(v)
   374  	case OpNeg64F:
   375  		return rewriteValueMIPS_OpNeg64F_0(v)
   376  	case OpNeg8:
   377  		return rewriteValueMIPS_OpNeg8_0(v)
   378  	case OpNeq16:
   379  		return rewriteValueMIPS_OpNeq16_0(v)
   380  	case OpNeq32:
   381  		return rewriteValueMIPS_OpNeq32_0(v)
   382  	case OpNeq32F:
   383  		return rewriteValueMIPS_OpNeq32F_0(v)
   384  	case OpNeq64F:
   385  		return rewriteValueMIPS_OpNeq64F_0(v)
   386  	case OpNeq8:
   387  		return rewriteValueMIPS_OpNeq8_0(v)
   388  	case OpNeqB:
   389  		return rewriteValueMIPS_OpNeqB_0(v)
   390  	case OpNeqPtr:
   391  		return rewriteValueMIPS_OpNeqPtr_0(v)
   392  	case OpNilCheck:
   393  		return rewriteValueMIPS_OpNilCheck_0(v)
   394  	case OpNot:
   395  		return rewriteValueMIPS_OpNot_0(v)
   396  	case OpOffPtr:
   397  		return rewriteValueMIPS_OpOffPtr_0(v)
   398  	case OpOr16:
   399  		return rewriteValueMIPS_OpOr16_0(v)
   400  	case OpOr32:
   401  		return rewriteValueMIPS_OpOr32_0(v)
   402  	case OpOr8:
   403  		return rewriteValueMIPS_OpOr8_0(v)
   404  	case OpOrB:
   405  		return rewriteValueMIPS_OpOrB_0(v)
   406  	case OpRound32F:
   407  		return rewriteValueMIPS_OpRound32F_0(v)
   408  	case OpRound64F:
   409  		return rewriteValueMIPS_OpRound64F_0(v)
   410  	case OpRsh16Ux16:
   411  		return rewriteValueMIPS_OpRsh16Ux16_0(v)
   412  	case OpRsh16Ux32:
   413  		return rewriteValueMIPS_OpRsh16Ux32_0(v)
   414  	case OpRsh16Ux64:
   415  		return rewriteValueMIPS_OpRsh16Ux64_0(v)
   416  	case OpRsh16Ux8:
   417  		return rewriteValueMIPS_OpRsh16Ux8_0(v)
   418  	case OpRsh16x16:
   419  		return rewriteValueMIPS_OpRsh16x16_0(v)
   420  	case OpRsh16x32:
   421  		return rewriteValueMIPS_OpRsh16x32_0(v)
   422  	case OpRsh16x64:
   423  		return rewriteValueMIPS_OpRsh16x64_0(v)
   424  	case OpRsh16x8:
   425  		return rewriteValueMIPS_OpRsh16x8_0(v)
   426  	case OpRsh32Ux16:
   427  		return rewriteValueMIPS_OpRsh32Ux16_0(v)
   428  	case OpRsh32Ux32:
   429  		return rewriteValueMIPS_OpRsh32Ux32_0(v)
   430  	case OpRsh32Ux64:
   431  		return rewriteValueMIPS_OpRsh32Ux64_0(v)
   432  	case OpRsh32Ux8:
   433  		return rewriteValueMIPS_OpRsh32Ux8_0(v)
   434  	case OpRsh32x16:
   435  		return rewriteValueMIPS_OpRsh32x16_0(v)
   436  	case OpRsh32x32:
   437  		return rewriteValueMIPS_OpRsh32x32_0(v)
   438  	case OpRsh32x64:
   439  		return rewriteValueMIPS_OpRsh32x64_0(v)
   440  	case OpRsh32x8:
   441  		return rewriteValueMIPS_OpRsh32x8_0(v)
   442  	case OpRsh8Ux16:
   443  		return rewriteValueMIPS_OpRsh8Ux16_0(v)
   444  	case OpRsh8Ux32:
   445  		return rewriteValueMIPS_OpRsh8Ux32_0(v)
   446  	case OpRsh8Ux64:
   447  		return rewriteValueMIPS_OpRsh8Ux64_0(v)
   448  	case OpRsh8Ux8:
   449  		return rewriteValueMIPS_OpRsh8Ux8_0(v)
   450  	case OpRsh8x16:
   451  		return rewriteValueMIPS_OpRsh8x16_0(v)
   452  	case OpRsh8x32:
   453  		return rewriteValueMIPS_OpRsh8x32_0(v)
   454  	case OpRsh8x64:
   455  		return rewriteValueMIPS_OpRsh8x64_0(v)
   456  	case OpRsh8x8:
   457  		return rewriteValueMIPS_OpRsh8x8_0(v)
   458  	case OpSelect0:
   459  		return rewriteValueMIPS_OpSelect0_0(v) || rewriteValueMIPS_OpSelect0_10(v)
   460  	case OpSelect1:
   461  		return rewriteValueMIPS_OpSelect1_0(v) || rewriteValueMIPS_OpSelect1_10(v)
   462  	case OpSignExt16to32:
   463  		return rewriteValueMIPS_OpSignExt16to32_0(v)
   464  	case OpSignExt8to16:
   465  		return rewriteValueMIPS_OpSignExt8to16_0(v)
   466  	case OpSignExt8to32:
   467  		return rewriteValueMIPS_OpSignExt8to32_0(v)
   468  	case OpSignmask:
   469  		return rewriteValueMIPS_OpSignmask_0(v)
   470  	case OpSlicemask:
   471  		return rewriteValueMIPS_OpSlicemask_0(v)
   472  	case OpSqrt:
   473  		return rewriteValueMIPS_OpSqrt_0(v)
   474  	case OpStaticCall:
   475  		return rewriteValueMIPS_OpStaticCall_0(v)
   476  	case OpStore:
   477  		return rewriteValueMIPS_OpStore_0(v)
   478  	case OpSub16:
   479  		return rewriteValueMIPS_OpSub16_0(v)
   480  	case OpSub32:
   481  		return rewriteValueMIPS_OpSub32_0(v)
   482  	case OpSub32F:
   483  		return rewriteValueMIPS_OpSub32F_0(v)
   484  	case OpSub32withcarry:
   485  		return rewriteValueMIPS_OpSub32withcarry_0(v)
   486  	case OpSub64F:
   487  		return rewriteValueMIPS_OpSub64F_0(v)
   488  	case OpSub8:
   489  		return rewriteValueMIPS_OpSub8_0(v)
   490  	case OpSubPtr:
   491  		return rewriteValueMIPS_OpSubPtr_0(v)
   492  	case OpTrunc16to8:
   493  		return rewriteValueMIPS_OpTrunc16to8_0(v)
   494  	case OpTrunc32to16:
   495  		return rewriteValueMIPS_OpTrunc32to16_0(v)
   496  	case OpTrunc32to8:
   497  		return rewriteValueMIPS_OpTrunc32to8_0(v)
   498  	case OpWB:
   499  		return rewriteValueMIPS_OpWB_0(v)
   500  	case OpXor16:
   501  		return rewriteValueMIPS_OpXor16_0(v)
   502  	case OpXor32:
   503  		return rewriteValueMIPS_OpXor32_0(v)
   504  	case OpXor8:
   505  		return rewriteValueMIPS_OpXor8_0(v)
   506  	case OpZero:
   507  		return rewriteValueMIPS_OpZero_0(v) || rewriteValueMIPS_OpZero_10(v)
   508  	case OpZeroExt16to32:
   509  		return rewriteValueMIPS_OpZeroExt16to32_0(v)
   510  	case OpZeroExt8to16:
   511  		return rewriteValueMIPS_OpZeroExt8to16_0(v)
   512  	case OpZeroExt8to32:
   513  		return rewriteValueMIPS_OpZeroExt8to32_0(v)
   514  	case OpZeromask:
   515  		return rewriteValueMIPS_OpZeromask_0(v)
   516  	}
   517  	return false
   518  }
   519  func rewriteValueMIPS_OpAdd16_0(v *Value) bool {
   520  	// match: (Add16 x y)
   521  	// cond:
   522  	// result: (ADD x y)
   523  	for {
   524  		_ = v.Args[1]
   525  		x := v.Args[0]
   526  		y := v.Args[1]
   527  		v.reset(OpMIPSADD)
   528  		v.AddArg(x)
   529  		v.AddArg(y)
   530  		return true
   531  	}
   532  }
   533  func rewriteValueMIPS_OpAdd32_0(v *Value) bool {
   534  	// match: (Add32 x y)
   535  	// cond:
   536  	// result: (ADD x y)
   537  	for {
   538  		_ = v.Args[1]
   539  		x := v.Args[0]
   540  		y := v.Args[1]
   541  		v.reset(OpMIPSADD)
   542  		v.AddArg(x)
   543  		v.AddArg(y)
   544  		return true
   545  	}
   546  }
   547  func rewriteValueMIPS_OpAdd32F_0(v *Value) bool {
   548  	// match: (Add32F x y)
   549  	// cond:
   550  	// result: (ADDF x y)
   551  	for {
   552  		_ = v.Args[1]
   553  		x := v.Args[0]
   554  		y := v.Args[1]
   555  		v.reset(OpMIPSADDF)
   556  		v.AddArg(x)
   557  		v.AddArg(y)
   558  		return true
   559  	}
   560  }
   561  func rewriteValueMIPS_OpAdd32withcarry_0(v *Value) bool {
   562  	b := v.Block
   563  	_ = b
   564  	// match: (Add32withcarry <t> x y c)
   565  	// cond:
   566  	// result: (ADD c (ADD <t> x y))
   567  	for {
   568  		t := v.Type
   569  		_ = v.Args[2]
   570  		x := v.Args[0]
   571  		y := v.Args[1]
   572  		c := v.Args[2]
   573  		v.reset(OpMIPSADD)
   574  		v.AddArg(c)
   575  		v0 := b.NewValue0(v.Pos, OpMIPSADD, t)
   576  		v0.AddArg(x)
   577  		v0.AddArg(y)
   578  		v.AddArg(v0)
   579  		return true
   580  	}
   581  }
   582  func rewriteValueMIPS_OpAdd64F_0(v *Value) bool {
   583  	// match: (Add64F x y)
   584  	// cond:
   585  	// result: (ADDD x y)
   586  	for {
   587  		_ = v.Args[1]
   588  		x := v.Args[0]
   589  		y := v.Args[1]
   590  		v.reset(OpMIPSADDD)
   591  		v.AddArg(x)
   592  		v.AddArg(y)
   593  		return true
   594  	}
   595  }
   596  func rewriteValueMIPS_OpAdd8_0(v *Value) bool {
   597  	// match: (Add8 x y)
   598  	// cond:
   599  	// result: (ADD x y)
   600  	for {
   601  		_ = v.Args[1]
   602  		x := v.Args[0]
   603  		y := v.Args[1]
   604  		v.reset(OpMIPSADD)
   605  		v.AddArg(x)
   606  		v.AddArg(y)
   607  		return true
   608  	}
   609  }
   610  func rewriteValueMIPS_OpAddPtr_0(v *Value) bool {
   611  	// match: (AddPtr x y)
   612  	// cond:
   613  	// result: (ADD x y)
   614  	for {
   615  		_ = v.Args[1]
   616  		x := v.Args[0]
   617  		y := v.Args[1]
   618  		v.reset(OpMIPSADD)
   619  		v.AddArg(x)
   620  		v.AddArg(y)
   621  		return true
   622  	}
   623  }
   624  func rewriteValueMIPS_OpAddr_0(v *Value) bool {
   625  	// match: (Addr {sym} base)
   626  	// cond:
   627  	// result: (MOVWaddr {sym} base)
   628  	for {
   629  		sym := v.Aux
   630  		base := v.Args[0]
   631  		v.reset(OpMIPSMOVWaddr)
   632  		v.Aux = sym
   633  		v.AddArg(base)
   634  		return true
   635  	}
   636  }
   637  func rewriteValueMIPS_OpAnd16_0(v *Value) bool {
   638  	// match: (And16 x y)
   639  	// cond:
   640  	// result: (AND x y)
   641  	for {
   642  		_ = v.Args[1]
   643  		x := v.Args[0]
   644  		y := v.Args[1]
   645  		v.reset(OpMIPSAND)
   646  		v.AddArg(x)
   647  		v.AddArg(y)
   648  		return true
   649  	}
   650  }
   651  func rewriteValueMIPS_OpAnd32_0(v *Value) bool {
   652  	// match: (And32 x y)
   653  	// cond:
   654  	// result: (AND x y)
   655  	for {
   656  		_ = v.Args[1]
   657  		x := v.Args[0]
   658  		y := v.Args[1]
   659  		v.reset(OpMIPSAND)
   660  		v.AddArg(x)
   661  		v.AddArg(y)
   662  		return true
   663  	}
   664  }
   665  func rewriteValueMIPS_OpAnd8_0(v *Value) bool {
   666  	// match: (And8 x y)
   667  	// cond:
   668  	// result: (AND x y)
   669  	for {
   670  		_ = v.Args[1]
   671  		x := v.Args[0]
   672  		y := v.Args[1]
   673  		v.reset(OpMIPSAND)
   674  		v.AddArg(x)
   675  		v.AddArg(y)
   676  		return true
   677  	}
   678  }
   679  func rewriteValueMIPS_OpAndB_0(v *Value) bool {
   680  	// match: (AndB x y)
   681  	// cond:
   682  	// result: (AND x y)
   683  	for {
   684  		_ = v.Args[1]
   685  		x := v.Args[0]
   686  		y := v.Args[1]
   687  		v.reset(OpMIPSAND)
   688  		v.AddArg(x)
   689  		v.AddArg(y)
   690  		return true
   691  	}
   692  }
   693  func rewriteValueMIPS_OpAtomicAdd32_0(v *Value) bool {
   694  	// match: (AtomicAdd32 ptr val mem)
   695  	// cond:
   696  	// result: (LoweredAtomicAdd ptr val mem)
   697  	for {
   698  		_ = v.Args[2]
   699  		ptr := v.Args[0]
   700  		val := v.Args[1]
   701  		mem := v.Args[2]
   702  		v.reset(OpMIPSLoweredAtomicAdd)
   703  		v.AddArg(ptr)
   704  		v.AddArg(val)
   705  		v.AddArg(mem)
   706  		return true
   707  	}
   708  }
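// AtomicAnd8 is lowered to a 32-bit LoweredAtomicAnd on the containing word:
// the pointer is aligned down to a 4-byte boundary (ptr &^ 3) and the operand
// becomes (val << ((ptr&3)*8)) | ^(0xff << ((ptr&3)*8)), so the atomic AND
// updates only the addressed byte and leaves the other three unchanged. The
// big-endian rule XORs the low pointer bits with 3 to pick the byte lane.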
   709  func rewriteValueMIPS_OpAtomicAnd8_0(v *Value) bool {
   710  	b := v.Block
   711  	_ = b
   712  	config := b.Func.Config
   713  	_ = config
   714  	typ := &b.Func.Config.Types
   715  	_ = typ
   716  	// match: (AtomicAnd8 ptr val mem)
   717  	// cond: !config.BigEndian
   718  	// result: (LoweredAtomicAnd (AND <typ.UInt32Ptr> (MOVWconst [^3]) ptr) (OR <typ.UInt32> (SLL <typ.UInt32> (ZeroExt8to32 val) (SLLconst <typ.UInt32> [3] (ANDconst <typ.UInt32> [3] ptr))) (NORconst [0] <typ.UInt32> (SLL <typ.UInt32> (MOVWconst [0xff]) (SLLconst <typ.UInt32> [3] (ANDconst <typ.UInt32> [3] ptr))))) mem)
   719  	for {
   720  		_ = v.Args[2]
   721  		ptr := v.Args[0]
   722  		val := v.Args[1]
   723  		mem := v.Args[2]
   724  		if !(!config.BigEndian) {
   725  			break
   726  		}
   727  		v.reset(OpMIPSLoweredAtomicAnd)
   728  		v0 := b.NewValue0(v.Pos, OpMIPSAND, typ.UInt32Ptr)
   729  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   730  		v1.AuxInt = ^3
   731  		v0.AddArg(v1)
   732  		v0.AddArg(ptr)
   733  		v.AddArg(v0)
   734  		v2 := b.NewValue0(v.Pos, OpMIPSOR, typ.UInt32)
   735  		v3 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
   736  		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   737  		v4.AddArg(val)
   738  		v3.AddArg(v4)
   739  		v5 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
   740  		v5.AuxInt = 3
   741  		v6 := b.NewValue0(v.Pos, OpMIPSANDconst, typ.UInt32)
   742  		v6.AuxInt = 3
   743  		v6.AddArg(ptr)
   744  		v5.AddArg(v6)
   745  		v3.AddArg(v5)
   746  		v2.AddArg(v3)
   747  		v7 := b.NewValue0(v.Pos, OpMIPSNORconst, typ.UInt32)
   748  		v7.AuxInt = 0
   749  		v8 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
   750  		v9 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   751  		v9.AuxInt = 0xff
   752  		v8.AddArg(v9)
   753  		v10 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
   754  		v10.AuxInt = 3
   755  		v11 := b.NewValue0(v.Pos, OpMIPSANDconst, typ.UInt32)
   756  		v11.AuxInt = 3
   757  		v11.AddArg(ptr)
   758  		v10.AddArg(v11)
   759  		v8.AddArg(v10)
   760  		v7.AddArg(v8)
   761  		v2.AddArg(v7)
   762  		v.AddArg(v2)
   763  		v.AddArg(mem)
   764  		return true
   765  	}
   766  	// match: (AtomicAnd8 ptr val mem)
   767  	// cond: config.BigEndian
   768  	// result: (LoweredAtomicAnd (AND <typ.UInt32Ptr> (MOVWconst [^3]) ptr) (OR <typ.UInt32> (SLL <typ.UInt32> (ZeroExt8to32 val) (SLLconst <typ.UInt32> [3] (ANDconst <typ.UInt32> [3] (XORconst <typ.UInt32> [3] ptr)))) (NORconst [0] <typ.UInt32> (SLL <typ.UInt32> (MOVWconst [0xff]) (SLLconst <typ.UInt32> [3] (ANDconst <typ.UInt32> [3] (XORconst <typ.UInt32> [3] ptr)))))) mem)
   769  	for {
   770  		_ = v.Args[2]
   771  		ptr := v.Args[0]
   772  		val := v.Args[1]
   773  		mem := v.Args[2]
   774  		if !(config.BigEndian) {
   775  			break
   776  		}
   777  		v.reset(OpMIPSLoweredAtomicAnd)
   778  		v0 := b.NewValue0(v.Pos, OpMIPSAND, typ.UInt32Ptr)
   779  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   780  		v1.AuxInt = ^3
   781  		v0.AddArg(v1)
   782  		v0.AddArg(ptr)
   783  		v.AddArg(v0)
   784  		v2 := b.NewValue0(v.Pos, OpMIPSOR, typ.UInt32)
   785  		v3 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
   786  		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   787  		v4.AddArg(val)
   788  		v3.AddArg(v4)
   789  		v5 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
   790  		v5.AuxInt = 3
   791  		v6 := b.NewValue0(v.Pos, OpMIPSANDconst, typ.UInt32)
   792  		v6.AuxInt = 3
   793  		v7 := b.NewValue0(v.Pos, OpMIPSXORconst, typ.UInt32)
   794  		v7.AuxInt = 3
   795  		v7.AddArg(ptr)
   796  		v6.AddArg(v7)
   797  		v5.AddArg(v6)
   798  		v3.AddArg(v5)
   799  		v2.AddArg(v3)
   800  		v8 := b.NewValue0(v.Pos, OpMIPSNORconst, typ.UInt32)
   801  		v8.AuxInt = 0
   802  		v9 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
   803  		v10 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   804  		v10.AuxInt = 0xff
   805  		v9.AddArg(v10)
   806  		v11 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
   807  		v11.AuxInt = 3
   808  		v12 := b.NewValue0(v.Pos, OpMIPSANDconst, typ.UInt32)
   809  		v12.AuxInt = 3
   810  		v13 := b.NewValue0(v.Pos, OpMIPSXORconst, typ.UInt32)
   811  		v13.AuxInt = 3
   812  		v13.AddArg(ptr)
   813  		v12.AddArg(v13)
   814  		v11.AddArg(v12)
   815  		v9.AddArg(v11)
   816  		v8.AddArg(v9)
   817  		v2.AddArg(v8)
   818  		v.AddArg(v2)
   819  		v.AddArg(mem)
   820  		return true
   821  	}
   822  	return false
   823  }
   824  func rewriteValueMIPS_OpAtomicCompareAndSwap32_0(v *Value) bool {
   825  	// match: (AtomicCompareAndSwap32 ptr old new_ mem)
   826  	// cond:
   827  	// result: (LoweredAtomicCas ptr old new_ mem)
   828  	for {
   829  		_ = v.Args[3]
   830  		ptr := v.Args[0]
   831  		old := v.Args[1]
   832  		new_ := v.Args[2]
   833  		mem := v.Args[3]
   834  		v.reset(OpMIPSLoweredAtomicCas)
   835  		v.AddArg(ptr)
   836  		v.AddArg(old)
   837  		v.AddArg(new_)
   838  		v.AddArg(mem)
   839  		return true
   840  	}
   841  }
   842  func rewriteValueMIPS_OpAtomicExchange32_0(v *Value) bool {
   843  	// match: (AtomicExchange32 ptr val mem)
   844  	// cond:
   845  	// result: (LoweredAtomicExchange ptr val mem)
   846  	for {
   847  		_ = v.Args[2]
   848  		ptr := v.Args[0]
   849  		val := v.Args[1]
   850  		mem := v.Args[2]
   851  		v.reset(OpMIPSLoweredAtomicExchange)
   852  		v.AddArg(ptr)
   853  		v.AddArg(val)
   854  		v.AddArg(mem)
   855  		return true
   856  	}
   857  }
   858  func rewriteValueMIPS_OpAtomicLoad32_0(v *Value) bool {
   859  	// match: (AtomicLoad32 ptr mem)
   860  	// cond:
   861  	// result: (LoweredAtomicLoad ptr mem)
   862  	for {
   863  		_ = v.Args[1]
   864  		ptr := v.Args[0]
   865  		mem := v.Args[1]
   866  		v.reset(OpMIPSLoweredAtomicLoad)
   867  		v.AddArg(ptr)
   868  		v.AddArg(mem)
   869  		return true
   870  	}
   871  }
   872  func rewriteValueMIPS_OpAtomicLoadPtr_0(v *Value) bool {
   873  	// match: (AtomicLoadPtr ptr mem)
   874  	// cond:
   875  	// result: (LoweredAtomicLoad ptr mem)
   876  	for {
   877  		_ = v.Args[1]
   878  		ptr := v.Args[0]
   879  		mem := v.Args[1]
   880  		v.reset(OpMIPSLoweredAtomicLoad)
   881  		v.AddArg(ptr)
   882  		v.AddArg(mem)
   883  		return true
   884  	}
   885  }
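// AtomicOr8 is likewise lowered to a 32-bit LoweredAtomicOr on the containing
// word: the pointer is aligned down to a 4-byte boundary and the zero-extended
// value is shifted into its byte lane; no mask is needed because OR with zero
// leaves the remaining bytes unchanged. The big-endian rule again XORs the low
// pointer bits with 3 to select the lane.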
   886  func rewriteValueMIPS_OpAtomicOr8_0(v *Value) bool {
   887  	b := v.Block
   888  	_ = b
   889  	config := b.Func.Config
   890  	_ = config
   891  	typ := &b.Func.Config.Types
   892  	_ = typ
   893  	// match: (AtomicOr8 ptr val mem)
   894  	// cond: !config.BigEndian
   895  	// result: (LoweredAtomicOr (AND <typ.UInt32Ptr> (MOVWconst [^3]) ptr) (SLL <typ.UInt32> (ZeroExt8to32 val) (SLLconst <typ.UInt32> [3] (ANDconst <typ.UInt32> [3] ptr))) mem)
   896  	for {
   897  		_ = v.Args[2]
   898  		ptr := v.Args[0]
   899  		val := v.Args[1]
   900  		mem := v.Args[2]
   901  		if !(!config.BigEndian) {
   902  			break
   903  		}
   904  		v.reset(OpMIPSLoweredAtomicOr)
   905  		v0 := b.NewValue0(v.Pos, OpMIPSAND, typ.UInt32Ptr)
   906  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   907  		v1.AuxInt = ^3
   908  		v0.AddArg(v1)
   909  		v0.AddArg(ptr)
   910  		v.AddArg(v0)
   911  		v2 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
   912  		v3 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   913  		v3.AddArg(val)
   914  		v2.AddArg(v3)
   915  		v4 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
   916  		v4.AuxInt = 3
   917  		v5 := b.NewValue0(v.Pos, OpMIPSANDconst, typ.UInt32)
   918  		v5.AuxInt = 3
   919  		v5.AddArg(ptr)
   920  		v4.AddArg(v5)
   921  		v2.AddArg(v4)
   922  		v.AddArg(v2)
   923  		v.AddArg(mem)
   924  		return true
   925  	}
   926  	// match: (AtomicOr8 ptr val mem)
   927  	// cond: config.BigEndian
   928  	// result: (LoweredAtomicOr (AND <typ.UInt32Ptr> (MOVWconst [^3]) ptr) (SLL <typ.UInt32> (ZeroExt8to32 val) (SLLconst <typ.UInt32> [3] (ANDconst <typ.UInt32> [3] (XORconst <typ.UInt32> [3] ptr)))) mem)
   929  	for {
   930  		_ = v.Args[2]
   931  		ptr := v.Args[0]
   932  		val := v.Args[1]
   933  		mem := v.Args[2]
   934  		if !(config.BigEndian) {
   935  			break
   936  		}
   937  		v.reset(OpMIPSLoweredAtomicOr)
   938  		v0 := b.NewValue0(v.Pos, OpMIPSAND, typ.UInt32Ptr)
   939  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   940  		v1.AuxInt = ^3
   941  		v0.AddArg(v1)
   942  		v0.AddArg(ptr)
   943  		v.AddArg(v0)
   944  		v2 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
   945  		v3 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   946  		v3.AddArg(val)
   947  		v2.AddArg(v3)
   948  		v4 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
   949  		v4.AuxInt = 3
   950  		v5 := b.NewValue0(v.Pos, OpMIPSANDconst, typ.UInt32)
   951  		v5.AuxInt = 3
   952  		v6 := b.NewValue0(v.Pos, OpMIPSXORconst, typ.UInt32)
   953  		v6.AuxInt = 3
   954  		v6.AddArg(ptr)
   955  		v5.AddArg(v6)
   956  		v4.AddArg(v5)
   957  		v2.AddArg(v4)
   958  		v.AddArg(v2)
   959  		v.AddArg(mem)
   960  		return true
   961  	}
   962  	return false
   963  }
   964  func rewriteValueMIPS_OpAtomicStore32_0(v *Value) bool {
   965  	// match: (AtomicStore32 ptr val mem)
   966  	// cond:
   967  	// result: (LoweredAtomicStore ptr val mem)
   968  	for {
   969  		_ = v.Args[2]
   970  		ptr := v.Args[0]
   971  		val := v.Args[1]
   972  		mem := v.Args[2]
   973  		v.reset(OpMIPSLoweredAtomicStore)
   974  		v.AddArg(ptr)
   975  		v.AddArg(val)
   976  		v.AddArg(mem)
   977  		return true
   978  	}
   979  }
   980  func rewriteValueMIPS_OpAtomicStorePtrNoWB_0(v *Value) bool {
   981  	// match: (AtomicStorePtrNoWB ptr val mem)
   982  	// cond:
   983  	// result: (LoweredAtomicStore ptr val mem)
   984  	for {
   985  		_ = v.Args[2]
   986  		ptr := v.Args[0]
   987  		val := v.Args[1]
   988  		mem := v.Args[2]
   989  		v.reset(OpMIPSLoweredAtomicStore)
   990  		v.AddArg(ptr)
   991  		v.AddArg(val)
   992  		v.AddArg(mem)
   993  		return true
   994  	}
   995  }
   996  func rewriteValueMIPS_OpAvg32u_0(v *Value) bool {
   997  	b := v.Block
   998  	_ = b
   999  	// match: (Avg32u <t> x y)
  1000  	// cond:
  1001  	// result: (ADD (SRLconst <t> (SUB <t> x y) [1]) y)
  1002  	for {
  1003  		t := v.Type
  1004  		_ = v.Args[1]
  1005  		x := v.Args[0]
  1006  		y := v.Args[1]
  1007  		v.reset(OpMIPSADD)
  1008  		v0 := b.NewValue0(v.Pos, OpMIPSSRLconst, t)
  1009  		v0.AuxInt = 1
  1010  		v1 := b.NewValue0(v.Pos, OpMIPSSUB, t)
  1011  		v1.AddArg(x)
  1012  		v1.AddArg(y)
  1013  		v0.AddArg(v1)
  1014  		v.AddArg(v0)
  1015  		v.AddArg(y)
  1016  		return true
  1017  	}
  1018  }
  1019  func rewriteValueMIPS_OpBitLen32_0(v *Value) bool {
  1020  	b := v.Block
  1021  	_ = b
  1022  	typ := &b.Func.Config.Types
  1023  	_ = typ
  1024  	// match: (BitLen32 <t> x)
  1025  	// cond:
  1026  	// result: (SUB (MOVWconst [32]) (CLZ <t> x))
  1027  	for {
  1028  		t := v.Type
  1029  		x := v.Args[0]
  1030  		v.reset(OpMIPSSUB)
  1031  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  1032  		v0.AuxInt = 32
  1033  		v.AddArg(v0)
  1034  		v1 := b.NewValue0(v.Pos, OpMIPSCLZ, t)
  1035  		v1.AddArg(x)
  1036  		v.AddArg(v1)
  1037  		return true
  1038  	}
  1039  }
  1040  func rewriteValueMIPS_OpClosureCall_0(v *Value) bool {
  1041  	// match: (ClosureCall [argwid] entry closure mem)
  1042  	// cond:
  1043  	// result: (CALLclosure [argwid] entry closure mem)
  1044  	for {
  1045  		argwid := v.AuxInt
  1046  		_ = v.Args[2]
  1047  		entry := v.Args[0]
  1048  		closure := v.Args[1]
  1049  		mem := v.Args[2]
  1050  		v.reset(OpMIPSCALLclosure)
  1051  		v.AuxInt = argwid
  1052  		v.AddArg(entry)
  1053  		v.AddArg(closure)
  1054  		v.AddArg(mem)
  1055  		return true
  1056  	}
  1057  }
  1058  func rewriteValueMIPS_OpCom16_0(v *Value) bool {
  1059  	// match: (Com16 x)
  1060  	// cond:
  1061  	// result: (NORconst [0] x)
  1062  	for {
  1063  		x := v.Args[0]
  1064  		v.reset(OpMIPSNORconst)
  1065  		v.AuxInt = 0
  1066  		v.AddArg(x)
  1067  		return true
  1068  	}
  1069  }
  1070  func rewriteValueMIPS_OpCom32_0(v *Value) bool {
  1071  	// match: (Com32 x)
  1072  	// cond:
  1073  	// result: (NORconst [0] x)
  1074  	for {
  1075  		x := v.Args[0]
  1076  		v.reset(OpMIPSNORconst)
  1077  		v.AuxInt = 0
  1078  		v.AddArg(x)
  1079  		return true
  1080  	}
  1081  }
  1082  func rewriteValueMIPS_OpCom8_0(v *Value) bool {
  1083  	// match: (Com8 x)
  1084  	// cond:
  1085  	// result: (NORconst [0] x)
  1086  	for {
  1087  		x := v.Args[0]
  1088  		v.reset(OpMIPSNORconst)
  1089  		v.AuxInt = 0
  1090  		v.AddArg(x)
  1091  		return true
  1092  	}
  1093  }
  1094  func rewriteValueMIPS_OpConst16_0(v *Value) bool {
  1095  	// match: (Const16 [val])
  1096  	// cond:
  1097  	// result: (MOVWconst [val])
  1098  	for {
  1099  		val := v.AuxInt
  1100  		v.reset(OpMIPSMOVWconst)
  1101  		v.AuxInt = val
  1102  		return true
  1103  	}
  1104  }
  1105  func rewriteValueMIPS_OpConst32_0(v *Value) bool {
  1106  	// match: (Const32 [val])
  1107  	// cond:
  1108  	// result: (MOVWconst [val])
  1109  	for {
  1110  		val := v.AuxInt
  1111  		v.reset(OpMIPSMOVWconst)
  1112  		v.AuxInt = val
  1113  		return true
  1114  	}
  1115  }
  1116  func rewriteValueMIPS_OpConst32F_0(v *Value) bool {
  1117  	// match: (Const32F [val])
  1118  	// cond:
  1119  	// result: (MOVFconst [val])
  1120  	for {
  1121  		val := v.AuxInt
  1122  		v.reset(OpMIPSMOVFconst)
  1123  		v.AuxInt = val
  1124  		return true
  1125  	}
  1126  }
  1127  func rewriteValueMIPS_OpConst64F_0(v *Value) bool {
  1128  	// match: (Const64F [val])
  1129  	// cond:
  1130  	// result: (MOVDconst [val])
  1131  	for {
  1132  		val := v.AuxInt
  1133  		v.reset(OpMIPSMOVDconst)
  1134  		v.AuxInt = val
  1135  		return true
  1136  	}
  1137  }
  1138  func rewriteValueMIPS_OpConst8_0(v *Value) bool {
  1139  	// match: (Const8 [val])
  1140  	// cond:
  1141  	// result: (MOVWconst [val])
  1142  	for {
  1143  		val := v.AuxInt
  1144  		v.reset(OpMIPSMOVWconst)
  1145  		v.AuxInt = val
  1146  		return true
  1147  	}
  1148  }
  1149  func rewriteValueMIPS_OpConstBool_0(v *Value) bool {
  1150  	// match: (ConstBool [b])
  1151  	// cond:
  1152  	// result: (MOVWconst [b])
  1153  	for {
  1154  		b := v.AuxInt
  1155  		v.reset(OpMIPSMOVWconst)
  1156  		v.AuxInt = b
  1157  		return true
  1158  	}
  1159  }
  1160  func rewriteValueMIPS_OpConstNil_0(v *Value) bool {
  1161  	// match: (ConstNil)
  1162  	// cond:
  1163  	// result: (MOVWconst [0])
  1164  	for {
  1165  		v.reset(OpMIPSMOVWconst)
  1166  		v.AuxInt = 0
  1167  		return true
  1168  	}
  1169  }
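// Ctz32 is lowered as 32 - CLZ((x & -x) - 1): x & -x isolates the lowest set
// bit, subtracting 1 yields a mask of the trailing zeros, and CLZ of that mask
// is 32 minus the trailing-zero count (giving 32 for x == 0).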
  1170  func rewriteValueMIPS_OpCtz32_0(v *Value) bool {
  1171  	b := v.Block
  1172  	_ = b
  1173  	typ := &b.Func.Config.Types
  1174  	_ = typ
  1175  	// match: (Ctz32 <t> x)
  1176  	// cond:
  1177  	// result: (SUB (MOVWconst [32]) (CLZ <t> (SUBconst <t> [1] (AND <t> x (NEG <t> x)))))
  1178  	for {
  1179  		t := v.Type
  1180  		x := v.Args[0]
  1181  		v.reset(OpMIPSSUB)
  1182  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  1183  		v0.AuxInt = 32
  1184  		v.AddArg(v0)
  1185  		v1 := b.NewValue0(v.Pos, OpMIPSCLZ, t)
  1186  		v2 := b.NewValue0(v.Pos, OpMIPSSUBconst, t)
  1187  		v2.AuxInt = 1
  1188  		v3 := b.NewValue0(v.Pos, OpMIPSAND, t)
  1189  		v3.AddArg(x)
  1190  		v4 := b.NewValue0(v.Pos, OpMIPSNEG, t)
  1191  		v4.AddArg(x)
  1192  		v3.AddArg(v4)
  1193  		v2.AddArg(v3)
  1194  		v1.AddArg(v2)
  1195  		v.AddArg(v1)
  1196  		return true
  1197  	}
  1198  }
  1199  func rewriteValueMIPS_OpCtz32NonZero_0(v *Value) bool {
  1200  	// match: (Ctz32NonZero x)
  1201  	// cond:
  1202  	// result: (Ctz32 x)
  1203  	for {
  1204  		x := v.Args[0]
  1205  		v.reset(OpCtz32)
  1206  		v.AddArg(x)
  1207  		return true
  1208  	}
  1209  }
  1210  func rewriteValueMIPS_OpCvt32Fto32_0(v *Value) bool {
  1211  	// match: (Cvt32Fto32 x)
  1212  	// cond:
  1213  	// result: (TRUNCFW x)
  1214  	for {
  1215  		x := v.Args[0]
  1216  		v.reset(OpMIPSTRUNCFW)
  1217  		v.AddArg(x)
  1218  		return true
  1219  	}
  1220  }
  1221  func rewriteValueMIPS_OpCvt32Fto64F_0(v *Value) bool {
  1222  	// match: (Cvt32Fto64F x)
  1223  	// cond:
  1224  	// result: (MOVFD x)
  1225  	for {
  1226  		x := v.Args[0]
  1227  		v.reset(OpMIPSMOVFD)
  1228  		v.AddArg(x)
  1229  		return true
  1230  	}
  1231  }
  1232  func rewriteValueMIPS_OpCvt32to32F_0(v *Value) bool {
  1233  	// match: (Cvt32to32F x)
  1234  	// cond:
  1235  	// result: (MOVWF x)
  1236  	for {
  1237  		x := v.Args[0]
  1238  		v.reset(OpMIPSMOVWF)
  1239  		v.AddArg(x)
  1240  		return true
  1241  	}
  1242  }
  1243  func rewriteValueMIPS_OpCvt32to64F_0(v *Value) bool {
  1244  	// match: (Cvt32to64F x)
  1245  	// cond:
  1246  	// result: (MOVWD x)
  1247  	for {
  1248  		x := v.Args[0]
  1249  		v.reset(OpMIPSMOVWD)
  1250  		v.AddArg(x)
  1251  		return true
  1252  	}
  1253  }
  1254  func rewriteValueMIPS_OpCvt64Fto32_0(v *Value) bool {
  1255  	// match: (Cvt64Fto32 x)
  1256  	// cond:
  1257  	// result: (TRUNCDW x)
  1258  	for {
  1259  		x := v.Args[0]
  1260  		v.reset(OpMIPSTRUNCDW)
  1261  		v.AddArg(x)
  1262  		return true
  1263  	}
  1264  }
  1265  func rewriteValueMIPS_OpCvt64Fto32F_0(v *Value) bool {
  1266  	// match: (Cvt64Fto32F x)
  1267  	// cond:
  1268  	// result: (MOVDF x)
  1269  	for {
  1270  		x := v.Args[0]
  1271  		v.reset(OpMIPSMOVDF)
  1272  		v.AddArg(x)
  1273  		return true
  1274  	}
  1275  }
  1276  func rewriteValueMIPS_OpDiv16_0(v *Value) bool {
  1277  	b := v.Block
  1278  	_ = b
  1279  	typ := &b.Func.Config.Types
  1280  	_ = typ
  1281  	// match: (Div16 x y)
  1282  	// cond:
  1283  	// result: (Select1 (DIV (SignExt16to32 x) (SignExt16to32 y)))
  1284  	for {
  1285  		_ = v.Args[1]
  1286  		x := v.Args[0]
  1287  		y := v.Args[1]
  1288  		v.reset(OpSelect1)
  1289  		v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32))
  1290  		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  1291  		v1.AddArg(x)
  1292  		v0.AddArg(v1)
  1293  		v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  1294  		v2.AddArg(y)
  1295  		v0.AddArg(v2)
  1296  		v.AddArg(v0)
  1297  		return true
  1298  	}
  1299  }
  1300  func rewriteValueMIPS_OpDiv16u_0(v *Value) bool {
  1301  	b := v.Block
  1302  	_ = b
  1303  	typ := &b.Func.Config.Types
  1304  	_ = typ
  1305  	// match: (Div16u x y)
  1306  	// cond:
  1307  	// result: (Select1 (DIVU (ZeroExt16to32 x) (ZeroExt16to32 y)))
  1308  	for {
  1309  		_ = v.Args[1]
  1310  		x := v.Args[0]
  1311  		y := v.Args[1]
  1312  		v.reset(OpSelect1)
  1313  		v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32))
  1314  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  1315  		v1.AddArg(x)
  1316  		v0.AddArg(v1)
  1317  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  1318  		v2.AddArg(y)
  1319  		v0.AddArg(v2)
  1320  		v.AddArg(v0)
  1321  		return true
  1322  	}
  1323  }
  1324  func rewriteValueMIPS_OpDiv32_0(v *Value) bool {
  1325  	b := v.Block
  1326  	_ = b
  1327  	typ := &b.Func.Config.Types
  1328  	_ = typ
  1329  	// match: (Div32 x y)
  1330  	// cond:
  1331  	// result: (Select1 (DIV x y))
  1332  	for {
  1333  		_ = v.Args[1]
  1334  		x := v.Args[0]
  1335  		y := v.Args[1]
  1336  		v.reset(OpSelect1)
  1337  		v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32))
  1338  		v0.AddArg(x)
  1339  		v0.AddArg(y)
  1340  		v.AddArg(v0)
  1341  		return true
  1342  	}
  1343  }
  1344  func rewriteValueMIPS_OpDiv32F_0(v *Value) bool {
  1345  	// match: (Div32F x y)
  1346  	// cond:
  1347  	// result: (DIVF x y)
  1348  	for {
  1349  		_ = v.Args[1]
  1350  		x := v.Args[0]
  1351  		y := v.Args[1]
  1352  		v.reset(OpMIPSDIVF)
  1353  		v.AddArg(x)
  1354  		v.AddArg(y)
  1355  		return true
  1356  	}
  1357  }
  1358  func rewriteValueMIPS_OpDiv32u_0(v *Value) bool {
  1359  	b := v.Block
  1360  	_ = b
  1361  	typ := &b.Func.Config.Types
  1362  	_ = typ
  1363  	// match: (Div32u x y)
  1364  	// cond:
  1365  	// result: (Select1 (DIVU x y))
  1366  	for {
  1367  		_ = v.Args[1]
  1368  		x := v.Args[0]
  1369  		y := v.Args[1]
  1370  		v.reset(OpSelect1)
  1371  		v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32))
  1372  		v0.AddArg(x)
  1373  		v0.AddArg(y)
  1374  		v.AddArg(v0)
  1375  		return true
  1376  	}
  1377  }
  1378  func rewriteValueMIPS_OpDiv64F_0(v *Value) bool {
  1379  	// match: (Div64F x y)
  1380  	// cond:
  1381  	// result: (DIVD x y)
  1382  	for {
  1383  		_ = v.Args[1]
  1384  		x := v.Args[0]
  1385  		y := v.Args[1]
  1386  		v.reset(OpMIPSDIVD)
  1387  		v.AddArg(x)
  1388  		v.AddArg(y)
  1389  		return true
  1390  	}
  1391  }
  1392  func rewriteValueMIPS_OpDiv8_0(v *Value) bool {
  1393  	b := v.Block
  1394  	_ = b
  1395  	typ := &b.Func.Config.Types
  1396  	_ = typ
  1397  	// match: (Div8 x y)
  1398  	// cond:
  1399  	// result: (Select1 (DIV (SignExt8to32 x) (SignExt8to32 y)))
  1400  	for {
  1401  		_ = v.Args[1]
  1402  		x := v.Args[0]
  1403  		y := v.Args[1]
  1404  		v.reset(OpSelect1)
  1405  		v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32))
  1406  		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  1407  		v1.AddArg(x)
  1408  		v0.AddArg(v1)
  1409  		v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  1410  		v2.AddArg(y)
  1411  		v0.AddArg(v2)
  1412  		v.AddArg(v0)
  1413  		return true
  1414  	}
  1415  }
  1416  func rewriteValueMIPS_OpDiv8u_0(v *Value) bool {
  1417  	b := v.Block
  1418  	_ = b
  1419  	typ := &b.Func.Config.Types
  1420  	_ = typ
  1421  	// match: (Div8u x y)
  1422  	// cond:
  1423  	// result: (Select1 (DIVU (ZeroExt8to32 x) (ZeroExt8to32 y)))
  1424  	for {
  1425  		_ = v.Args[1]
  1426  		x := v.Args[0]
  1427  		y := v.Args[1]
  1428  		v.reset(OpSelect1)
  1429  		v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32))
  1430  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  1431  		v1.AddArg(x)
  1432  		v0.AddArg(v1)
  1433  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  1434  		v2.AddArg(y)
  1435  		v0.AddArg(v2)
  1436  		v.AddArg(v0)
  1437  		return true
  1438  	}
  1439  }
  1440  func rewriteValueMIPS_OpEq16_0(v *Value) bool {
  1441  	b := v.Block
  1442  	_ = b
  1443  	typ := &b.Func.Config.Types
  1444  	_ = typ
  1445  	// match: (Eq16 x y)
  1446  	// cond:
  1447  	// result: (SGTUconst [1] (XOR (ZeroExt16to32 x) (ZeroExt16to32 y)))
  1448  	for {
  1449  		_ = v.Args[1]
  1450  		x := v.Args[0]
  1451  		y := v.Args[1]
  1452  		v.reset(OpMIPSSGTUconst)
  1453  		v.AuxInt = 1
  1454  		v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32)
  1455  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  1456  		v1.AddArg(x)
  1457  		v0.AddArg(v1)
  1458  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  1459  		v2.AddArg(y)
  1460  		v0.AddArg(v2)
  1461  		v.AddArg(v0)
  1462  		return true
  1463  	}
  1464  }
  1465  func rewriteValueMIPS_OpEq32_0(v *Value) bool {
  1466  	b := v.Block
  1467  	_ = b
  1468  	typ := &b.Func.Config.Types
  1469  	_ = typ
  1470  	// match: (Eq32 x y)
  1471  	// cond:
  1472  	// result: (SGTUconst [1] (XOR x y))
  1473  	for {
  1474  		_ = v.Args[1]
  1475  		x := v.Args[0]
  1476  		y := v.Args[1]
  1477  		v.reset(OpMIPSSGTUconst)
  1478  		v.AuxInt = 1
  1479  		v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32)
  1480  		v0.AddArg(x)
  1481  		v0.AddArg(y)
  1482  		v.AddArg(v0)
  1483  		return true
  1484  	}
  1485  }
  1486  func rewriteValueMIPS_OpEq32F_0(v *Value) bool {
  1487  	b := v.Block
  1488  	_ = b
  1489  	// match: (Eq32F x y)
  1490  	// cond:
  1491  	// result: (FPFlagTrue (CMPEQF x y))
  1492  	for {
  1493  		_ = v.Args[1]
  1494  		x := v.Args[0]
  1495  		y := v.Args[1]
  1496  		v.reset(OpMIPSFPFlagTrue)
  1497  		v0 := b.NewValue0(v.Pos, OpMIPSCMPEQF, types.TypeFlags)
  1498  		v0.AddArg(x)
  1499  		v0.AddArg(y)
  1500  		v.AddArg(v0)
  1501  		return true
  1502  	}
  1503  }
  1504  func rewriteValueMIPS_OpEq64F_0(v *Value) bool {
  1505  	b := v.Block
  1506  	_ = b
  1507  	// match: (Eq64F x y)
  1508  	// cond:
  1509  	// result: (FPFlagTrue (CMPEQD x y))
  1510  	for {
  1511  		_ = v.Args[1]
  1512  		x := v.Args[0]
  1513  		y := v.Args[1]
  1514  		v.reset(OpMIPSFPFlagTrue)
  1515  		v0 := b.NewValue0(v.Pos, OpMIPSCMPEQD, types.TypeFlags)
  1516  		v0.AddArg(x)
  1517  		v0.AddArg(y)
  1518  		v.AddArg(v0)
  1519  		return true
  1520  	}
  1521  }
  1522  func rewriteValueMIPS_OpEq8_0(v *Value) bool {
  1523  	b := v.Block
  1524  	_ = b
  1525  	typ := &b.Func.Config.Types
  1526  	_ = typ
  1527  	// match: (Eq8 x y)
  1528  	// cond:
  1529  	// result: (SGTUconst [1] (XOR (ZeroExt8to32 x) (ZeroExt8to32 y)))
  1530  	for {
  1531  		_ = v.Args[1]
  1532  		x := v.Args[0]
  1533  		y := v.Args[1]
  1534  		v.reset(OpMIPSSGTUconst)
  1535  		v.AuxInt = 1
  1536  		v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32)
  1537  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  1538  		v1.AddArg(x)
  1539  		v0.AddArg(v1)
  1540  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  1541  		v2.AddArg(y)
  1542  		v0.AddArg(v2)
  1543  		v.AddArg(v0)
  1544  		return true
  1545  	}
  1546  }
  1547  func rewriteValueMIPS_OpEqB_0(v *Value) bool {
  1548  	b := v.Block
  1549  	_ = b
  1550  	typ := &b.Func.Config.Types
  1551  	_ = typ
  1552  	// match: (EqB x y)
  1553  	// cond:
  1554  	// result: (XORconst [1] (XOR <typ.Bool> x y))
  1555  	for {
  1556  		_ = v.Args[1]
  1557  		x := v.Args[0]
  1558  		y := v.Args[1]
  1559  		v.reset(OpMIPSXORconst)
  1560  		v.AuxInt = 1
  1561  		v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.Bool)
  1562  		v0.AddArg(x)
  1563  		v0.AddArg(y)
  1564  		v.AddArg(v0)
  1565  		return true
  1566  	}
  1567  }
  1568  func rewriteValueMIPS_OpEqPtr_0(v *Value) bool {
  1569  	b := v.Block
  1570  	_ = b
  1571  	typ := &b.Func.Config.Types
  1572  	_ = typ
  1573  	// match: (EqPtr x y)
  1574  	// cond:
  1575  	// result: (SGTUconst [1] (XOR x y))
  1576  	for {
  1577  		_ = v.Args[1]
  1578  		x := v.Args[0]
  1579  		y := v.Args[1]
  1580  		v.reset(OpMIPSSGTUconst)
  1581  		v.AuxInt = 1
  1582  		v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32)
  1583  		v0.AddArg(x)
  1584  		v0.AddArg(y)
  1585  		v.AddArg(v0)
  1586  		return true
  1587  	}
  1588  }
  1589  func rewriteValueMIPS_OpGeq16_0(v *Value) bool {
  1590  	b := v.Block
  1591  	_ = b
  1592  	typ := &b.Func.Config.Types
  1593  	_ = typ
  1594  	// match: (Geq16 x y)
  1595  	// cond:
  1596  	// result: (XORconst [1] (SGT (SignExt16to32 y) (SignExt16to32 x)))
  1597  	for {
  1598  		_ = v.Args[1]
  1599  		x := v.Args[0]
  1600  		y := v.Args[1]
  1601  		v.reset(OpMIPSXORconst)
  1602  		v.AuxInt = 1
  1603  		v0 := b.NewValue0(v.Pos, OpMIPSSGT, typ.Bool)
  1604  		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  1605  		v1.AddArg(y)
  1606  		v0.AddArg(v1)
  1607  		v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  1608  		v2.AddArg(x)
  1609  		v0.AddArg(v2)
  1610  		v.AddArg(v0)
  1611  		return true
  1612  	}
  1613  }
  1614  func rewriteValueMIPS_OpGeq16U_0(v *Value) bool {
  1615  	b := v.Block
  1616  	_ = b
  1617  	typ := &b.Func.Config.Types
  1618  	_ = typ
  1619  	// match: (Geq16U x y)
  1620  	// cond:
  1621  	// result: (XORconst [1] (SGTU (ZeroExt16to32 y) (ZeroExt16to32 x)))
  1622  	for {
  1623  		_ = v.Args[1]
  1624  		x := v.Args[0]
  1625  		y := v.Args[1]
  1626  		v.reset(OpMIPSXORconst)
  1627  		v.AuxInt = 1
  1628  		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
  1629  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  1630  		v1.AddArg(y)
  1631  		v0.AddArg(v1)
  1632  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  1633  		v2.AddArg(x)
  1634  		v0.AddArg(v2)
  1635  		v.AddArg(v0)
  1636  		return true
  1637  	}
  1638  }
  1639  func rewriteValueMIPS_OpGeq32_0(v *Value) bool {
  1640  	b := v.Block
  1641  	_ = b
  1642  	typ := &b.Func.Config.Types
  1643  	_ = typ
  1644  	// match: (Geq32 x y)
  1645  	// cond:
  1646  	// result: (XORconst [1] (SGT y x))
  1647  	for {
  1648  		_ = v.Args[1]
  1649  		x := v.Args[0]
  1650  		y := v.Args[1]
  1651  		v.reset(OpMIPSXORconst)
  1652  		v.AuxInt = 1
  1653  		v0 := b.NewValue0(v.Pos, OpMIPSSGT, typ.Bool)
  1654  		v0.AddArg(y)
  1655  		v0.AddArg(x)
  1656  		v.AddArg(v0)
  1657  		return true
  1658  	}
  1659  }
  1660  func rewriteValueMIPS_OpGeq32F_0(v *Value) bool {
  1661  	b := v.Block
  1662  	_ = b
  1663  	// match: (Geq32F x y)
  1664  	// cond:
  1665  	// result: (FPFlagTrue (CMPGEF x y))
  1666  	for {
  1667  		_ = v.Args[1]
  1668  		x := v.Args[0]
  1669  		y := v.Args[1]
  1670  		v.reset(OpMIPSFPFlagTrue)
  1671  		v0 := b.NewValue0(v.Pos, OpMIPSCMPGEF, types.TypeFlags)
  1672  		v0.AddArg(x)
  1673  		v0.AddArg(y)
  1674  		v.AddArg(v0)
  1675  		return true
  1676  	}
  1677  }
  1678  func rewriteValueMIPS_OpGeq32U_0(v *Value) bool {
  1679  	b := v.Block
  1680  	_ = b
  1681  	typ := &b.Func.Config.Types
  1682  	_ = typ
  1683  	// match: (Geq32U x y)
  1684  	// cond:
  1685  	// result: (XORconst [1] (SGTU y x))
  1686  	for {
  1687  		_ = v.Args[1]
  1688  		x := v.Args[0]
  1689  		y := v.Args[1]
  1690  		v.reset(OpMIPSXORconst)
  1691  		v.AuxInt = 1
  1692  		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
  1693  		v0.AddArg(y)
  1694  		v0.AddArg(x)
  1695  		v.AddArg(v0)
  1696  		return true
  1697  	}
  1698  }
  1699  func rewriteValueMIPS_OpGeq64F_0(v *Value) bool {
  1700  	b := v.Block
  1701  	_ = b
  1702  	// match: (Geq64F x y)
  1703  	// cond:
  1704  	// result: (FPFlagTrue (CMPGED x y))
  1705  	for {
  1706  		_ = v.Args[1]
  1707  		x := v.Args[0]
  1708  		y := v.Args[1]
  1709  		v.reset(OpMIPSFPFlagTrue)
  1710  		v0 := b.NewValue0(v.Pos, OpMIPSCMPGED, types.TypeFlags)
  1711  		v0.AddArg(x)
  1712  		v0.AddArg(y)
  1713  		v.AddArg(v0)
  1714  		return true
  1715  	}
  1716  }
  1717  func rewriteValueMIPS_OpGeq8_0(v *Value) bool {
  1718  	b := v.Block
  1719  	_ = b
  1720  	typ := &b.Func.Config.Types
  1721  	_ = typ
  1722  	// match: (Geq8 x y)
  1723  	// cond:
  1724  	// result: (XORconst [1] (SGT (SignExt8to32 y) (SignExt8to32 x)))
  1725  	for {
  1726  		_ = v.Args[1]
  1727  		x := v.Args[0]
  1728  		y := v.Args[1]
  1729  		v.reset(OpMIPSXORconst)
  1730  		v.AuxInt = 1
  1731  		v0 := b.NewValue0(v.Pos, OpMIPSSGT, typ.Bool)
  1732  		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  1733  		v1.AddArg(y)
  1734  		v0.AddArg(v1)
  1735  		v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  1736  		v2.AddArg(x)
  1737  		v0.AddArg(v2)
  1738  		v.AddArg(v0)
  1739  		return true
  1740  	}
  1741  }
  1742  func rewriteValueMIPS_OpGeq8U_0(v *Value) bool {
  1743  	b := v.Block
  1744  	_ = b
  1745  	typ := &b.Func.Config.Types
  1746  	_ = typ
  1747  	// match: (Geq8U x y)
  1748  	// cond:
  1749  	// result: (XORconst [1] (SGTU (ZeroExt8to32 y) (ZeroExt8to32 x)))
  1750  	for {
  1751  		_ = v.Args[1]
  1752  		x := v.Args[0]
  1753  		y := v.Args[1]
  1754  		v.reset(OpMIPSXORconst)
  1755  		v.AuxInt = 1
  1756  		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
  1757  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  1758  		v1.AddArg(y)
  1759  		v0.AddArg(v1)
  1760  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  1761  		v2.AddArg(x)
  1762  		v0.AddArg(v2)
  1763  		v.AddArg(v0)
  1764  		return true
  1765  	}
  1766  }
  1767  func rewriteValueMIPS_OpGetCallerPC_0(v *Value) bool {
  1768  	// match: (GetCallerPC)
  1769  	// cond:
  1770  	// result: (LoweredGetCallerPC)
  1771  	for {
  1772  		v.reset(OpMIPSLoweredGetCallerPC)
  1773  		return true
  1774  	}
  1775  }
  1776  func rewriteValueMIPS_OpGetCallerSP_0(v *Value) bool {
  1777  	// match: (GetCallerSP)
  1778  	// cond:
  1779  	// result: (LoweredGetCallerSP)
  1780  	for {
  1781  		v.reset(OpMIPSLoweredGetCallerSP)
  1782  		return true
  1783  	}
  1784  }
  1785  func rewriteValueMIPS_OpGetClosurePtr_0(v *Value) bool {
  1786  	// match: (GetClosurePtr)
  1787  	// cond:
  1788  	// result: (LoweredGetClosurePtr)
  1789  	for {
  1790  		v.reset(OpMIPSLoweredGetClosurePtr)
  1791  		return true
  1792  	}
  1793  }
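// Greater* maps directly onto SGT/SGTU. Sub-word operands are sign- or
// zero-extended to 32 bits first, and the float forms test the FP flag
// set by CMPGTF/CMPGTD.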
  1794  func rewriteValueMIPS_OpGreater16_0(v *Value) bool {
  1795  	b := v.Block
  1796  	_ = b
  1797  	typ := &b.Func.Config.Types
  1798  	_ = typ
  1799  	// match: (Greater16 x y)
  1800  	// cond:
  1801  	// result: (SGT (SignExt16to32 x) (SignExt16to32 y))
  1802  	for {
  1803  		_ = v.Args[1]
  1804  		x := v.Args[0]
  1805  		y := v.Args[1]
  1806  		v.reset(OpMIPSSGT)
  1807  		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  1808  		v0.AddArg(x)
  1809  		v.AddArg(v0)
  1810  		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  1811  		v1.AddArg(y)
  1812  		v.AddArg(v1)
  1813  		return true
  1814  	}
  1815  }
  1816  func rewriteValueMIPS_OpGreater16U_0(v *Value) bool {
  1817  	b := v.Block
  1818  	_ = b
  1819  	typ := &b.Func.Config.Types
  1820  	_ = typ
  1821  	// match: (Greater16U x y)
  1822  	// cond:
  1823  	// result: (SGTU (ZeroExt16to32 x) (ZeroExt16to32 y))
  1824  	for {
  1825  		_ = v.Args[1]
  1826  		x := v.Args[0]
  1827  		y := v.Args[1]
  1828  		v.reset(OpMIPSSGTU)
  1829  		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  1830  		v0.AddArg(x)
  1831  		v.AddArg(v0)
  1832  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  1833  		v1.AddArg(y)
  1834  		v.AddArg(v1)
  1835  		return true
  1836  	}
  1837  }
  1838  func rewriteValueMIPS_OpGreater32_0(v *Value) bool {
  1839  	// match: (Greater32 x y)
  1840  	// cond:
  1841  	// result: (SGT x y)
  1842  	for {
  1843  		_ = v.Args[1]
  1844  		x := v.Args[0]
  1845  		y := v.Args[1]
  1846  		v.reset(OpMIPSSGT)
  1847  		v.AddArg(x)
  1848  		v.AddArg(y)
  1849  		return true
  1850  	}
  1851  }
  1852  func rewriteValueMIPS_OpGreater32F_0(v *Value) bool {
  1853  	b := v.Block
  1854  	_ = b
  1855  	// match: (Greater32F x y)
  1856  	// cond:
  1857  	// result: (FPFlagTrue (CMPGTF x y))
  1858  	for {
  1859  		_ = v.Args[1]
  1860  		x := v.Args[0]
  1861  		y := v.Args[1]
  1862  		v.reset(OpMIPSFPFlagTrue)
  1863  		v0 := b.NewValue0(v.Pos, OpMIPSCMPGTF, types.TypeFlags)
  1864  		v0.AddArg(x)
  1865  		v0.AddArg(y)
  1866  		v.AddArg(v0)
  1867  		return true
  1868  	}
  1869  }
  1870  func rewriteValueMIPS_OpGreater32U_0(v *Value) bool {
  1871  	// match: (Greater32U x y)
  1872  	// cond:
  1873  	// result: (SGTU x y)
  1874  	for {
  1875  		_ = v.Args[1]
  1876  		x := v.Args[0]
  1877  		y := v.Args[1]
  1878  		v.reset(OpMIPSSGTU)
  1879  		v.AddArg(x)
  1880  		v.AddArg(y)
  1881  		return true
  1882  	}
  1883  }
  1884  func rewriteValueMIPS_OpGreater64F_0(v *Value) bool {
  1885  	b := v.Block
  1886  	_ = b
  1887  	// match: (Greater64F x y)
  1888  	// cond:
  1889  	// result: (FPFlagTrue (CMPGTD x y))
  1890  	for {
  1891  		_ = v.Args[1]
  1892  		x := v.Args[0]
  1893  		y := v.Args[1]
  1894  		v.reset(OpMIPSFPFlagTrue)
  1895  		v0 := b.NewValue0(v.Pos, OpMIPSCMPGTD, types.TypeFlags)
  1896  		v0.AddArg(x)
  1897  		v0.AddArg(y)
  1898  		v.AddArg(v0)
  1899  		return true
  1900  	}
  1901  }
  1902  func rewriteValueMIPS_OpGreater8_0(v *Value) bool {
  1903  	b := v.Block
  1904  	_ = b
  1905  	typ := &b.Func.Config.Types
  1906  	_ = typ
  1907  	// match: (Greater8 x y)
  1908  	// cond:
  1909  	// result: (SGT (SignExt8to32 x) (SignExt8to32 y))
  1910  	for {
  1911  		_ = v.Args[1]
  1912  		x := v.Args[0]
  1913  		y := v.Args[1]
  1914  		v.reset(OpMIPSSGT)
  1915  		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  1916  		v0.AddArg(x)
  1917  		v.AddArg(v0)
  1918  		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  1919  		v1.AddArg(y)
  1920  		v.AddArg(v1)
  1921  		return true
  1922  	}
  1923  }
  1924  func rewriteValueMIPS_OpGreater8U_0(v *Value) bool {
  1925  	b := v.Block
  1926  	_ = b
  1927  	typ := &b.Func.Config.Types
  1928  	_ = typ
  1929  	// match: (Greater8U x y)
  1930  	// cond:
  1931  	// result: (SGTU (ZeroExt8to32 x) (ZeroExt8to32 y))
  1932  	for {
  1933  		_ = v.Args[1]
  1934  		x := v.Args[0]
  1935  		y := v.Args[1]
  1936  		v.reset(OpMIPSSGTU)
  1937  		v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  1938  		v0.AddArg(x)
  1939  		v.AddArg(v0)
  1940  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  1941  		v1.AddArg(y)
  1942  		v.AddArg(v1)
  1943  		return true
  1944  	}
  1945  }
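// Hmul32/Hmul32u want the high 32 bits of the full 64-bit product: MULT
// and MULTU are modeled as returning a (hi, lo) tuple, and Select0 picks
// out the high word.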
  1946  func rewriteValueMIPS_OpHmul32_0(v *Value) bool {
  1947  	b := v.Block
  1948  	_ = b
  1949  	typ := &b.Func.Config.Types
  1950  	_ = typ
  1951  	// match: (Hmul32 x y)
  1952  	// cond:
  1953  	// result: (Select0 (MULT x y))
  1954  	for {
  1955  		_ = v.Args[1]
  1956  		x := v.Args[0]
  1957  		y := v.Args[1]
  1958  		v.reset(OpSelect0)
  1959  		v0 := b.NewValue0(v.Pos, OpMIPSMULT, types.NewTuple(typ.Int32, typ.Int32))
  1960  		v0.AddArg(x)
  1961  		v0.AddArg(y)
  1962  		v.AddArg(v0)
  1963  		return true
  1964  	}
  1965  }
  1966  func rewriteValueMIPS_OpHmul32u_0(v *Value) bool {
  1967  	b := v.Block
  1968  	_ = b
  1969  	typ := &b.Func.Config.Types
  1970  	_ = typ
  1971  	// match: (Hmul32u x y)
  1972  	// cond:
  1973  	// result: (Select0 (MULTU x y))
  1974  	for {
  1975  		_ = v.Args[1]
  1976  		x := v.Args[0]
  1977  		y := v.Args[1]
  1978  		v.reset(OpSelect0)
  1979  		v0 := b.NewValue0(v.Pos, OpMIPSMULTU, types.NewTuple(typ.UInt32, typ.UInt32))
  1980  		v0.AddArg(x)
  1981  		v0.AddArg(y)
  1982  		v.AddArg(v0)
  1983  		return true
  1984  	}
  1985  }
  1986  func rewriteValueMIPS_OpInterCall_0(v *Value) bool {
  1987  	// match: (InterCall [argwid] entry mem)
  1988  	// cond:
  1989  	// result: (CALLinter [argwid] entry mem)
  1990  	for {
  1991  		argwid := v.AuxInt
  1992  		_ = v.Args[1]
  1993  		entry := v.Args[0]
  1994  		mem := v.Args[1]
  1995  		v.reset(OpMIPSCALLinter)
  1996  		v.AuxInt = argwid
  1997  		v.AddArg(entry)
  1998  		v.AddArg(mem)
  1999  		return true
  2000  	}
  2001  }
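// Nil and bounds checks become unsigned comparisons: IsInBounds is
// len > idx (SGTU), IsNonNil compares the pointer against a zero
// constant, and IsSliceInBounds is the negation of idx > len.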
  2002  func rewriteValueMIPS_OpIsInBounds_0(v *Value) bool {
  2003  	// match: (IsInBounds idx len)
  2004  	// cond:
  2005  	// result: (SGTU len idx)
  2006  	for {
  2007  		_ = v.Args[1]
  2008  		idx := v.Args[0]
  2009  		len := v.Args[1]
  2010  		v.reset(OpMIPSSGTU)
  2011  		v.AddArg(len)
  2012  		v.AddArg(idx)
  2013  		return true
  2014  	}
  2015  }
  2016  func rewriteValueMIPS_OpIsNonNil_0(v *Value) bool {
  2017  	b := v.Block
  2018  	_ = b
  2019  	typ := &b.Func.Config.Types
  2020  	_ = typ
  2021  	// match: (IsNonNil ptr)
  2022  	// cond:
  2023  	// result: (SGTU ptr (MOVWconst [0]))
  2024  	for {
  2025  		ptr := v.Args[0]
  2026  		v.reset(OpMIPSSGTU)
  2027  		v.AddArg(ptr)
  2028  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  2029  		v0.AuxInt = 0
  2030  		v.AddArg(v0)
  2031  		return true
  2032  	}
  2033  }
  2034  func rewriteValueMIPS_OpIsSliceInBounds_0(v *Value) bool {
  2035  	b := v.Block
  2036  	_ = b
  2037  	typ := &b.Func.Config.Types
  2038  	_ = typ
  2039  	// match: (IsSliceInBounds idx len)
  2040  	// cond:
  2041  	// result: (XORconst [1] (SGTU idx len))
  2042  	for {
  2043  		_ = v.Args[1]
  2044  		idx := v.Args[0]
  2045  		len := v.Args[1]
  2046  		v.reset(OpMIPSXORconst)
  2047  		v.AuxInt = 1
  2048  		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
  2049  		v0.AddArg(idx)
  2050  		v0.AddArg(len)
  2051  		v.AddArg(v0)
  2052  		return true
  2053  	}
  2054  }
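// Leq* is lowered as "not greater": XORconst [1] of the matching
// SGT/SGTU, with sub-word operands extended to 32 bits. The float forms
// reuse CMPGEF/CMPGED with the operands swapped.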
  2055  func rewriteValueMIPS_OpLeq16_0(v *Value) bool {
  2056  	b := v.Block
  2057  	_ = b
  2058  	typ := &b.Func.Config.Types
  2059  	_ = typ
  2060  	// match: (Leq16 x y)
  2061  	// cond:
  2062  	// result: (XORconst [1] (SGT (SignExt16to32 x) (SignExt16to32 y)))
  2063  	for {
  2064  		_ = v.Args[1]
  2065  		x := v.Args[0]
  2066  		y := v.Args[1]
  2067  		v.reset(OpMIPSXORconst)
  2068  		v.AuxInt = 1
  2069  		v0 := b.NewValue0(v.Pos, OpMIPSSGT, typ.Bool)
  2070  		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  2071  		v1.AddArg(x)
  2072  		v0.AddArg(v1)
  2073  		v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  2074  		v2.AddArg(y)
  2075  		v0.AddArg(v2)
  2076  		v.AddArg(v0)
  2077  		return true
  2078  	}
  2079  }
  2080  func rewriteValueMIPS_OpLeq16U_0(v *Value) bool {
  2081  	b := v.Block
  2082  	_ = b
  2083  	typ := &b.Func.Config.Types
  2084  	_ = typ
  2085  	// match: (Leq16U x y)
  2086  	// cond:
  2087  	// result: (XORconst [1] (SGTU (ZeroExt16to32 x) (ZeroExt16to32 y)))
  2088  	for {
  2089  		_ = v.Args[1]
  2090  		x := v.Args[0]
  2091  		y := v.Args[1]
  2092  		v.reset(OpMIPSXORconst)
  2093  		v.AuxInt = 1
  2094  		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
  2095  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2096  		v1.AddArg(x)
  2097  		v0.AddArg(v1)
  2098  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2099  		v2.AddArg(y)
  2100  		v0.AddArg(v2)
  2101  		v.AddArg(v0)
  2102  		return true
  2103  	}
  2104  }
  2105  func rewriteValueMIPS_OpLeq32_0(v *Value) bool {
  2106  	b := v.Block
  2107  	_ = b
  2108  	typ := &b.Func.Config.Types
  2109  	_ = typ
  2110  	// match: (Leq32 x y)
  2111  	// cond:
  2112  	// result: (XORconst [1] (SGT x y))
  2113  	for {
  2114  		_ = v.Args[1]
  2115  		x := v.Args[0]
  2116  		y := v.Args[1]
  2117  		v.reset(OpMIPSXORconst)
  2118  		v.AuxInt = 1
  2119  		v0 := b.NewValue0(v.Pos, OpMIPSSGT, typ.Bool)
  2120  		v0.AddArg(x)
  2121  		v0.AddArg(y)
  2122  		v.AddArg(v0)
  2123  		return true
  2124  	}
  2125  }
  2126  func rewriteValueMIPS_OpLeq32F_0(v *Value) bool {
  2127  	b := v.Block
  2128  	_ = b
  2129  	// match: (Leq32F x y)
  2130  	// cond:
  2131  	// result: (FPFlagTrue (CMPGEF y x))
  2132  	for {
  2133  		_ = v.Args[1]
  2134  		x := v.Args[0]
  2135  		y := v.Args[1]
  2136  		v.reset(OpMIPSFPFlagTrue)
  2137  		v0 := b.NewValue0(v.Pos, OpMIPSCMPGEF, types.TypeFlags)
  2138  		v0.AddArg(y)
  2139  		v0.AddArg(x)
  2140  		v.AddArg(v0)
  2141  		return true
  2142  	}
  2143  }
  2144  func rewriteValueMIPS_OpLeq32U_0(v *Value) bool {
  2145  	b := v.Block
  2146  	_ = b
  2147  	typ := &b.Func.Config.Types
  2148  	_ = typ
  2149  	// match: (Leq32U x y)
  2150  	// cond:
  2151  	// result: (XORconst [1] (SGTU x y))
  2152  	for {
  2153  		_ = v.Args[1]
  2154  		x := v.Args[0]
  2155  		y := v.Args[1]
  2156  		v.reset(OpMIPSXORconst)
  2157  		v.AuxInt = 1
  2158  		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
  2159  		v0.AddArg(x)
  2160  		v0.AddArg(y)
  2161  		v.AddArg(v0)
  2162  		return true
  2163  	}
  2164  }
  2165  func rewriteValueMIPS_OpLeq64F_0(v *Value) bool {
  2166  	b := v.Block
  2167  	_ = b
  2168  	// match: (Leq64F x y)
  2169  	// cond:
  2170  	// result: (FPFlagTrue (CMPGED y x))
  2171  	for {
  2172  		_ = v.Args[1]
  2173  		x := v.Args[0]
  2174  		y := v.Args[1]
  2175  		v.reset(OpMIPSFPFlagTrue)
  2176  		v0 := b.NewValue0(v.Pos, OpMIPSCMPGED, types.TypeFlags)
  2177  		v0.AddArg(y)
  2178  		v0.AddArg(x)
  2179  		v.AddArg(v0)
  2180  		return true
  2181  	}
  2182  }
  2183  func rewriteValueMIPS_OpLeq8_0(v *Value) bool {
  2184  	b := v.Block
  2185  	_ = b
  2186  	typ := &b.Func.Config.Types
  2187  	_ = typ
  2188  	// match: (Leq8 x y)
  2189  	// cond:
  2190  	// result: (XORconst [1] (SGT (SignExt8to32 x) (SignExt8to32 y)))
  2191  	for {
  2192  		_ = v.Args[1]
  2193  		x := v.Args[0]
  2194  		y := v.Args[1]
  2195  		v.reset(OpMIPSXORconst)
  2196  		v.AuxInt = 1
  2197  		v0 := b.NewValue0(v.Pos, OpMIPSSGT, typ.Bool)
  2198  		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  2199  		v1.AddArg(x)
  2200  		v0.AddArg(v1)
  2201  		v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  2202  		v2.AddArg(y)
  2203  		v0.AddArg(v2)
  2204  		v.AddArg(v0)
  2205  		return true
  2206  	}
  2207  }
  2208  func rewriteValueMIPS_OpLeq8U_0(v *Value) bool {
  2209  	b := v.Block
  2210  	_ = b
  2211  	typ := &b.Func.Config.Types
  2212  	_ = typ
  2213  	// match: (Leq8U x y)
  2214  	// cond:
  2215  	// result: (XORconst [1] (SGTU (ZeroExt8to32 x) (ZeroExt8to32 y)))
  2216  	for {
  2217  		_ = v.Args[1]
  2218  		x := v.Args[0]
  2219  		y := v.Args[1]
  2220  		v.reset(OpMIPSXORconst)
  2221  		v.AuxInt = 1
  2222  		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
  2223  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2224  		v1.AddArg(x)
  2225  		v0.AddArg(v1)
  2226  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2227  		v2.AddArg(y)
  2228  		v0.AddArg(v2)
  2229  		v.AddArg(v0)
  2230  		return true
  2231  	}
  2232  }
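// Less* reuses the Greater* lowerings with the operands swapped: x < y
// becomes SGT/SGTU y x, and the float forms use CMPGTF/CMPGTD y x.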
  2233  func rewriteValueMIPS_OpLess16_0(v *Value) bool {
  2234  	b := v.Block
  2235  	_ = b
  2236  	typ := &b.Func.Config.Types
  2237  	_ = typ
  2238  	// match: (Less16 x y)
  2239  	// cond:
  2240  	// result: (SGT (SignExt16to32 y) (SignExt16to32 x))
  2241  	for {
  2242  		_ = v.Args[1]
  2243  		x := v.Args[0]
  2244  		y := v.Args[1]
  2245  		v.reset(OpMIPSSGT)
  2246  		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  2247  		v0.AddArg(y)
  2248  		v.AddArg(v0)
  2249  		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  2250  		v1.AddArg(x)
  2251  		v.AddArg(v1)
  2252  		return true
  2253  	}
  2254  }
  2255  func rewriteValueMIPS_OpLess16U_0(v *Value) bool {
  2256  	b := v.Block
  2257  	_ = b
  2258  	typ := &b.Func.Config.Types
  2259  	_ = typ
  2260  	// match: (Less16U x y)
  2261  	// cond:
  2262  	// result: (SGTU (ZeroExt16to32 y) (ZeroExt16to32 x))
  2263  	for {
  2264  		_ = v.Args[1]
  2265  		x := v.Args[0]
  2266  		y := v.Args[1]
  2267  		v.reset(OpMIPSSGTU)
  2268  		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2269  		v0.AddArg(y)
  2270  		v.AddArg(v0)
  2271  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2272  		v1.AddArg(x)
  2273  		v.AddArg(v1)
  2274  		return true
  2275  	}
  2276  }
  2277  func rewriteValueMIPS_OpLess32_0(v *Value) bool {
  2278  	// match: (Less32 x y)
  2279  	// cond:
  2280  	// result: (SGT y x)
  2281  	for {
  2282  		_ = v.Args[1]
  2283  		x := v.Args[0]
  2284  		y := v.Args[1]
  2285  		v.reset(OpMIPSSGT)
  2286  		v.AddArg(y)
  2287  		v.AddArg(x)
  2288  		return true
  2289  	}
  2290  }
  2291  func rewriteValueMIPS_OpLess32F_0(v *Value) bool {
  2292  	b := v.Block
  2293  	_ = b
  2294  	// match: (Less32F x y)
  2295  	// cond:
  2296  	// result: (FPFlagTrue (CMPGTF y x))
  2297  	for {
  2298  		_ = v.Args[1]
  2299  		x := v.Args[0]
  2300  		y := v.Args[1]
  2301  		v.reset(OpMIPSFPFlagTrue)
  2302  		v0 := b.NewValue0(v.Pos, OpMIPSCMPGTF, types.TypeFlags)
  2303  		v0.AddArg(y)
  2304  		v0.AddArg(x)
  2305  		v.AddArg(v0)
  2306  		return true
  2307  	}
  2308  }
  2309  func rewriteValueMIPS_OpLess32U_0(v *Value) bool {
  2310  	// match: (Less32U x y)
  2311  	// cond:
  2312  	// result: (SGTU y x)
  2313  	for {
  2314  		_ = v.Args[1]
  2315  		x := v.Args[0]
  2316  		y := v.Args[1]
  2317  		v.reset(OpMIPSSGTU)
  2318  		v.AddArg(y)
  2319  		v.AddArg(x)
  2320  		return true
  2321  	}
  2322  }
  2323  func rewriteValueMIPS_OpLess64F_0(v *Value) bool {
  2324  	b := v.Block
  2325  	_ = b
  2326  	// match: (Less64F x y)
  2327  	// cond:
  2328  	// result: (FPFlagTrue (CMPGTD y x))
  2329  	for {
  2330  		_ = v.Args[1]
  2331  		x := v.Args[0]
  2332  		y := v.Args[1]
  2333  		v.reset(OpMIPSFPFlagTrue)
  2334  		v0 := b.NewValue0(v.Pos, OpMIPSCMPGTD, types.TypeFlags)
  2335  		v0.AddArg(y)
  2336  		v0.AddArg(x)
  2337  		v.AddArg(v0)
  2338  		return true
  2339  	}
  2340  }
  2341  func rewriteValueMIPS_OpLess8_0(v *Value) bool {
  2342  	b := v.Block
  2343  	_ = b
  2344  	typ := &b.Func.Config.Types
  2345  	_ = typ
  2346  	// match: (Less8 x y)
  2347  	// cond:
  2348  	// result: (SGT (SignExt8to32 y) (SignExt8to32 x))
  2349  	for {
  2350  		_ = v.Args[1]
  2351  		x := v.Args[0]
  2352  		y := v.Args[1]
  2353  		v.reset(OpMIPSSGT)
  2354  		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  2355  		v0.AddArg(y)
  2356  		v.AddArg(v0)
  2357  		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  2358  		v1.AddArg(x)
  2359  		v.AddArg(v1)
  2360  		return true
  2361  	}
  2362  }
  2363  func rewriteValueMIPS_OpLess8U_0(v *Value) bool {
  2364  	b := v.Block
  2365  	_ = b
  2366  	typ := &b.Func.Config.Types
  2367  	_ = typ
  2368  	// match: (Less8U x y)
  2369  	// cond:
  2370  	// result: (SGTU (ZeroExt8to32 y) (ZeroExt8to32 x))
  2371  	for {
  2372  		_ = v.Args[1]
  2373  		x := v.Args[0]
  2374  		y := v.Args[1]
  2375  		v.reset(OpMIPSSGTU)
  2376  		v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2377  		v0.AddArg(y)
  2378  		v.AddArg(v0)
  2379  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2380  		v1.AddArg(x)
  2381  		v.AddArg(v1)
  2382  		return true
  2383  	}
  2384  }
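// Load dispatches on the element type to pick the correctly sized and
// signed instruction: MOVB/MOVBU and MOVH/MOVHU for 8- and 16-bit
// integers, MOVW for 32-bit integers and pointers, MOVF and MOVD for
// floats.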
  2385  func rewriteValueMIPS_OpLoad_0(v *Value) bool {
  2386  	// match: (Load <t> ptr mem)
  2387  	// cond: t.IsBoolean()
  2388  	// result: (MOVBUload ptr mem)
  2389  	for {
  2390  		t := v.Type
  2391  		_ = v.Args[1]
  2392  		ptr := v.Args[0]
  2393  		mem := v.Args[1]
  2394  		if !(t.IsBoolean()) {
  2395  			break
  2396  		}
  2397  		v.reset(OpMIPSMOVBUload)
  2398  		v.AddArg(ptr)
  2399  		v.AddArg(mem)
  2400  		return true
  2401  	}
  2402  	// match: (Load <t> ptr mem)
  2403  	// cond: (is8BitInt(t) && isSigned(t))
  2404  	// result: (MOVBload ptr mem)
  2405  	for {
  2406  		t := v.Type
  2407  		_ = v.Args[1]
  2408  		ptr := v.Args[0]
  2409  		mem := v.Args[1]
  2410  		if !(is8BitInt(t) && isSigned(t)) {
  2411  			break
  2412  		}
  2413  		v.reset(OpMIPSMOVBload)
  2414  		v.AddArg(ptr)
  2415  		v.AddArg(mem)
  2416  		return true
  2417  	}
  2418  	// match: (Load <t> ptr mem)
  2419  	// cond: (is8BitInt(t) && !isSigned(t))
  2420  	// result: (MOVBUload ptr mem)
  2421  	for {
  2422  		t := v.Type
  2423  		_ = v.Args[1]
  2424  		ptr := v.Args[0]
  2425  		mem := v.Args[1]
  2426  		if !(is8BitInt(t) && !isSigned(t)) {
  2427  			break
  2428  		}
  2429  		v.reset(OpMIPSMOVBUload)
  2430  		v.AddArg(ptr)
  2431  		v.AddArg(mem)
  2432  		return true
  2433  	}
  2434  	// match: (Load <t> ptr mem)
  2435  	// cond: (is16BitInt(t) && isSigned(t))
  2436  	// result: (MOVHload ptr mem)
  2437  	for {
  2438  		t := v.Type
  2439  		_ = v.Args[1]
  2440  		ptr := v.Args[0]
  2441  		mem := v.Args[1]
  2442  		if !(is16BitInt(t) && isSigned(t)) {
  2443  			break
  2444  		}
  2445  		v.reset(OpMIPSMOVHload)
  2446  		v.AddArg(ptr)
  2447  		v.AddArg(mem)
  2448  		return true
  2449  	}
  2450  	// match: (Load <t> ptr mem)
  2451  	// cond: (is16BitInt(t) && !isSigned(t))
  2452  	// result: (MOVHUload ptr mem)
  2453  	for {
  2454  		t := v.Type
  2455  		_ = v.Args[1]
  2456  		ptr := v.Args[0]
  2457  		mem := v.Args[1]
  2458  		if !(is16BitInt(t) && !isSigned(t)) {
  2459  			break
  2460  		}
  2461  		v.reset(OpMIPSMOVHUload)
  2462  		v.AddArg(ptr)
  2463  		v.AddArg(mem)
  2464  		return true
  2465  	}
  2466  	// match: (Load <t> ptr mem)
  2467  	// cond: (is32BitInt(t) || isPtr(t))
  2468  	// result: (MOVWload ptr mem)
  2469  	for {
  2470  		t := v.Type
  2471  		_ = v.Args[1]
  2472  		ptr := v.Args[0]
  2473  		mem := v.Args[1]
  2474  		if !(is32BitInt(t) || isPtr(t)) {
  2475  			break
  2476  		}
  2477  		v.reset(OpMIPSMOVWload)
  2478  		v.AddArg(ptr)
  2479  		v.AddArg(mem)
  2480  		return true
  2481  	}
  2482  	// match: (Load <t> ptr mem)
  2483  	// cond: is32BitFloat(t)
  2484  	// result: (MOVFload ptr mem)
  2485  	for {
  2486  		t := v.Type
  2487  		_ = v.Args[1]
  2488  		ptr := v.Args[0]
  2489  		mem := v.Args[1]
  2490  		if !(is32BitFloat(t)) {
  2491  			break
  2492  		}
  2493  		v.reset(OpMIPSMOVFload)
  2494  		v.AddArg(ptr)
  2495  		v.AddArg(mem)
  2496  		return true
  2497  	}
  2498  	// match: (Load <t> ptr mem)
  2499  	// cond: is64BitFloat(t)
  2500  	// result: (MOVDload ptr mem)
  2501  	for {
  2502  		t := v.Type
  2503  		_ = v.Args[1]
  2504  		ptr := v.Args[0]
  2505  		mem := v.Args[1]
  2506  		if !(is64BitFloat(t)) {
  2507  			break
  2508  		}
  2509  		v.reset(OpMIPSMOVDload)
  2510  		v.AddArg(ptr)
  2511  		v.AddArg(mem)
  2512  		return true
  2513  	}
  2514  	return false
  2515  }
  2516  func rewriteValueMIPS_OpLocalAddr_0(v *Value) bool {
  2517  	// match: (LocalAddr {sym} base _)
  2518  	// cond:
  2519  	// result: (MOVWaddr {sym} base)
  2520  	for {
  2521  		sym := v.Aux
  2522  		_ = v.Args[1]
  2523  		base := v.Args[0]
  2524  		v.reset(OpMIPSMOVWaddr)
  2525  		v.Aux = sym
  2526  		v.AddArg(base)
  2527  		return true
  2528  	}
  2529  }
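// Variable left shifts must produce 0 once the count reaches 32, while
// the machine shift only uses the low bits of the count, so each Lsh
// rule wraps SLL in a CMOVZ guarded by SGTUconst [32]: the shift result
// is kept while the (zero-extended) count is below 32, and the constant
// 0 is selected otherwise. Shifts by a 64-bit constant are resolved at
// rewrite time instead (SLLconst, or MOVWconst [0] when the count is out
// of range).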
  2530  func rewriteValueMIPS_OpLsh16x16_0(v *Value) bool {
  2531  	b := v.Block
  2532  	_ = b
  2533  	typ := &b.Func.Config.Types
  2534  	_ = typ
  2535  	// match: (Lsh16x16 <t> x y)
  2536  	// cond:
  2537  	// result: (CMOVZ (SLL <t> x (ZeroExt16to32 y)) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
  2538  	for {
  2539  		t := v.Type
  2540  		_ = v.Args[1]
  2541  		x := v.Args[0]
  2542  		y := v.Args[1]
  2543  		v.reset(OpMIPSCMOVZ)
  2544  		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2545  		v0.AddArg(x)
  2546  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2547  		v1.AddArg(y)
  2548  		v0.AddArg(v1)
  2549  		v.AddArg(v0)
  2550  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  2551  		v2.AuxInt = 0
  2552  		v.AddArg(v2)
  2553  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  2554  		v3.AuxInt = 32
  2555  		v4 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2556  		v4.AddArg(y)
  2557  		v3.AddArg(v4)
  2558  		v.AddArg(v3)
  2559  		return true
  2560  	}
  2561  }
  2562  func rewriteValueMIPS_OpLsh16x32_0(v *Value) bool {
  2563  	b := v.Block
  2564  	_ = b
  2565  	typ := &b.Func.Config.Types
  2566  	_ = typ
  2567  	// match: (Lsh16x32 <t> x y)
  2568  	// cond:
  2569  	// result: (CMOVZ (SLL <t> x y) (MOVWconst [0]) (SGTUconst [32] y))
  2570  	for {
  2571  		t := v.Type
  2572  		_ = v.Args[1]
  2573  		x := v.Args[0]
  2574  		y := v.Args[1]
  2575  		v.reset(OpMIPSCMOVZ)
  2576  		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2577  		v0.AddArg(x)
  2578  		v0.AddArg(y)
  2579  		v.AddArg(v0)
  2580  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  2581  		v1.AuxInt = 0
  2582  		v.AddArg(v1)
  2583  		v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  2584  		v2.AuxInt = 32
  2585  		v2.AddArg(y)
  2586  		v.AddArg(v2)
  2587  		return true
  2588  	}
  2589  }
  2590  func rewriteValueMIPS_OpLsh16x64_0(v *Value) bool {
  2591  	// match: (Lsh16x64 x (Const64 [c]))
  2592  	// cond: uint32(c) < 16
  2593  	// result: (SLLconst x [c])
  2594  	for {
  2595  		_ = v.Args[1]
  2596  		x := v.Args[0]
  2597  		v_1 := v.Args[1]
  2598  		if v_1.Op != OpConst64 {
  2599  			break
  2600  		}
  2601  		c := v_1.AuxInt
  2602  		if !(uint32(c) < 16) {
  2603  			break
  2604  		}
  2605  		v.reset(OpMIPSSLLconst)
  2606  		v.AuxInt = c
  2607  		v.AddArg(x)
  2608  		return true
  2609  	}
  2610  	// match: (Lsh16x64 _ (Const64 [c]))
  2611  	// cond: uint32(c) >= 16
  2612  	// result: (MOVWconst [0])
  2613  	for {
  2614  		_ = v.Args[1]
  2615  		v_1 := v.Args[1]
  2616  		if v_1.Op != OpConst64 {
  2617  			break
  2618  		}
  2619  		c := v_1.AuxInt
  2620  		if !(uint32(c) >= 16) {
  2621  			break
  2622  		}
  2623  		v.reset(OpMIPSMOVWconst)
  2624  		v.AuxInt = 0
  2625  		return true
  2626  	}
  2627  	return false
  2628  }
  2629  func rewriteValueMIPS_OpLsh16x8_0(v *Value) bool {
  2630  	b := v.Block
  2631  	_ = b
  2632  	typ := &b.Func.Config.Types
  2633  	_ = typ
  2634  	// match: (Lsh16x8 <t> x y)
  2635  	// cond:
  2636  	// result: (CMOVZ (SLL <t> x (ZeroExt8to32 y)) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
  2637  	for {
  2638  		t := v.Type
  2639  		_ = v.Args[1]
  2640  		x := v.Args[0]
  2641  		y := v.Args[1]
  2642  		v.reset(OpMIPSCMOVZ)
  2643  		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2644  		v0.AddArg(x)
  2645  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2646  		v1.AddArg(y)
  2647  		v0.AddArg(v1)
  2648  		v.AddArg(v0)
  2649  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  2650  		v2.AuxInt = 0
  2651  		v.AddArg(v2)
  2652  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  2653  		v3.AuxInt = 32
  2654  		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2655  		v4.AddArg(y)
  2656  		v3.AddArg(v4)
  2657  		v.AddArg(v3)
  2658  		return true
  2659  	}
  2660  }
  2661  func rewriteValueMIPS_OpLsh32x16_0(v *Value) bool {
  2662  	b := v.Block
  2663  	_ = b
  2664  	typ := &b.Func.Config.Types
  2665  	_ = typ
  2666  	// match: (Lsh32x16 <t> x y)
  2667  	// cond:
  2668  	// result: (CMOVZ (SLL <t> x (ZeroExt16to32 y)) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
  2669  	for {
  2670  		t := v.Type
  2671  		_ = v.Args[1]
  2672  		x := v.Args[0]
  2673  		y := v.Args[1]
  2674  		v.reset(OpMIPSCMOVZ)
  2675  		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2676  		v0.AddArg(x)
  2677  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2678  		v1.AddArg(y)
  2679  		v0.AddArg(v1)
  2680  		v.AddArg(v0)
  2681  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  2682  		v2.AuxInt = 0
  2683  		v.AddArg(v2)
  2684  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  2685  		v3.AuxInt = 32
  2686  		v4 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2687  		v4.AddArg(y)
  2688  		v3.AddArg(v4)
  2689  		v.AddArg(v3)
  2690  		return true
  2691  	}
  2692  }
  2693  func rewriteValueMIPS_OpLsh32x32_0(v *Value) bool {
  2694  	b := v.Block
  2695  	_ = b
  2696  	typ := &b.Func.Config.Types
  2697  	_ = typ
  2698  	// match: (Lsh32x32 <t> x y)
  2699  	// cond:
  2700  	// result: (CMOVZ (SLL <t> x y) (MOVWconst [0]) (SGTUconst [32] y))
  2701  	for {
  2702  		t := v.Type
  2703  		_ = v.Args[1]
  2704  		x := v.Args[0]
  2705  		y := v.Args[1]
  2706  		v.reset(OpMIPSCMOVZ)
  2707  		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2708  		v0.AddArg(x)
  2709  		v0.AddArg(y)
  2710  		v.AddArg(v0)
  2711  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  2712  		v1.AuxInt = 0
  2713  		v.AddArg(v1)
  2714  		v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  2715  		v2.AuxInt = 32
  2716  		v2.AddArg(y)
  2717  		v.AddArg(v2)
  2718  		return true
  2719  	}
  2720  }
  2721  func rewriteValueMIPS_OpLsh32x64_0(v *Value) bool {
  2722  	// match: (Lsh32x64 x (Const64 [c]))
  2723  	// cond: uint32(c) < 32
  2724  	// result: (SLLconst x [c])
  2725  	for {
  2726  		_ = v.Args[1]
  2727  		x := v.Args[0]
  2728  		v_1 := v.Args[1]
  2729  		if v_1.Op != OpConst64 {
  2730  			break
  2731  		}
  2732  		c := v_1.AuxInt
  2733  		if !(uint32(c) < 32) {
  2734  			break
  2735  		}
  2736  		v.reset(OpMIPSSLLconst)
  2737  		v.AuxInt = c
  2738  		v.AddArg(x)
  2739  		return true
  2740  	}
  2741  	// match: (Lsh32x64 _ (Const64 [c]))
  2742  	// cond: uint32(c) >= 32
  2743  	// result: (MOVWconst [0])
  2744  	for {
  2745  		_ = v.Args[1]
  2746  		v_1 := v.Args[1]
  2747  		if v_1.Op != OpConst64 {
  2748  			break
  2749  		}
  2750  		c := v_1.AuxInt
  2751  		if !(uint32(c) >= 32) {
  2752  			break
  2753  		}
  2754  		v.reset(OpMIPSMOVWconst)
  2755  		v.AuxInt = 0
  2756  		return true
  2757  	}
  2758  	return false
  2759  }
  2760  func rewriteValueMIPS_OpLsh32x8_0(v *Value) bool {
  2761  	b := v.Block
  2762  	_ = b
  2763  	typ := &b.Func.Config.Types
  2764  	_ = typ
  2765  	// match: (Lsh32x8 <t> x y)
  2766  	// cond:
  2767  	// result: (CMOVZ (SLL <t> x (ZeroExt8to32 y)) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
  2768  	for {
  2769  		t := v.Type
  2770  		_ = v.Args[1]
  2771  		x := v.Args[0]
  2772  		y := v.Args[1]
  2773  		v.reset(OpMIPSCMOVZ)
  2774  		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2775  		v0.AddArg(x)
  2776  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2777  		v1.AddArg(y)
  2778  		v0.AddArg(v1)
  2779  		v.AddArg(v0)
  2780  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  2781  		v2.AuxInt = 0
  2782  		v.AddArg(v2)
  2783  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  2784  		v3.AuxInt = 32
  2785  		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2786  		v4.AddArg(y)
  2787  		v3.AddArg(v4)
  2788  		v.AddArg(v3)
  2789  		return true
  2790  	}
  2791  }
  2792  func rewriteValueMIPS_OpLsh8x16_0(v *Value) bool {
  2793  	b := v.Block
  2794  	_ = b
  2795  	typ := &b.Func.Config.Types
  2796  	_ = typ
  2797  	// match: (Lsh8x16 <t> x y)
  2798  	// cond:
  2799  	// result: (CMOVZ (SLL <t> x (ZeroExt16to32 y)) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
  2800  	for {
  2801  		t := v.Type
  2802  		_ = v.Args[1]
  2803  		x := v.Args[0]
  2804  		y := v.Args[1]
  2805  		v.reset(OpMIPSCMOVZ)
  2806  		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2807  		v0.AddArg(x)
  2808  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2809  		v1.AddArg(y)
  2810  		v0.AddArg(v1)
  2811  		v.AddArg(v0)
  2812  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  2813  		v2.AuxInt = 0
  2814  		v.AddArg(v2)
  2815  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  2816  		v3.AuxInt = 32
  2817  		v4 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2818  		v4.AddArg(y)
  2819  		v3.AddArg(v4)
  2820  		v.AddArg(v3)
  2821  		return true
  2822  	}
  2823  }
  2824  func rewriteValueMIPS_OpLsh8x32_0(v *Value) bool {
  2825  	b := v.Block
  2826  	_ = b
  2827  	typ := &b.Func.Config.Types
  2828  	_ = typ
  2829  	// match: (Lsh8x32 <t> x y)
  2830  	// cond:
  2831  	// result: (CMOVZ (SLL <t> x y) (MOVWconst [0]) (SGTUconst [32] y))
  2832  	for {
  2833  		t := v.Type
  2834  		_ = v.Args[1]
  2835  		x := v.Args[0]
  2836  		y := v.Args[1]
  2837  		v.reset(OpMIPSCMOVZ)
  2838  		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2839  		v0.AddArg(x)
  2840  		v0.AddArg(y)
  2841  		v.AddArg(v0)
  2842  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  2843  		v1.AuxInt = 0
  2844  		v.AddArg(v1)
  2845  		v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  2846  		v2.AuxInt = 32
  2847  		v2.AddArg(y)
  2848  		v.AddArg(v2)
  2849  		return true
  2850  	}
  2851  }
  2852  func rewriteValueMIPS_OpLsh8x64_0(v *Value) bool {
  2853  	// match: (Lsh8x64 x (Const64 [c]))
  2854  	// cond: uint32(c) < 8
  2855  	// result: (SLLconst x [c])
  2856  	for {
  2857  		_ = v.Args[1]
  2858  		x := v.Args[0]
  2859  		v_1 := v.Args[1]
  2860  		if v_1.Op != OpConst64 {
  2861  			break
  2862  		}
  2863  		c := v_1.AuxInt
  2864  		if !(uint32(c) < 8) {
  2865  			break
  2866  		}
  2867  		v.reset(OpMIPSSLLconst)
  2868  		v.AuxInt = c
  2869  		v.AddArg(x)
  2870  		return true
  2871  	}
  2872  	// match: (Lsh8x64 _ (Const64 [c]))
  2873  	// cond: uint32(c) >= 8
  2874  	// result: (MOVWconst [0])
  2875  	for {
  2876  		_ = v.Args[1]
  2877  		v_1 := v.Args[1]
  2878  		if v_1.Op != OpConst64 {
  2879  			break
  2880  		}
  2881  		c := v_1.AuxInt
  2882  		if !(uint32(c) >= 8) {
  2883  			break
  2884  		}
  2885  		v.reset(OpMIPSMOVWconst)
  2886  		v.AuxInt = 0
  2887  		return true
  2888  	}
  2889  	return false
  2890  }
  2891  func rewriteValueMIPS_OpLsh8x8_0(v *Value) bool {
  2892  	b := v.Block
  2893  	_ = b
  2894  	typ := &b.Func.Config.Types
  2895  	_ = typ
  2896  	// match: (Lsh8x8 <t> x y)
  2897  	// cond:
  2898  	// result: (CMOVZ (SLL <t> x (ZeroExt8to32 y)) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
  2899  	for {
  2900  		t := v.Type
  2901  		_ = v.Args[1]
  2902  		x := v.Args[0]
  2903  		y := v.Args[1]
  2904  		v.reset(OpMIPSCMOVZ)
  2905  		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2906  		v0.AddArg(x)
  2907  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2908  		v1.AddArg(y)
  2909  		v0.AddArg(v1)
  2910  		v.AddArg(v0)
  2911  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  2912  		v2.AuxInt = 0
  2913  		v.AddArg(v2)
  2914  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  2915  		v3.AuxInt = 32
  2916  		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2917  		v4.AddArg(y)
  2918  		v3.AddArg(v4)
  2919  		v.AddArg(v3)
  2920  		return true
  2921  	}
  2922  }
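// The rules below are peepholes over the MIPS pseudo-ops produced by the
// lowerings above: they fold constant operands into the *const forms,
// simplify trivial cases, and combine address arithmetic.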
  2923  func rewriteValueMIPS_OpMIPSADD_0(v *Value) bool {
  2924  	// match: (ADD x (MOVWconst [c]))
  2925  	// cond:
  2926  	// result: (ADDconst [c] x)
  2927  	for {
  2928  		_ = v.Args[1]
  2929  		x := v.Args[0]
  2930  		v_1 := v.Args[1]
  2931  		if v_1.Op != OpMIPSMOVWconst {
  2932  			break
  2933  		}
  2934  		c := v_1.AuxInt
  2935  		v.reset(OpMIPSADDconst)
  2936  		v.AuxInt = c
  2937  		v.AddArg(x)
  2938  		return true
  2939  	}
  2940  	// match: (ADD (MOVWconst [c]) x)
  2941  	// cond:
  2942  	// result: (ADDconst [c] x)
  2943  	for {
  2944  		_ = v.Args[1]
  2945  		v_0 := v.Args[0]
  2946  		if v_0.Op != OpMIPSMOVWconst {
  2947  			break
  2948  		}
  2949  		c := v_0.AuxInt
  2950  		x := v.Args[1]
  2951  		v.reset(OpMIPSADDconst)
  2952  		v.AuxInt = c
  2953  		v.AddArg(x)
  2954  		return true
  2955  	}
  2956  	// match: (ADD x (NEG y))
  2957  	// cond:
  2958  	// result: (SUB x y)
  2959  	for {
  2960  		_ = v.Args[1]
  2961  		x := v.Args[0]
  2962  		v_1 := v.Args[1]
  2963  		if v_1.Op != OpMIPSNEG {
  2964  			break
  2965  		}
  2966  		y := v_1.Args[0]
  2967  		v.reset(OpMIPSSUB)
  2968  		v.AddArg(x)
  2969  		v.AddArg(y)
  2970  		return true
  2971  	}
  2972  	// match: (ADD (NEG y) x)
  2973  	// cond:
  2974  	// result: (SUB x y)
  2975  	for {
  2976  		_ = v.Args[1]
  2977  		v_0 := v.Args[0]
  2978  		if v_0.Op != OpMIPSNEG {
  2979  			break
  2980  		}
  2981  		y := v_0.Args[0]
  2982  		x := v.Args[1]
  2983  		v.reset(OpMIPSSUB)
  2984  		v.AddArg(x)
  2985  		v.AddArg(y)
  2986  		return true
  2987  	}
  2988  	return false
  2989  }
  2990  func rewriteValueMIPS_OpMIPSADDconst_0(v *Value) bool {
  2991  	// match: (ADDconst [off1] (MOVWaddr [off2] {sym} ptr))
  2992  	// cond:
  2993  	// result: (MOVWaddr [off1+off2] {sym} ptr)
  2994  	for {
  2995  		off1 := v.AuxInt
  2996  		v_0 := v.Args[0]
  2997  		if v_0.Op != OpMIPSMOVWaddr {
  2998  			break
  2999  		}
  3000  		off2 := v_0.AuxInt
  3001  		sym := v_0.Aux
  3002  		ptr := v_0.Args[0]
  3003  		v.reset(OpMIPSMOVWaddr)
  3004  		v.AuxInt = off1 + off2
  3005  		v.Aux = sym
  3006  		v.AddArg(ptr)
  3007  		return true
  3008  	}
  3009  	// match: (ADDconst [0] x)
  3010  	// cond:
  3011  	// result: x
  3012  	for {
  3013  		if v.AuxInt != 0 {
  3014  			break
  3015  		}
  3016  		x := v.Args[0]
  3017  		v.reset(OpCopy)
  3018  		v.Type = x.Type
  3019  		v.AddArg(x)
  3020  		return true
  3021  	}
  3022  	// match: (ADDconst [c] (MOVWconst [d]))
  3023  	// cond:
  3024  	// result: (MOVWconst [int64(int32(c+d))])
  3025  	for {
  3026  		c := v.AuxInt
  3027  		v_0 := v.Args[0]
  3028  		if v_0.Op != OpMIPSMOVWconst {
  3029  			break
  3030  		}
  3031  		d := v_0.AuxInt
  3032  		v.reset(OpMIPSMOVWconst)
  3033  		v.AuxInt = int64(int32(c + d))
  3034  		return true
  3035  	}
  3036  	// match: (ADDconst [c] (ADDconst [d] x))
  3037  	// cond:
  3038  	// result: (ADDconst [int64(int32(c+d))] x)
  3039  	for {
  3040  		c := v.AuxInt
  3041  		v_0 := v.Args[0]
  3042  		if v_0.Op != OpMIPSADDconst {
  3043  			break
  3044  		}
  3045  		d := v_0.AuxInt
  3046  		x := v_0.Args[0]
  3047  		v.reset(OpMIPSADDconst)
  3048  		v.AuxInt = int64(int32(c + d))
  3049  		v.AddArg(x)
  3050  		return true
  3051  	}
  3052  	// match: (ADDconst [c] (SUBconst [d] x))
  3053  	// cond:
  3054  	// result: (ADDconst [int64(int32(c-d))] x)
  3055  	for {
  3056  		c := v.AuxInt
  3057  		v_0 := v.Args[0]
  3058  		if v_0.Op != OpMIPSSUBconst {
  3059  			break
  3060  		}
  3061  		d := v_0.AuxInt
  3062  		x := v_0.Args[0]
  3063  		v.reset(OpMIPSADDconst)
  3064  		v.AuxInt = int64(int32(c - d))
  3065  		v.AddArg(x)
  3066  		return true
  3067  	}
  3068  	return false
  3069  }
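// AND folds constants into ANDconst, drops self-ANDs, and merges the AND
// of two zero-tests (SGTUconst [1] x tests x == 0) into a single
// zero-test of their OR.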
  3070  func rewriteValueMIPS_OpMIPSAND_0(v *Value) bool {
  3071  	b := v.Block
  3072  	_ = b
  3073  	// match: (AND x (MOVWconst [c]))
  3074  	// cond:
  3075  	// result: (ANDconst [c] x)
  3076  	for {
  3077  		_ = v.Args[1]
  3078  		x := v.Args[0]
  3079  		v_1 := v.Args[1]
  3080  		if v_1.Op != OpMIPSMOVWconst {
  3081  			break
  3082  		}
  3083  		c := v_1.AuxInt
  3084  		v.reset(OpMIPSANDconst)
  3085  		v.AuxInt = c
  3086  		v.AddArg(x)
  3087  		return true
  3088  	}
  3089  	// match: (AND (MOVWconst [c]) x)
  3090  	// cond:
  3091  	// result: (ANDconst [c] x)
  3092  	for {
  3093  		_ = v.Args[1]
  3094  		v_0 := v.Args[0]
  3095  		if v_0.Op != OpMIPSMOVWconst {
  3096  			break
  3097  		}
  3098  		c := v_0.AuxInt
  3099  		x := v.Args[1]
  3100  		v.reset(OpMIPSANDconst)
  3101  		v.AuxInt = c
  3102  		v.AddArg(x)
  3103  		return true
  3104  	}
  3105  	// match: (AND x x)
  3106  	// cond:
  3107  	// result: x
  3108  	for {
  3109  		_ = v.Args[1]
  3110  		x := v.Args[0]
  3111  		if x != v.Args[1] {
  3112  			break
  3113  		}
  3114  		v.reset(OpCopy)
  3115  		v.Type = x.Type
  3116  		v.AddArg(x)
  3117  		return true
  3118  	}
  3119  	// match: (AND (SGTUconst [1] x) (SGTUconst [1] y))
  3120  	// cond:
  3121  	// result: (SGTUconst [1] (OR <x.Type> x y))
  3122  	for {
  3123  		_ = v.Args[1]
  3124  		v_0 := v.Args[0]
  3125  		if v_0.Op != OpMIPSSGTUconst {
  3126  			break
  3127  		}
  3128  		if v_0.AuxInt != 1 {
  3129  			break
  3130  		}
  3131  		x := v_0.Args[0]
  3132  		v_1 := v.Args[1]
  3133  		if v_1.Op != OpMIPSSGTUconst {
  3134  			break
  3135  		}
  3136  		if v_1.AuxInt != 1 {
  3137  			break
  3138  		}
  3139  		y := v_1.Args[0]
  3140  		v.reset(OpMIPSSGTUconst)
  3141  		v.AuxInt = 1
  3142  		v0 := b.NewValue0(v.Pos, OpMIPSOR, x.Type)
  3143  		v0.AddArg(x)
  3144  		v0.AddArg(y)
  3145  		v.AddArg(v0)
  3146  		return true
  3147  	}
  3148  	// match: (AND (SGTUconst [1] y) (SGTUconst [1] x))
  3149  	// cond:
  3150  	// result: (SGTUconst [1] (OR <x.Type> x y))
  3151  	for {
  3152  		_ = v.Args[1]
  3153  		v_0 := v.Args[0]
  3154  		if v_0.Op != OpMIPSSGTUconst {
  3155  			break
  3156  		}
  3157  		if v_0.AuxInt != 1 {
  3158  			break
  3159  		}
  3160  		y := v_0.Args[0]
  3161  		v_1 := v.Args[1]
  3162  		if v_1.Op != OpMIPSSGTUconst {
  3163  			break
  3164  		}
  3165  		if v_1.AuxInt != 1 {
  3166  			break
  3167  		}
  3168  		x := v_1.Args[0]
  3169  		v.reset(OpMIPSSGTUconst)
  3170  		v.AuxInt = 1
  3171  		v0 := b.NewValue0(v.Pos, OpMIPSOR, x.Type)
  3172  		v0.AddArg(x)
  3173  		v0.AddArg(y)
  3174  		v.AddArg(v0)
  3175  		return true
  3176  	}
  3177  	return false
  3178  }
  3179  func rewriteValueMIPS_OpMIPSANDconst_0(v *Value) bool {
  3180  	// match: (ANDconst [0] _)
  3181  	// cond:
  3182  	// result: (MOVWconst [0])
  3183  	for {
  3184  		if v.AuxInt != 0 {
  3185  			break
  3186  		}
  3187  		v.reset(OpMIPSMOVWconst)
  3188  		v.AuxInt = 0
  3189  		return true
  3190  	}
  3191  	// match: (ANDconst [-1] x)
  3192  	// cond:
  3193  	// result: x
  3194  	for {
  3195  		if v.AuxInt != -1 {
  3196  			break
  3197  		}
  3198  		x := v.Args[0]
  3199  		v.reset(OpCopy)
  3200  		v.Type = x.Type
  3201  		v.AddArg(x)
  3202  		return true
  3203  	}
  3204  	// match: (ANDconst [c] (MOVWconst [d]))
  3205  	// cond:
  3206  	// result: (MOVWconst [c&d])
  3207  	for {
  3208  		c := v.AuxInt
  3209  		v_0 := v.Args[0]
  3210  		if v_0.Op != OpMIPSMOVWconst {
  3211  			break
  3212  		}
  3213  		d := v_0.AuxInt
  3214  		v.reset(OpMIPSMOVWconst)
  3215  		v.AuxInt = c & d
  3216  		return true
  3217  	}
  3218  	// match: (ANDconst [c] (ANDconst [d] x))
  3219  	// cond:
  3220  	// result: (ANDconst [c&d] x)
  3221  	for {
  3222  		c := v.AuxInt
  3223  		v_0 := v.Args[0]
  3224  		if v_0.Op != OpMIPSANDconst {
  3225  			break
  3226  		}
  3227  		d := v_0.AuxInt
  3228  		x := v_0.Args[0]
  3229  		v.reset(OpMIPSANDconst)
  3230  		v.AuxInt = c & d
  3231  		v.AddArg(x)
  3232  		return true
  3233  	}
  3234  	return false
  3235  }
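// CMOVZ a b c yields a when c is nonzero and b when c is zero, so a
// constant selector collapses the conditional move to one of its inputs,
// and a constant-zero second operand reduces it to CMOVZzero.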
  3236  func rewriteValueMIPS_OpMIPSCMOVZ_0(v *Value) bool {
  3237  	b := v.Block
  3238  	_ = b
  3239  	// match: (CMOVZ _ b (MOVWconst [0]))
  3240  	// cond:
  3241  	// result: b
  3242  	for {
  3243  		_ = v.Args[2]
  3244  		b := v.Args[1]
  3245  		v_2 := v.Args[2]
  3246  		if v_2.Op != OpMIPSMOVWconst {
  3247  			break
  3248  		}
  3249  		if v_2.AuxInt != 0 {
  3250  			break
  3251  		}
  3252  		v.reset(OpCopy)
  3253  		v.Type = b.Type
  3254  		v.AddArg(b)
  3255  		return true
  3256  	}
  3257  	// match: (CMOVZ a _ (MOVWconst [c]))
  3258  	// cond: c!=0
  3259  	// result: a
  3260  	for {
  3261  		_ = v.Args[2]
  3262  		a := v.Args[0]
  3263  		v_2 := v.Args[2]
  3264  		if v_2.Op != OpMIPSMOVWconst {
  3265  			break
  3266  		}
  3267  		c := v_2.AuxInt
  3268  		if !(c != 0) {
  3269  			break
  3270  		}
  3271  		v.reset(OpCopy)
  3272  		v.Type = a.Type
  3273  		v.AddArg(a)
  3274  		return true
  3275  	}
  3276  	// match: (CMOVZ a (MOVWconst [0]) c)
  3277  	// cond:
  3278  	// result: (CMOVZzero a c)
  3279  	for {
  3280  		_ = v.Args[2]
  3281  		a := v.Args[0]
  3282  		v_1 := v.Args[1]
  3283  		if v_1.Op != OpMIPSMOVWconst {
  3284  			break
  3285  		}
  3286  		if v_1.AuxInt != 0 {
  3287  			break
  3288  		}
  3289  		c := v.Args[2]
  3290  		v.reset(OpMIPSCMOVZzero)
  3291  		v.AddArg(a)
  3292  		v.AddArg(c)
  3293  		return true
  3294  	}
  3295  	return false
  3296  }
  3297  func rewriteValueMIPS_OpMIPSCMOVZzero_0(v *Value) bool {
  3298  	// match: (CMOVZzero _ (MOVWconst [0]))
  3299  	// cond:
  3300  	// result: (MOVWconst [0])
  3301  	for {
  3302  		_ = v.Args[1]
  3303  		v_1 := v.Args[1]
  3304  		if v_1.Op != OpMIPSMOVWconst {
  3305  			break
  3306  		}
  3307  		if v_1.AuxInt != 0 {
  3308  			break
  3309  		}
  3310  		v.reset(OpMIPSMOVWconst)
  3311  		v.AuxInt = 0
  3312  		return true
  3313  	}
  3314  	// match: (CMOVZzero a (MOVWconst [c]))
  3315  	// cond: c!=0
  3316  	// result: a
  3317  	for {
  3318  		_ = v.Args[1]
  3319  		a := v.Args[0]
  3320  		v_1 := v.Args[1]
  3321  		if v_1.Op != OpMIPSMOVWconst {
  3322  			break
  3323  		}
  3324  		c := v_1.AuxInt
  3325  		if !(c != 0) {
  3326  			break
  3327  		}
  3328  		v.reset(OpCopy)
  3329  		v.Type = a.Type
  3330  		v.AddArg(a)
  3331  		return true
  3332  	}
  3333  	return false
  3334  }
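// Atomic peepholes: an atomic add of a 16-bit constant becomes
// LoweredAtomicAddconst, and an atomic store of zero becomes
// LoweredAtomicStorezero.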
  3335  func rewriteValueMIPS_OpMIPSLoweredAtomicAdd_0(v *Value) bool {
  3336  	// match: (LoweredAtomicAdd ptr (MOVWconst [c]) mem)
  3337  	// cond: is16Bit(c)
  3338  	// result: (LoweredAtomicAddconst [c] ptr mem)
  3339  	for {
  3340  		_ = v.Args[2]
  3341  		ptr := v.Args[0]
  3342  		v_1 := v.Args[1]
  3343  		if v_1.Op != OpMIPSMOVWconst {
  3344  			break
  3345  		}
  3346  		c := v_1.AuxInt
  3347  		mem := v.Args[2]
  3348  		if !(is16Bit(c)) {
  3349  			break
  3350  		}
  3351  		v.reset(OpMIPSLoweredAtomicAddconst)
  3352  		v.AuxInt = c
  3353  		v.AddArg(ptr)
  3354  		v.AddArg(mem)
  3355  		return true
  3356  	}
  3357  	return false
  3358  }
  3359  func rewriteValueMIPS_OpMIPSLoweredAtomicStore_0(v *Value) bool {
  3360  	// match: (LoweredAtomicStore ptr (MOVWconst [0]) mem)
  3361  	// cond:
  3362  	// result: (LoweredAtomicStorezero ptr mem)
  3363  	for {
  3364  		_ = v.Args[2]
  3365  		ptr := v.Args[0]
  3366  		v_1 := v.Args[1]
  3367  		if v_1.Op != OpMIPSMOVWconst {
  3368  			break
  3369  		}
  3370  		if v_1.AuxInt != 0 {
  3371  			break
  3372  		}
  3373  		mem := v.Args[2]
  3374  		v.reset(OpMIPSLoweredAtomicStorezero)
  3375  		v.AddArg(ptr)
  3376  		v.AddArg(mem)
  3377  		return true
  3378  	}
  3379  	return false
  3380  }
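// The load and store rules below fold ADDconst offsets and MOVWaddr
// symbolic addresses into the instruction's offset and symbol fields,
// and forward a value just stored at the same address directly to a load
// that reads it back.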
  3381  func rewriteValueMIPS_OpMIPSMOVBUload_0(v *Value) bool {
  3382  	// match: (MOVBUload [off1] {sym} x:(ADDconst [off2] ptr) mem)
  3383  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  3384  	// result: (MOVBUload [off1+off2] {sym} ptr mem)
  3385  	for {
  3386  		off1 := v.AuxInt
  3387  		sym := v.Aux
  3388  		_ = v.Args[1]
  3389  		x := v.Args[0]
  3390  		if x.Op != OpMIPSADDconst {
  3391  			break
  3392  		}
  3393  		off2 := x.AuxInt
  3394  		ptr := x.Args[0]
  3395  		mem := v.Args[1]
  3396  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  3397  			break
  3398  		}
  3399  		v.reset(OpMIPSMOVBUload)
  3400  		v.AuxInt = off1 + off2
  3401  		v.Aux = sym
  3402  		v.AddArg(ptr)
  3403  		v.AddArg(mem)
  3404  		return true
  3405  	}
  3406  	// match: (MOVBUload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  3407  	// cond: canMergeSym(sym1,sym2)
  3408  	// result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  3409  	for {
  3410  		off1 := v.AuxInt
  3411  		sym1 := v.Aux
  3412  		_ = v.Args[1]
  3413  		v_0 := v.Args[0]
  3414  		if v_0.Op != OpMIPSMOVWaddr {
  3415  			break
  3416  		}
  3417  		off2 := v_0.AuxInt
  3418  		sym2 := v_0.Aux
  3419  		ptr := v_0.Args[0]
  3420  		mem := v.Args[1]
  3421  		if !(canMergeSym(sym1, sym2)) {
  3422  			break
  3423  		}
  3424  		v.reset(OpMIPSMOVBUload)
  3425  		v.AuxInt = off1 + off2
  3426  		v.Aux = mergeSym(sym1, sym2)
  3427  		v.AddArg(ptr)
  3428  		v.AddArg(mem)
  3429  		return true
  3430  	}
  3431  	// match: (MOVBUload [off] {sym} ptr (MOVBstore [off2] {sym2} ptr2 x _))
  3432  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  3433  	// result: (MOVBUreg x)
  3434  	for {
  3435  		off := v.AuxInt
  3436  		sym := v.Aux
  3437  		_ = v.Args[1]
  3438  		ptr := v.Args[0]
  3439  		v_1 := v.Args[1]
  3440  		if v_1.Op != OpMIPSMOVBstore {
  3441  			break
  3442  		}
  3443  		off2 := v_1.AuxInt
  3444  		sym2 := v_1.Aux
  3445  		_ = v_1.Args[2]
  3446  		ptr2 := v_1.Args[0]
  3447  		x := v_1.Args[1]
  3448  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  3449  			break
  3450  		}
  3451  		v.reset(OpMIPSMOVBUreg)
  3452  		v.AddArg(x)
  3453  		return true
  3454  	}
  3455  	return false
  3456  }
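// MOVBUreg drops zero-extensions already implied by the operand: an
// unsigned byte load or a previous MOVBUreg needs only a register move,
// a signed byte load is replaced by its unsigned counterpart when it has
// no other uses, an ANDconst is narrowed to the low 8 bits, and a
// constant is truncated at rewrite time. MOVBreg below does the same for
// sign-extensions.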
  3457  func rewriteValueMIPS_OpMIPSMOVBUreg_0(v *Value) bool {
  3458  	b := v.Block
  3459  	_ = b
  3460  	// match: (MOVBUreg x:(MOVBUload _ _))
  3461  	// cond:
  3462  	// result: (MOVWreg x)
  3463  	for {
  3464  		x := v.Args[0]
  3465  		if x.Op != OpMIPSMOVBUload {
  3466  			break
  3467  		}
  3468  		_ = x.Args[1]
  3469  		v.reset(OpMIPSMOVWreg)
  3470  		v.AddArg(x)
  3471  		return true
  3472  	}
  3473  	// match: (MOVBUreg x:(MOVBUreg _))
  3474  	// cond:
  3475  	// result: (MOVWreg x)
  3476  	for {
  3477  		x := v.Args[0]
  3478  		if x.Op != OpMIPSMOVBUreg {
  3479  			break
  3480  		}
  3481  		v.reset(OpMIPSMOVWreg)
  3482  		v.AddArg(x)
  3483  		return true
  3484  	}
  3485  	// match: (MOVBUreg <t> x:(MOVBload [off] {sym} ptr mem))
  3486  	// cond: x.Uses == 1 && clobber(x)
  3487  	// result: @x.Block (MOVBUload <t> [off] {sym} ptr mem)
  3488  	for {
  3489  		t := v.Type
  3490  		x := v.Args[0]
  3491  		if x.Op != OpMIPSMOVBload {
  3492  			break
  3493  		}
  3494  		off := x.AuxInt
  3495  		sym := x.Aux
  3496  		_ = x.Args[1]
  3497  		ptr := x.Args[0]
  3498  		mem := x.Args[1]
  3499  		if !(x.Uses == 1 && clobber(x)) {
  3500  			break
  3501  		}
  3502  		b = x.Block
  3503  		v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, t)
  3504  		v.reset(OpCopy)
  3505  		v.AddArg(v0)
  3506  		v0.AuxInt = off
  3507  		v0.Aux = sym
  3508  		v0.AddArg(ptr)
  3509  		v0.AddArg(mem)
  3510  		return true
  3511  	}
  3512  	// match: (MOVBUreg (ANDconst [c] x))
  3513  	// cond:
  3514  	// result: (ANDconst [c&0xff] x)
  3515  	for {
  3516  		v_0 := v.Args[0]
  3517  		if v_0.Op != OpMIPSANDconst {
  3518  			break
  3519  		}
  3520  		c := v_0.AuxInt
  3521  		x := v_0.Args[0]
  3522  		v.reset(OpMIPSANDconst)
  3523  		v.AuxInt = c & 0xff
  3524  		v.AddArg(x)
  3525  		return true
  3526  	}
  3527  	// match: (MOVBUreg (MOVWconst [c]))
  3528  	// cond:
  3529  	// result: (MOVWconst [int64(uint8(c))])
  3530  	for {
  3531  		v_0 := v.Args[0]
  3532  		if v_0.Op != OpMIPSMOVWconst {
  3533  			break
  3534  		}
  3535  		c := v_0.AuxInt
  3536  		v.reset(OpMIPSMOVWconst)
  3537  		v.AuxInt = int64(uint8(c))
  3538  		return true
  3539  	}
  3540  	return false
  3541  }
  3542  func rewriteValueMIPS_OpMIPSMOVBload_0(v *Value) bool {
  3543  	// match: (MOVBload [off1] {sym} x:(ADDconst [off2] ptr) mem)
  3544  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  3545  	// result: (MOVBload [off1+off2] {sym} ptr mem)
  3546  	for {
  3547  		off1 := v.AuxInt
  3548  		sym := v.Aux
  3549  		_ = v.Args[1]
  3550  		x := v.Args[0]
  3551  		if x.Op != OpMIPSADDconst {
  3552  			break
  3553  		}
  3554  		off2 := x.AuxInt
  3555  		ptr := x.Args[0]
  3556  		mem := v.Args[1]
  3557  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  3558  			break
  3559  		}
  3560  		v.reset(OpMIPSMOVBload)
  3561  		v.AuxInt = off1 + off2
  3562  		v.Aux = sym
  3563  		v.AddArg(ptr)
  3564  		v.AddArg(mem)
  3565  		return true
  3566  	}
  3567  	// match: (MOVBload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  3568  	// cond: canMergeSym(sym1,sym2)
  3569  	// result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  3570  	for {
  3571  		off1 := v.AuxInt
  3572  		sym1 := v.Aux
  3573  		_ = v.Args[1]
  3574  		v_0 := v.Args[0]
  3575  		if v_0.Op != OpMIPSMOVWaddr {
  3576  			break
  3577  		}
  3578  		off2 := v_0.AuxInt
  3579  		sym2 := v_0.Aux
  3580  		ptr := v_0.Args[0]
  3581  		mem := v.Args[1]
  3582  		if !(canMergeSym(sym1, sym2)) {
  3583  			break
  3584  		}
  3585  		v.reset(OpMIPSMOVBload)
  3586  		v.AuxInt = off1 + off2
  3587  		v.Aux = mergeSym(sym1, sym2)
  3588  		v.AddArg(ptr)
  3589  		v.AddArg(mem)
  3590  		return true
  3591  	}
  3592  	// match: (MOVBload [off] {sym} ptr (MOVBstore [off2] {sym2} ptr2 x _))
  3593  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  3594  	// result: (MOVBreg x)
  3595  	for {
  3596  		off := v.AuxInt
  3597  		sym := v.Aux
  3598  		_ = v.Args[1]
  3599  		ptr := v.Args[0]
  3600  		v_1 := v.Args[1]
  3601  		if v_1.Op != OpMIPSMOVBstore {
  3602  			break
  3603  		}
  3604  		off2 := v_1.AuxInt
  3605  		sym2 := v_1.Aux
  3606  		_ = v_1.Args[2]
  3607  		ptr2 := v_1.Args[0]
  3608  		x := v_1.Args[1]
  3609  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  3610  			break
  3611  		}
  3612  		v.reset(OpMIPSMOVBreg)
  3613  		v.AddArg(x)
  3614  		return true
  3615  	}
  3616  	return false
  3617  }
  3618  func rewriteValueMIPS_OpMIPSMOVBreg_0(v *Value) bool {
  3619  	b := v.Block
  3620  	_ = b
  3621  	// match: (MOVBreg x:(MOVBload _ _))
  3622  	// cond:
  3623  	// result: (MOVWreg x)
  3624  	for {
  3625  		x := v.Args[0]
  3626  		if x.Op != OpMIPSMOVBload {
  3627  			break
  3628  		}
  3629  		_ = x.Args[1]
  3630  		v.reset(OpMIPSMOVWreg)
  3631  		v.AddArg(x)
  3632  		return true
  3633  	}
  3634  	// match: (MOVBreg x:(MOVBreg _))
  3635  	// cond:
  3636  	// result: (MOVWreg x)
  3637  	for {
  3638  		x := v.Args[0]
  3639  		if x.Op != OpMIPSMOVBreg {
  3640  			break
  3641  		}
  3642  		v.reset(OpMIPSMOVWreg)
  3643  		v.AddArg(x)
  3644  		return true
  3645  	}
  3646  	// match: (MOVBreg <t> x:(MOVBUload [off] {sym} ptr mem))
  3647  	// cond: x.Uses == 1 && clobber(x)
  3648  	// result: @x.Block (MOVBload <t> [off] {sym} ptr mem)
  3649  	for {
  3650  		t := v.Type
  3651  		x := v.Args[0]
  3652  		if x.Op != OpMIPSMOVBUload {
  3653  			break
  3654  		}
  3655  		off := x.AuxInt
  3656  		sym := x.Aux
  3657  		_ = x.Args[1]
  3658  		ptr := x.Args[0]
  3659  		mem := x.Args[1]
  3660  		if !(x.Uses == 1 && clobber(x)) {
  3661  			break
  3662  		}
  3663  		b = x.Block
  3664  		v0 := b.NewValue0(v.Pos, OpMIPSMOVBload, t)
  3665  		v.reset(OpCopy)
  3666  		v.AddArg(v0)
  3667  		v0.AuxInt = off
  3668  		v0.Aux = sym
  3669  		v0.AddArg(ptr)
  3670  		v0.AddArg(mem)
  3671  		return true
  3672  	}
  3673  	// match: (MOVBreg (ANDconst [c] x))
  3674  	// cond: c & 0x80 == 0
  3675  	// result: (ANDconst [c&0x7f] x)
  3676  	for {
  3677  		v_0 := v.Args[0]
  3678  		if v_0.Op != OpMIPSANDconst {
  3679  			break
  3680  		}
  3681  		c := v_0.AuxInt
  3682  		x := v_0.Args[0]
  3683  		if !(c&0x80 == 0) {
  3684  			break
  3685  		}
  3686  		v.reset(OpMIPSANDconst)
  3687  		v.AuxInt = c & 0x7f
  3688  		v.AddArg(x)
  3689  		return true
  3690  	}
  3691  	// match: (MOVBreg (MOVWconst [c]))
  3692  	// cond:
  3693  	// result: (MOVWconst [int64(int8(c))])
  3694  	for {
  3695  		v_0 := v.Args[0]
  3696  		if v_0.Op != OpMIPSMOVWconst {
  3697  			break
  3698  		}
  3699  		c := v_0.AuxInt
  3700  		v.reset(OpMIPSMOVWconst)
  3701  		v.AuxInt = int64(int8(c))
  3702  		return true
  3703  	}
  3704  	return false
  3705  }
  3706  func rewriteValueMIPS_OpMIPSMOVBstore_0(v *Value) bool {
  3707  	// match: (MOVBstore [off1] {sym} x:(ADDconst [off2] ptr) val mem)
  3708  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  3709  	// result: (MOVBstore [off1+off2] {sym} ptr val mem)
  3710  	for {
  3711  		off1 := v.AuxInt
  3712  		sym := v.Aux
  3713  		_ = v.Args[2]
  3714  		x := v.Args[0]
  3715  		if x.Op != OpMIPSADDconst {
  3716  			break
  3717  		}
  3718  		off2 := x.AuxInt
  3719  		ptr := x.Args[0]
  3720  		val := v.Args[1]
  3721  		mem := v.Args[2]
  3722  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  3723  			break
  3724  		}
  3725  		v.reset(OpMIPSMOVBstore)
  3726  		v.AuxInt = off1 + off2
  3727  		v.Aux = sym
  3728  		v.AddArg(ptr)
  3729  		v.AddArg(val)
  3730  		v.AddArg(mem)
  3731  		return true
  3732  	}
  3733  	// match: (MOVBstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem)
  3734  	// cond: canMergeSym(sym1,sym2)
  3735  	// result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
  3736  	for {
  3737  		off1 := v.AuxInt
  3738  		sym1 := v.Aux
  3739  		_ = v.Args[2]
  3740  		v_0 := v.Args[0]
  3741  		if v_0.Op != OpMIPSMOVWaddr {
  3742  			break
  3743  		}
  3744  		off2 := v_0.AuxInt
  3745  		sym2 := v_0.Aux
  3746  		ptr := v_0.Args[0]
  3747  		val := v.Args[1]
  3748  		mem := v.Args[2]
  3749  		if !(canMergeSym(sym1, sym2)) {
  3750  			break
  3751  		}
  3752  		v.reset(OpMIPSMOVBstore)
  3753  		v.AuxInt = off1 + off2
  3754  		v.Aux = mergeSym(sym1, sym2)
  3755  		v.AddArg(ptr)
  3756  		v.AddArg(val)
  3757  		v.AddArg(mem)
  3758  		return true
  3759  	}
  3760  	// match: (MOVBstore [off] {sym} ptr (MOVWconst [0]) mem)
  3761  	// cond:
  3762  	// result: (MOVBstorezero [off] {sym} ptr mem)
  3763  	for {
  3764  		off := v.AuxInt
  3765  		sym := v.Aux
  3766  		_ = v.Args[2]
  3767  		ptr := v.Args[0]
  3768  		v_1 := v.Args[1]
  3769  		if v_1.Op != OpMIPSMOVWconst {
  3770  			break
  3771  		}
  3772  		if v_1.AuxInt != 0 {
  3773  			break
  3774  		}
  3775  		mem := v.Args[2]
  3776  		v.reset(OpMIPSMOVBstorezero)
  3777  		v.AuxInt = off
  3778  		v.Aux = sym
  3779  		v.AddArg(ptr)
  3780  		v.AddArg(mem)
  3781  		return true
  3782  	}
  3783  	// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
  3784  	// cond:
  3785  	// result: (MOVBstore [off] {sym} ptr x mem)
  3786  	for {
  3787  		off := v.AuxInt
  3788  		sym := v.Aux
  3789  		_ = v.Args[2]
  3790  		ptr := v.Args[0]
  3791  		v_1 := v.Args[1]
  3792  		if v_1.Op != OpMIPSMOVBreg {
  3793  			break
  3794  		}
  3795  		x := v_1.Args[0]
  3796  		mem := v.Args[2]
  3797  		v.reset(OpMIPSMOVBstore)
  3798  		v.AuxInt = off
  3799  		v.Aux = sym
  3800  		v.AddArg(ptr)
  3801  		v.AddArg(x)
  3802  		v.AddArg(mem)
  3803  		return true
  3804  	}
  3805  	// match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
  3806  	// cond:
  3807  	// result: (MOVBstore [off] {sym} ptr x mem)
  3808  	for {
  3809  		off := v.AuxInt
  3810  		sym := v.Aux
  3811  		_ = v.Args[2]
  3812  		ptr := v.Args[0]
  3813  		v_1 := v.Args[1]
  3814  		if v_1.Op != OpMIPSMOVBUreg {
  3815  			break
  3816  		}
  3817  		x := v_1.Args[0]
  3818  		mem := v.Args[2]
  3819  		v.reset(OpMIPSMOVBstore)
  3820  		v.AuxInt = off
  3821  		v.Aux = sym
  3822  		v.AddArg(ptr)
  3823  		v.AddArg(x)
  3824  		v.AddArg(mem)
  3825  		return true
  3826  	}
  3827  	// match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
  3828  	// cond:
  3829  	// result: (MOVBstore [off] {sym} ptr x mem)
  3830  	for {
  3831  		off := v.AuxInt
  3832  		sym := v.Aux
  3833  		_ = v.Args[2]
  3834  		ptr := v.Args[0]
  3835  		v_1 := v.Args[1]
  3836  		if v_1.Op != OpMIPSMOVHreg {
  3837  			break
  3838  		}
  3839  		x := v_1.Args[0]
  3840  		mem := v.Args[2]
  3841  		v.reset(OpMIPSMOVBstore)
  3842  		v.AuxInt = off
  3843  		v.Aux = sym
  3844  		v.AddArg(ptr)
  3845  		v.AddArg(x)
  3846  		v.AddArg(mem)
  3847  		return true
  3848  	}
  3849  	// match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
  3850  	// cond:
  3851  	// result: (MOVBstore [off] {sym} ptr x mem)
  3852  	for {
  3853  		off := v.AuxInt
  3854  		sym := v.Aux
  3855  		_ = v.Args[2]
  3856  		ptr := v.Args[0]
  3857  		v_1 := v.Args[1]
  3858  		if v_1.Op != OpMIPSMOVHUreg {
  3859  			break
  3860  		}
  3861  		x := v_1.Args[0]
  3862  		mem := v.Args[2]
  3863  		v.reset(OpMIPSMOVBstore)
  3864  		v.AuxInt = off
  3865  		v.Aux = sym
  3866  		v.AddArg(ptr)
  3867  		v.AddArg(x)
  3868  		v.AddArg(mem)
  3869  		return true
  3870  	}
  3871  	// match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
  3872  	// cond:
  3873  	// result: (MOVBstore [off] {sym} ptr x mem)
  3874  	for {
  3875  		off := v.AuxInt
  3876  		sym := v.Aux
  3877  		_ = v.Args[2]
  3878  		ptr := v.Args[0]
  3879  		v_1 := v.Args[1]
  3880  		if v_1.Op != OpMIPSMOVWreg {
  3881  			break
  3882  		}
  3883  		x := v_1.Args[0]
  3884  		mem := v.Args[2]
  3885  		v.reset(OpMIPSMOVBstore)
  3886  		v.AuxInt = off
  3887  		v.Aux = sym
  3888  		v.AddArg(ptr)
  3889  		v.AddArg(x)
  3890  		v.AddArg(mem)
  3891  		return true
  3892  	}
  3893  	return false
  3894  }
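        // Like the other MOVxload/MOVxstore rules in this file, the MOVBstorezero
        // rules fold a constant offset from an ADDconst base into the store (when the
        // combined offset still fits the 16-bit signed displacement of MIPS
        // loads/stores, or the ADDconst has no other uses) and merge a MOVWaddr
        // symbol+offset into the store's Aux/AuxInt.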
  3895  func rewriteValueMIPS_OpMIPSMOVBstorezero_0(v *Value) bool {
  3896  	// match: (MOVBstorezero [off1] {sym} x:(ADDconst [off2] ptr) mem)
  3897  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  3898  	// result: (MOVBstorezero [off1+off2] {sym} ptr mem)
  3899  	for {
  3900  		off1 := v.AuxInt
  3901  		sym := v.Aux
  3902  		_ = v.Args[1]
  3903  		x := v.Args[0]
  3904  		if x.Op != OpMIPSADDconst {
  3905  			break
  3906  		}
  3907  		off2 := x.AuxInt
  3908  		ptr := x.Args[0]
  3909  		mem := v.Args[1]
  3910  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  3911  			break
  3912  		}
  3913  		v.reset(OpMIPSMOVBstorezero)
  3914  		v.AuxInt = off1 + off2
  3915  		v.Aux = sym
  3916  		v.AddArg(ptr)
  3917  		v.AddArg(mem)
  3918  		return true
  3919  	}
  3920  	// match: (MOVBstorezero [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  3921  	// cond: canMergeSym(sym1,sym2)
  3922  	// result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  3923  	for {
  3924  		off1 := v.AuxInt
  3925  		sym1 := v.Aux
  3926  		_ = v.Args[1]
  3927  		v_0 := v.Args[0]
  3928  		if v_0.Op != OpMIPSMOVWaddr {
  3929  			break
  3930  		}
  3931  		off2 := v_0.AuxInt
  3932  		sym2 := v_0.Aux
  3933  		ptr := v_0.Args[0]
  3934  		mem := v.Args[1]
  3935  		if !(canMergeSym(sym1, sym2)) {
  3936  			break
  3937  		}
  3938  		v.reset(OpMIPSMOVBstorezero)
  3939  		v.AuxInt = off1 + off2
  3940  		v.Aux = mergeSym(sym1, sym2)
  3941  		v.AddArg(ptr)
  3942  		v.AddArg(mem)
  3943  		return true
  3944  	}
  3945  	return false
  3946  }
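        // In addition to the offset-folding and symbol-merging rules shared by all
        // loads, MOVDload performs store-to-load forwarding: a load whose memory
        // argument is a MOVDstore to the same symbol, offset, and pointer is replaced
        // by a copy of the stored value.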
  3947  func rewriteValueMIPS_OpMIPSMOVDload_0(v *Value) bool {
  3948  	// match: (MOVDload [off1] {sym} x:(ADDconst [off2] ptr) mem)
  3949  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  3950  	// result: (MOVDload [off1+off2] {sym} ptr mem)
  3951  	for {
  3952  		off1 := v.AuxInt
  3953  		sym := v.Aux
  3954  		_ = v.Args[1]
  3955  		x := v.Args[0]
  3956  		if x.Op != OpMIPSADDconst {
  3957  			break
  3958  		}
  3959  		off2 := x.AuxInt
  3960  		ptr := x.Args[0]
  3961  		mem := v.Args[1]
  3962  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  3963  			break
  3964  		}
  3965  		v.reset(OpMIPSMOVDload)
  3966  		v.AuxInt = off1 + off2
  3967  		v.Aux = sym
  3968  		v.AddArg(ptr)
  3969  		v.AddArg(mem)
  3970  		return true
  3971  	}
  3972  	// match: (MOVDload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  3973  	// cond: canMergeSym(sym1,sym2)
  3974  	// result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  3975  	for {
  3976  		off1 := v.AuxInt
  3977  		sym1 := v.Aux
  3978  		_ = v.Args[1]
  3979  		v_0 := v.Args[0]
  3980  		if v_0.Op != OpMIPSMOVWaddr {
  3981  			break
  3982  		}
  3983  		off2 := v_0.AuxInt
  3984  		sym2 := v_0.Aux
  3985  		ptr := v_0.Args[0]
  3986  		mem := v.Args[1]
  3987  		if !(canMergeSym(sym1, sym2)) {
  3988  			break
  3989  		}
  3990  		v.reset(OpMIPSMOVDload)
  3991  		v.AuxInt = off1 + off2
  3992  		v.Aux = mergeSym(sym1, sym2)
  3993  		v.AddArg(ptr)
  3994  		v.AddArg(mem)
  3995  		return true
  3996  	}
  3997  	// match: (MOVDload [off] {sym} ptr (MOVDstore [off2] {sym2} ptr2 x _))
  3998  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  3999  	// result: x
  4000  	for {
  4001  		off := v.AuxInt
  4002  		sym := v.Aux
  4003  		_ = v.Args[1]
  4004  		ptr := v.Args[0]
  4005  		v_1 := v.Args[1]
  4006  		if v_1.Op != OpMIPSMOVDstore {
  4007  			break
  4008  		}
  4009  		off2 := v_1.AuxInt
  4010  		sym2 := v_1.Aux
  4011  		_ = v_1.Args[2]
  4012  		ptr2 := v_1.Args[0]
  4013  		x := v_1.Args[1]
  4014  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  4015  			break
  4016  		}
  4017  		v.reset(OpCopy)
  4018  		v.Type = x.Type
  4019  		v.AddArg(x)
  4020  		return true
  4021  	}
  4022  	return false
  4023  }
  4024  func rewriteValueMIPS_OpMIPSMOVDstore_0(v *Value) bool {
  4025  	// match: (MOVDstore [off1] {sym} x:(ADDconst [off2] ptr) val mem)
  4026  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4027  	// result: (MOVDstore [off1+off2] {sym} ptr val mem)
  4028  	for {
  4029  		off1 := v.AuxInt
  4030  		sym := v.Aux
  4031  		_ = v.Args[2]
  4032  		x := v.Args[0]
  4033  		if x.Op != OpMIPSADDconst {
  4034  			break
  4035  		}
  4036  		off2 := x.AuxInt
  4037  		ptr := x.Args[0]
  4038  		val := v.Args[1]
  4039  		mem := v.Args[2]
  4040  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4041  			break
  4042  		}
  4043  		v.reset(OpMIPSMOVDstore)
  4044  		v.AuxInt = off1 + off2
  4045  		v.Aux = sym
  4046  		v.AddArg(ptr)
  4047  		v.AddArg(val)
  4048  		v.AddArg(mem)
  4049  		return true
  4050  	}
  4051  	// match: (MOVDstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem)
  4052  	// cond: canMergeSym(sym1,sym2)
  4053  	// result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
  4054  	for {
  4055  		off1 := v.AuxInt
  4056  		sym1 := v.Aux
  4057  		_ = v.Args[2]
  4058  		v_0 := v.Args[0]
  4059  		if v_0.Op != OpMIPSMOVWaddr {
  4060  			break
  4061  		}
  4062  		off2 := v_0.AuxInt
  4063  		sym2 := v_0.Aux
  4064  		ptr := v_0.Args[0]
  4065  		val := v.Args[1]
  4066  		mem := v.Args[2]
  4067  		if !(canMergeSym(sym1, sym2)) {
  4068  			break
  4069  		}
  4070  		v.reset(OpMIPSMOVDstore)
  4071  		v.AuxInt = off1 + off2
  4072  		v.Aux = mergeSym(sym1, sym2)
  4073  		v.AddArg(ptr)
  4074  		v.AddArg(val)
  4075  		v.AddArg(mem)
  4076  		return true
  4077  	}
  4078  	return false
  4079  }
  4080  func rewriteValueMIPS_OpMIPSMOVFload_0(v *Value) bool {
  4081  	// match: (MOVFload [off1] {sym} x:(ADDconst [off2] ptr) mem)
  4082  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4083  	// result: (MOVFload [off1+off2] {sym} ptr mem)
  4084  	for {
  4085  		off1 := v.AuxInt
  4086  		sym := v.Aux
  4087  		_ = v.Args[1]
  4088  		x := v.Args[0]
  4089  		if x.Op != OpMIPSADDconst {
  4090  			break
  4091  		}
  4092  		off2 := x.AuxInt
  4093  		ptr := x.Args[0]
  4094  		mem := v.Args[1]
  4095  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4096  			break
  4097  		}
  4098  		v.reset(OpMIPSMOVFload)
  4099  		v.AuxInt = off1 + off2
  4100  		v.Aux = sym
  4101  		v.AddArg(ptr)
  4102  		v.AddArg(mem)
  4103  		return true
  4104  	}
  4105  	// match: (MOVFload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  4106  	// cond: canMergeSym(sym1,sym2)
  4107  	// result: (MOVFload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4108  	for {
  4109  		off1 := v.AuxInt
  4110  		sym1 := v.Aux
  4111  		_ = v.Args[1]
  4112  		v_0 := v.Args[0]
  4113  		if v_0.Op != OpMIPSMOVWaddr {
  4114  			break
  4115  		}
  4116  		off2 := v_0.AuxInt
  4117  		sym2 := v_0.Aux
  4118  		ptr := v_0.Args[0]
  4119  		mem := v.Args[1]
  4120  		if !(canMergeSym(sym1, sym2)) {
  4121  			break
  4122  		}
  4123  		v.reset(OpMIPSMOVFload)
  4124  		v.AuxInt = off1 + off2
  4125  		v.Aux = mergeSym(sym1, sym2)
  4126  		v.AddArg(ptr)
  4127  		v.AddArg(mem)
  4128  		return true
  4129  	}
  4130  	// match: (MOVFload [off] {sym} ptr (MOVFstore [off2] {sym2} ptr2 x _))
  4131  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  4132  	// result: x
  4133  	for {
  4134  		off := v.AuxInt
  4135  		sym := v.Aux
  4136  		_ = v.Args[1]
  4137  		ptr := v.Args[0]
  4138  		v_1 := v.Args[1]
  4139  		if v_1.Op != OpMIPSMOVFstore {
  4140  			break
  4141  		}
  4142  		off2 := v_1.AuxInt
  4143  		sym2 := v_1.Aux
  4144  		_ = v_1.Args[2]
  4145  		ptr2 := v_1.Args[0]
  4146  		x := v_1.Args[1]
  4147  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  4148  			break
  4149  		}
  4150  		v.reset(OpCopy)
  4151  		v.Type = x.Type
  4152  		v.AddArg(x)
  4153  		return true
  4154  	}
  4155  	return false
  4156  }
  4157  func rewriteValueMIPS_OpMIPSMOVFstore_0(v *Value) bool {
  4158  	// match: (MOVFstore [off1] {sym} x:(ADDconst [off2] ptr) val mem)
  4159  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4160  	// result: (MOVFstore [off1+off2] {sym} ptr val mem)
  4161  	for {
  4162  		off1 := v.AuxInt
  4163  		sym := v.Aux
  4164  		_ = v.Args[2]
  4165  		x := v.Args[0]
  4166  		if x.Op != OpMIPSADDconst {
  4167  			break
  4168  		}
  4169  		off2 := x.AuxInt
  4170  		ptr := x.Args[0]
  4171  		val := v.Args[1]
  4172  		mem := v.Args[2]
  4173  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4174  			break
  4175  		}
  4176  		v.reset(OpMIPSMOVFstore)
  4177  		v.AuxInt = off1 + off2
  4178  		v.Aux = sym
  4179  		v.AddArg(ptr)
  4180  		v.AddArg(val)
  4181  		v.AddArg(mem)
  4182  		return true
  4183  	}
  4184  	// match: (MOVFstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem)
  4185  	// cond: canMergeSym(sym1,sym2)
  4186  	// result: (MOVFstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
  4187  	for {
  4188  		off1 := v.AuxInt
  4189  		sym1 := v.Aux
  4190  		_ = v.Args[2]
  4191  		v_0 := v.Args[0]
  4192  		if v_0.Op != OpMIPSMOVWaddr {
  4193  			break
  4194  		}
  4195  		off2 := v_0.AuxInt
  4196  		sym2 := v_0.Aux
  4197  		ptr := v_0.Args[0]
  4198  		val := v.Args[1]
  4199  		mem := v.Args[2]
  4200  		if !(canMergeSym(sym1, sym2)) {
  4201  			break
  4202  		}
  4203  		v.reset(OpMIPSMOVFstore)
  4204  		v.AuxInt = off1 + off2
  4205  		v.Aux = mergeSym(sym1, sym2)
  4206  		v.AddArg(ptr)
  4207  		v.AddArg(val)
  4208  		v.AddArg(mem)
  4209  		return true
  4210  	}
  4211  	return false
  4212  }
  4213  func rewriteValueMIPS_OpMIPSMOVHUload_0(v *Value) bool {
  4214  	// match: (MOVHUload [off1] {sym} x:(ADDconst [off2] ptr) mem)
  4215  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4216  	// result: (MOVHUload [off1+off2] {sym} ptr mem)
  4217  	for {
  4218  		off1 := v.AuxInt
  4219  		sym := v.Aux
  4220  		_ = v.Args[1]
  4221  		x := v.Args[0]
  4222  		if x.Op != OpMIPSADDconst {
  4223  			break
  4224  		}
  4225  		off2 := x.AuxInt
  4226  		ptr := x.Args[0]
  4227  		mem := v.Args[1]
  4228  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4229  			break
  4230  		}
  4231  		v.reset(OpMIPSMOVHUload)
  4232  		v.AuxInt = off1 + off2
  4233  		v.Aux = sym
  4234  		v.AddArg(ptr)
  4235  		v.AddArg(mem)
  4236  		return true
  4237  	}
  4238  	// match: (MOVHUload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  4239  	// cond: canMergeSym(sym1,sym2)
  4240  	// result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4241  	for {
  4242  		off1 := v.AuxInt
  4243  		sym1 := v.Aux
  4244  		_ = v.Args[1]
  4245  		v_0 := v.Args[0]
  4246  		if v_0.Op != OpMIPSMOVWaddr {
  4247  			break
  4248  		}
  4249  		off2 := v_0.AuxInt
  4250  		sym2 := v_0.Aux
  4251  		ptr := v_0.Args[0]
  4252  		mem := v.Args[1]
  4253  		if !(canMergeSym(sym1, sym2)) {
  4254  			break
  4255  		}
  4256  		v.reset(OpMIPSMOVHUload)
  4257  		v.AuxInt = off1 + off2
  4258  		v.Aux = mergeSym(sym1, sym2)
  4259  		v.AddArg(ptr)
  4260  		v.AddArg(mem)
  4261  		return true
  4262  	}
  4263  	// match: (MOVHUload [off] {sym} ptr (MOVHstore [off2] {sym2} ptr2 x _))
  4264  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  4265  	// result: (MOVHUreg x)
  4266  	for {
  4267  		off := v.AuxInt
  4268  		sym := v.Aux
  4269  		_ = v.Args[1]
  4270  		ptr := v.Args[0]
  4271  		v_1 := v.Args[1]
  4272  		if v_1.Op != OpMIPSMOVHstore {
  4273  			break
  4274  		}
  4275  		off2 := v_1.AuxInt
  4276  		sym2 := v_1.Aux
  4277  		_ = v_1.Args[2]
  4278  		ptr2 := v_1.Args[0]
  4279  		x := v_1.Args[1]
  4280  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  4281  			break
  4282  		}
  4283  		v.reset(OpMIPSMOVHUreg)
  4284  		v.AddArg(x)
  4285  		return true
  4286  	}
  4287  	return false
  4288  }
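        // MOVHUreg (zero-extend halfword) is eliminated when its operand is already
        // zero-extended: unsigned byte/halfword loads and extensions collapse to a
        // plain MOVWreg. A single-use signed MOVHload feeding it is re-issued as a
        // MOVHUload in the load's block, an ANDconst mask is narrowed to c&0xffff,
        // and a constant operand folds to int64(uint16(c)).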
  4289  func rewriteValueMIPS_OpMIPSMOVHUreg_0(v *Value) bool {
  4290  	b := v.Block
  4291  	_ = b
  4292  	// match: (MOVHUreg x:(MOVBUload _ _))
  4293  	// cond:
  4294  	// result: (MOVWreg x)
  4295  	for {
  4296  		x := v.Args[0]
  4297  		if x.Op != OpMIPSMOVBUload {
  4298  			break
  4299  		}
  4300  		_ = x.Args[1]
  4301  		v.reset(OpMIPSMOVWreg)
  4302  		v.AddArg(x)
  4303  		return true
  4304  	}
  4305  	// match: (MOVHUreg x:(MOVHUload _ _))
  4306  	// cond:
  4307  	// result: (MOVWreg x)
  4308  	for {
  4309  		x := v.Args[0]
  4310  		if x.Op != OpMIPSMOVHUload {
  4311  			break
  4312  		}
  4313  		_ = x.Args[1]
  4314  		v.reset(OpMIPSMOVWreg)
  4315  		v.AddArg(x)
  4316  		return true
  4317  	}
  4318  	// match: (MOVHUreg x:(MOVBUreg _))
  4319  	// cond:
  4320  	// result: (MOVWreg x)
  4321  	for {
  4322  		x := v.Args[0]
  4323  		if x.Op != OpMIPSMOVBUreg {
  4324  			break
  4325  		}
  4326  		v.reset(OpMIPSMOVWreg)
  4327  		v.AddArg(x)
  4328  		return true
  4329  	}
  4330  	// match: (MOVHUreg x:(MOVHUreg _))
  4331  	// cond:
  4332  	// result: (MOVWreg x)
  4333  	for {
  4334  		x := v.Args[0]
  4335  		if x.Op != OpMIPSMOVHUreg {
  4336  			break
  4337  		}
  4338  		v.reset(OpMIPSMOVWreg)
  4339  		v.AddArg(x)
  4340  		return true
  4341  	}
  4342  	// match: (MOVHUreg <t> x:(MOVHload [off] {sym} ptr mem))
  4343  	// cond: x.Uses == 1 && clobber(x)
  4344  	// result: @x.Block (MOVHUload <t> [off] {sym} ptr mem)
  4345  	for {
  4346  		t := v.Type
  4347  		x := v.Args[0]
  4348  		if x.Op != OpMIPSMOVHload {
  4349  			break
  4350  		}
  4351  		off := x.AuxInt
  4352  		sym := x.Aux
  4353  		_ = x.Args[1]
  4354  		ptr := x.Args[0]
  4355  		mem := x.Args[1]
  4356  		if !(x.Uses == 1 && clobber(x)) {
  4357  			break
  4358  		}
  4359  		b = x.Block
  4360  		v0 := b.NewValue0(v.Pos, OpMIPSMOVHUload, t)
  4361  		v.reset(OpCopy)
  4362  		v.AddArg(v0)
  4363  		v0.AuxInt = off
  4364  		v0.Aux = sym
  4365  		v0.AddArg(ptr)
  4366  		v0.AddArg(mem)
  4367  		return true
  4368  	}
  4369  	// match: (MOVHUreg (ANDconst [c] x))
  4370  	// cond:
  4371  	// result: (ANDconst [c&0xffff] x)
  4372  	for {
  4373  		v_0 := v.Args[0]
  4374  		if v_0.Op != OpMIPSANDconst {
  4375  			break
  4376  		}
  4377  		c := v_0.AuxInt
  4378  		x := v_0.Args[0]
  4379  		v.reset(OpMIPSANDconst)
  4380  		v.AuxInt = c & 0xffff
  4381  		v.AddArg(x)
  4382  		return true
  4383  	}
  4384  	// match: (MOVHUreg (MOVWconst [c]))
  4385  	// cond:
  4386  	// result: (MOVWconst [int64(uint16(c))])
  4387  	for {
  4388  		v_0 := v.Args[0]
  4389  		if v_0.Op != OpMIPSMOVWconst {
  4390  			break
  4391  		}
  4392  		c := v_0.AuxInt
  4393  		v.reset(OpMIPSMOVWconst)
  4394  		v.AuxInt = int64(uint16(c))
  4395  		return true
  4396  	}
  4397  	return false
  4398  }
  4399  func rewriteValueMIPS_OpMIPSMOVHload_0(v *Value) bool {
  4400  	// match: (MOVHload [off1] {sym} x:(ADDconst [off2] ptr) mem)
  4401  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4402  	// result: (MOVHload [off1+off2] {sym} ptr mem)
  4403  	for {
  4404  		off1 := v.AuxInt
  4405  		sym := v.Aux
  4406  		_ = v.Args[1]
  4407  		x := v.Args[0]
  4408  		if x.Op != OpMIPSADDconst {
  4409  			break
  4410  		}
  4411  		off2 := x.AuxInt
  4412  		ptr := x.Args[0]
  4413  		mem := v.Args[1]
  4414  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4415  			break
  4416  		}
  4417  		v.reset(OpMIPSMOVHload)
  4418  		v.AuxInt = off1 + off2
  4419  		v.Aux = sym
  4420  		v.AddArg(ptr)
  4421  		v.AddArg(mem)
  4422  		return true
  4423  	}
  4424  	// match: (MOVHload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  4425  	// cond: canMergeSym(sym1,sym2)
  4426  	// result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4427  	for {
  4428  		off1 := v.AuxInt
  4429  		sym1 := v.Aux
  4430  		_ = v.Args[1]
  4431  		v_0 := v.Args[0]
  4432  		if v_0.Op != OpMIPSMOVWaddr {
  4433  			break
  4434  		}
  4435  		off2 := v_0.AuxInt
  4436  		sym2 := v_0.Aux
  4437  		ptr := v_0.Args[0]
  4438  		mem := v.Args[1]
  4439  		if !(canMergeSym(sym1, sym2)) {
  4440  			break
  4441  		}
  4442  		v.reset(OpMIPSMOVHload)
  4443  		v.AuxInt = off1 + off2
  4444  		v.Aux = mergeSym(sym1, sym2)
  4445  		v.AddArg(ptr)
  4446  		v.AddArg(mem)
  4447  		return true
  4448  	}
  4449  	// match: (MOVHload [off] {sym} ptr (MOVHstore [off2] {sym2} ptr2 x _))
  4450  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  4451  	// result: (MOVHreg x)
  4452  	for {
  4453  		off := v.AuxInt
  4454  		sym := v.Aux
  4455  		_ = v.Args[1]
  4456  		ptr := v.Args[0]
  4457  		v_1 := v.Args[1]
  4458  		if v_1.Op != OpMIPSMOVHstore {
  4459  			break
  4460  		}
  4461  		off2 := v_1.AuxInt
  4462  		sym2 := v_1.Aux
  4463  		_ = v_1.Args[2]
  4464  		ptr2 := v_1.Args[0]
  4465  		x := v_1.Args[1]
  4466  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  4467  			break
  4468  		}
  4469  		v.reset(OpMIPSMOVHreg)
  4470  		v.AddArg(x)
  4471  		return true
  4472  	}
  4473  	return false
  4474  }
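        // MOVHreg (sign-extend halfword) is the signed counterpart: operands that
        // already fit in 16 bits with the correct sign collapse to MOVWreg, a
        // single-use MOVHUload feeding it is re-issued as a signed MOVHload, an
        // ANDconst whose bit 15 is clear absorbs the extension (mask narrowed to
        // c&0x7fff), and a constant operand folds to int64(int16(c)).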
  4475  func rewriteValueMIPS_OpMIPSMOVHreg_0(v *Value) bool {
  4476  	b := v.Block
  4477  	_ = b
  4478  	// match: (MOVHreg x:(MOVBload _ _))
  4479  	// cond:
  4480  	// result: (MOVWreg x)
  4481  	for {
  4482  		x := v.Args[0]
  4483  		if x.Op != OpMIPSMOVBload {
  4484  			break
  4485  		}
  4486  		_ = x.Args[1]
  4487  		v.reset(OpMIPSMOVWreg)
  4488  		v.AddArg(x)
  4489  		return true
  4490  	}
  4491  	// match: (MOVHreg x:(MOVBUload _ _))
  4492  	// cond:
  4493  	// result: (MOVWreg x)
  4494  	for {
  4495  		x := v.Args[0]
  4496  		if x.Op != OpMIPSMOVBUload {
  4497  			break
  4498  		}
  4499  		_ = x.Args[1]
  4500  		v.reset(OpMIPSMOVWreg)
  4501  		v.AddArg(x)
  4502  		return true
  4503  	}
  4504  	// match: (MOVHreg x:(MOVHload _ _))
  4505  	// cond:
  4506  	// result: (MOVWreg x)
  4507  	for {
  4508  		x := v.Args[0]
  4509  		if x.Op != OpMIPSMOVHload {
  4510  			break
  4511  		}
  4512  		_ = x.Args[1]
  4513  		v.reset(OpMIPSMOVWreg)
  4514  		v.AddArg(x)
  4515  		return true
  4516  	}
  4517  	// match: (MOVHreg x:(MOVBreg _))
  4518  	// cond:
  4519  	// result: (MOVWreg x)
  4520  	for {
  4521  		x := v.Args[0]
  4522  		if x.Op != OpMIPSMOVBreg {
  4523  			break
  4524  		}
  4525  		v.reset(OpMIPSMOVWreg)
  4526  		v.AddArg(x)
  4527  		return true
  4528  	}
  4529  	// match: (MOVHreg x:(MOVBUreg _))
  4530  	// cond:
  4531  	// result: (MOVWreg x)
  4532  	for {
  4533  		x := v.Args[0]
  4534  		if x.Op != OpMIPSMOVBUreg {
  4535  			break
  4536  		}
  4537  		v.reset(OpMIPSMOVWreg)
  4538  		v.AddArg(x)
  4539  		return true
  4540  	}
  4541  	// match: (MOVHreg x:(MOVHreg _))
  4542  	// cond:
  4543  	// result: (MOVWreg x)
  4544  	for {
  4545  		x := v.Args[0]
  4546  		if x.Op != OpMIPSMOVHreg {
  4547  			break
  4548  		}
  4549  		v.reset(OpMIPSMOVWreg)
  4550  		v.AddArg(x)
  4551  		return true
  4552  	}
  4553  	// match: (MOVHreg <t> x:(MOVHUload [off] {sym} ptr mem))
  4554  	// cond: x.Uses == 1 && clobber(x)
  4555  	// result: @x.Block (MOVHload <t> [off] {sym} ptr mem)
  4556  	for {
  4557  		t := v.Type
  4558  		x := v.Args[0]
  4559  		if x.Op != OpMIPSMOVHUload {
  4560  			break
  4561  		}
  4562  		off := x.AuxInt
  4563  		sym := x.Aux
  4564  		_ = x.Args[1]
  4565  		ptr := x.Args[0]
  4566  		mem := x.Args[1]
  4567  		if !(x.Uses == 1 && clobber(x)) {
  4568  			break
  4569  		}
  4570  		b = x.Block
  4571  		v0 := b.NewValue0(v.Pos, OpMIPSMOVHload, t)
  4572  		v.reset(OpCopy)
  4573  		v.AddArg(v0)
  4574  		v0.AuxInt = off
  4575  		v0.Aux = sym
  4576  		v0.AddArg(ptr)
  4577  		v0.AddArg(mem)
  4578  		return true
  4579  	}
  4580  	// match: (MOVHreg (ANDconst [c] x))
  4581  	// cond: c & 0x8000 == 0
  4582  	// result: (ANDconst [c&0x7fff] x)
  4583  	for {
  4584  		v_0 := v.Args[0]
  4585  		if v_0.Op != OpMIPSANDconst {
  4586  			break
  4587  		}
  4588  		c := v_0.AuxInt
  4589  		x := v_0.Args[0]
  4590  		if !(c&0x8000 == 0) {
  4591  			break
  4592  		}
  4593  		v.reset(OpMIPSANDconst)
  4594  		v.AuxInt = c & 0x7fff
  4595  		v.AddArg(x)
  4596  		return true
  4597  	}
  4598  	// match: (MOVHreg (MOVWconst [c]))
  4599  	// cond:
  4600  	// result: (MOVWconst [int64(int16(c))])
  4601  	for {
  4602  		v_0 := v.Args[0]
  4603  		if v_0.Op != OpMIPSMOVWconst {
  4604  			break
  4605  		}
  4606  		c := v_0.AuxInt
  4607  		v.reset(OpMIPSMOVWconst)
  4608  		v.AuxInt = int64(int16(c))
  4609  		return true
  4610  	}
  4611  	return false
  4612  }
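        // Besides the shared offset/symbol rules, MOVHstore turns a store of the
        // constant 0 into MOVHstorezero and drops a MOVHreg/MOVHUreg/MOVWreg on the
        // stored value, since only the low 16 bits reach memory anyway.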
  4613  func rewriteValueMIPS_OpMIPSMOVHstore_0(v *Value) bool {
  4614  	// match: (MOVHstore [off1] {sym} x:(ADDconst [off2] ptr) val mem)
  4615  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4616  	// result: (MOVHstore [off1+off2] {sym} ptr val mem)
  4617  	for {
  4618  		off1 := v.AuxInt
  4619  		sym := v.Aux
  4620  		_ = v.Args[2]
  4621  		x := v.Args[0]
  4622  		if x.Op != OpMIPSADDconst {
  4623  			break
  4624  		}
  4625  		off2 := x.AuxInt
  4626  		ptr := x.Args[0]
  4627  		val := v.Args[1]
  4628  		mem := v.Args[2]
  4629  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4630  			break
  4631  		}
  4632  		v.reset(OpMIPSMOVHstore)
  4633  		v.AuxInt = off1 + off2
  4634  		v.Aux = sym
  4635  		v.AddArg(ptr)
  4636  		v.AddArg(val)
  4637  		v.AddArg(mem)
  4638  		return true
  4639  	}
  4640  	// match: (MOVHstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem)
  4641  	// cond: canMergeSym(sym1,sym2)
  4642  	// result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
  4643  	for {
  4644  		off1 := v.AuxInt
  4645  		sym1 := v.Aux
  4646  		_ = v.Args[2]
  4647  		v_0 := v.Args[0]
  4648  		if v_0.Op != OpMIPSMOVWaddr {
  4649  			break
  4650  		}
  4651  		off2 := v_0.AuxInt
  4652  		sym2 := v_0.Aux
  4653  		ptr := v_0.Args[0]
  4654  		val := v.Args[1]
  4655  		mem := v.Args[2]
  4656  		if !(canMergeSym(sym1, sym2)) {
  4657  			break
  4658  		}
  4659  		v.reset(OpMIPSMOVHstore)
  4660  		v.AuxInt = off1 + off2
  4661  		v.Aux = mergeSym(sym1, sym2)
  4662  		v.AddArg(ptr)
  4663  		v.AddArg(val)
  4664  		v.AddArg(mem)
  4665  		return true
  4666  	}
  4667  	// match: (MOVHstore [off] {sym} ptr (MOVWconst [0]) mem)
  4668  	// cond:
  4669  	// result: (MOVHstorezero [off] {sym} ptr mem)
  4670  	for {
  4671  		off := v.AuxInt
  4672  		sym := v.Aux
  4673  		_ = v.Args[2]
  4674  		ptr := v.Args[0]
  4675  		v_1 := v.Args[1]
  4676  		if v_1.Op != OpMIPSMOVWconst {
  4677  			break
  4678  		}
  4679  		if v_1.AuxInt != 0 {
  4680  			break
  4681  		}
  4682  		mem := v.Args[2]
  4683  		v.reset(OpMIPSMOVHstorezero)
  4684  		v.AuxInt = off
  4685  		v.Aux = sym
  4686  		v.AddArg(ptr)
  4687  		v.AddArg(mem)
  4688  		return true
  4689  	}
  4690  	// match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
  4691  	// cond:
  4692  	// result: (MOVHstore [off] {sym} ptr x mem)
  4693  	for {
  4694  		off := v.AuxInt
  4695  		sym := v.Aux
  4696  		_ = v.Args[2]
  4697  		ptr := v.Args[0]
  4698  		v_1 := v.Args[1]
  4699  		if v_1.Op != OpMIPSMOVHreg {
  4700  			break
  4701  		}
  4702  		x := v_1.Args[0]
  4703  		mem := v.Args[2]
  4704  		v.reset(OpMIPSMOVHstore)
  4705  		v.AuxInt = off
  4706  		v.Aux = sym
  4707  		v.AddArg(ptr)
  4708  		v.AddArg(x)
  4709  		v.AddArg(mem)
  4710  		return true
  4711  	}
  4712  	// match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
  4713  	// cond:
  4714  	// result: (MOVHstore [off] {sym} ptr x mem)
  4715  	for {
  4716  		off := v.AuxInt
  4717  		sym := v.Aux
  4718  		_ = v.Args[2]
  4719  		ptr := v.Args[0]
  4720  		v_1 := v.Args[1]
  4721  		if v_1.Op != OpMIPSMOVHUreg {
  4722  			break
  4723  		}
  4724  		x := v_1.Args[0]
  4725  		mem := v.Args[2]
  4726  		v.reset(OpMIPSMOVHstore)
  4727  		v.AuxInt = off
  4728  		v.Aux = sym
  4729  		v.AddArg(ptr)
  4730  		v.AddArg(x)
  4731  		v.AddArg(mem)
  4732  		return true
  4733  	}
  4734  	// match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
  4735  	// cond:
  4736  	// result: (MOVHstore [off] {sym} ptr x mem)
  4737  	for {
  4738  		off := v.AuxInt
  4739  		sym := v.Aux
  4740  		_ = v.Args[2]
  4741  		ptr := v.Args[0]
  4742  		v_1 := v.Args[1]
  4743  		if v_1.Op != OpMIPSMOVWreg {
  4744  			break
  4745  		}
  4746  		x := v_1.Args[0]
  4747  		mem := v.Args[2]
  4748  		v.reset(OpMIPSMOVHstore)
  4749  		v.AuxInt = off
  4750  		v.Aux = sym
  4751  		v.AddArg(ptr)
  4752  		v.AddArg(x)
  4753  		v.AddArg(mem)
  4754  		return true
  4755  	}
  4756  	return false
  4757  }
  4758  func rewriteValueMIPS_OpMIPSMOVHstorezero_0(v *Value) bool {
  4759  	// match: (MOVHstorezero [off1] {sym} x:(ADDconst [off2] ptr) mem)
  4760  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4761  	// result: (MOVHstorezero [off1+off2] {sym} ptr mem)
  4762  	for {
  4763  		off1 := v.AuxInt
  4764  		sym := v.Aux
  4765  		_ = v.Args[1]
  4766  		x := v.Args[0]
  4767  		if x.Op != OpMIPSADDconst {
  4768  			break
  4769  		}
  4770  		off2 := x.AuxInt
  4771  		ptr := x.Args[0]
  4772  		mem := v.Args[1]
  4773  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4774  			break
  4775  		}
  4776  		v.reset(OpMIPSMOVHstorezero)
  4777  		v.AuxInt = off1 + off2
  4778  		v.Aux = sym
  4779  		v.AddArg(ptr)
  4780  		v.AddArg(mem)
  4781  		return true
  4782  	}
  4783  	// match: (MOVHstorezero [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  4784  	// cond: canMergeSym(sym1,sym2)
  4785  	// result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4786  	for {
  4787  		off1 := v.AuxInt
  4788  		sym1 := v.Aux
  4789  		_ = v.Args[1]
  4790  		v_0 := v.Args[0]
  4791  		if v_0.Op != OpMIPSMOVWaddr {
  4792  			break
  4793  		}
  4794  		off2 := v_0.AuxInt
  4795  		sym2 := v_0.Aux
  4796  		ptr := v_0.Args[0]
  4797  		mem := v.Args[1]
  4798  		if !(canMergeSym(sym1, sym2)) {
  4799  			break
  4800  		}
  4801  		v.reset(OpMIPSMOVHstorezero)
  4802  		v.AuxInt = off1 + off2
  4803  		v.Aux = mergeSym(sym1, sym2)
  4804  		v.AddArg(ptr)
  4805  		v.AddArg(mem)
  4806  		return true
  4807  	}
  4808  	return false
  4809  }
  4810  func rewriteValueMIPS_OpMIPSMOVWload_0(v *Value) bool {
  4811  	// match: (MOVWload [off1] {sym} x:(ADDconst [off2] ptr) mem)
  4812  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4813  	// result: (MOVWload [off1+off2] {sym} ptr mem)
  4814  	for {
  4815  		off1 := v.AuxInt
  4816  		sym := v.Aux
  4817  		_ = v.Args[1]
  4818  		x := v.Args[0]
  4819  		if x.Op != OpMIPSADDconst {
  4820  			break
  4821  		}
  4822  		off2 := x.AuxInt
  4823  		ptr := x.Args[0]
  4824  		mem := v.Args[1]
  4825  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4826  			break
  4827  		}
  4828  		v.reset(OpMIPSMOVWload)
  4829  		v.AuxInt = off1 + off2
  4830  		v.Aux = sym
  4831  		v.AddArg(ptr)
  4832  		v.AddArg(mem)
  4833  		return true
  4834  	}
  4835  	// match: (MOVWload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  4836  	// cond: canMergeSym(sym1,sym2)
  4837  	// result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4838  	for {
  4839  		off1 := v.AuxInt
  4840  		sym1 := v.Aux
  4841  		_ = v.Args[1]
  4842  		v_0 := v.Args[0]
  4843  		if v_0.Op != OpMIPSMOVWaddr {
  4844  			break
  4845  		}
  4846  		off2 := v_0.AuxInt
  4847  		sym2 := v_0.Aux
  4848  		ptr := v_0.Args[0]
  4849  		mem := v.Args[1]
  4850  		if !(canMergeSym(sym1, sym2)) {
  4851  			break
  4852  		}
  4853  		v.reset(OpMIPSMOVWload)
  4854  		v.AuxInt = off1 + off2
  4855  		v.Aux = mergeSym(sym1, sym2)
  4856  		v.AddArg(ptr)
  4857  		v.AddArg(mem)
  4858  		return true
  4859  	}
  4860  	// match: (MOVWload [off] {sym} ptr (MOVWstore [off2] {sym2} ptr2 x _))
  4861  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  4862  	// result: x
  4863  	for {
  4864  		off := v.AuxInt
  4865  		sym := v.Aux
  4866  		_ = v.Args[1]
  4867  		ptr := v.Args[0]
  4868  		v_1 := v.Args[1]
  4869  		if v_1.Op != OpMIPSMOVWstore {
  4870  			break
  4871  		}
  4872  		off2 := v_1.AuxInt
  4873  		sym2 := v_1.Aux
  4874  		_ = v_1.Args[2]
  4875  		ptr2 := v_1.Args[0]
  4876  		x := v_1.Args[1]
  4877  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  4878  			break
  4879  		}
  4880  		v.reset(OpCopy)
  4881  		v.Type = x.Type
  4882  		v.AddArg(x)
  4883  		return true
  4884  	}
  4885  	return false
  4886  }
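        // MOVWreg is a full-width register move on MIPS32, so when its argument has
        // only this use it is replaced by MOVWnop (which emits no instruction), and a
        // MOVWreg of a constant is just the constant.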
  4887  func rewriteValueMIPS_OpMIPSMOVWreg_0(v *Value) bool {
  4888  	// match: (MOVWreg x)
  4889  	// cond: x.Uses == 1
  4890  	// result: (MOVWnop x)
  4891  	for {
  4892  		x := v.Args[0]
  4893  		if !(x.Uses == 1) {
  4894  			break
  4895  		}
  4896  		v.reset(OpMIPSMOVWnop)
  4897  		v.AddArg(x)
  4898  		return true
  4899  	}
  4900  	// match: (MOVWreg (MOVWconst [c]))
  4901  	// cond:
  4902  	// result: (MOVWconst [c])
  4903  	for {
  4904  		v_0 := v.Args[0]
  4905  		if v_0.Op != OpMIPSMOVWconst {
  4906  			break
  4907  		}
  4908  		c := v_0.AuxInt
  4909  		v.reset(OpMIPSMOVWconst)
  4910  		v.AuxInt = c
  4911  		return true
  4912  	}
  4913  	return false
  4914  }
  4915  func rewriteValueMIPS_OpMIPSMOVWstore_0(v *Value) bool {
  4916  	// match: (MOVWstore [off1] {sym} x:(ADDconst [off2] ptr) val mem)
  4917  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4918  	// result: (MOVWstore [off1+off2] {sym} ptr val mem)
  4919  	for {
  4920  		off1 := v.AuxInt
  4921  		sym := v.Aux
  4922  		_ = v.Args[2]
  4923  		x := v.Args[0]
  4924  		if x.Op != OpMIPSADDconst {
  4925  			break
  4926  		}
  4927  		off2 := x.AuxInt
  4928  		ptr := x.Args[0]
  4929  		val := v.Args[1]
  4930  		mem := v.Args[2]
  4931  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4932  			break
  4933  		}
  4934  		v.reset(OpMIPSMOVWstore)
  4935  		v.AuxInt = off1 + off2
  4936  		v.Aux = sym
  4937  		v.AddArg(ptr)
  4938  		v.AddArg(val)
  4939  		v.AddArg(mem)
  4940  		return true
  4941  	}
  4942  	// match: (MOVWstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem)
  4943  	// cond: canMergeSym(sym1,sym2)
  4944  	// result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
  4945  	for {
  4946  		off1 := v.AuxInt
  4947  		sym1 := v.Aux
  4948  		_ = v.Args[2]
  4949  		v_0 := v.Args[0]
  4950  		if v_0.Op != OpMIPSMOVWaddr {
  4951  			break
  4952  		}
  4953  		off2 := v_0.AuxInt
  4954  		sym2 := v_0.Aux
  4955  		ptr := v_0.Args[0]
  4956  		val := v.Args[1]
  4957  		mem := v.Args[2]
  4958  		if !(canMergeSym(sym1, sym2)) {
  4959  			break
  4960  		}
  4961  		v.reset(OpMIPSMOVWstore)
  4962  		v.AuxInt = off1 + off2
  4963  		v.Aux = mergeSym(sym1, sym2)
  4964  		v.AddArg(ptr)
  4965  		v.AddArg(val)
  4966  		v.AddArg(mem)
  4967  		return true
  4968  	}
  4969  	// match: (MOVWstore [off] {sym} ptr (MOVWconst [0]) mem)
  4970  	// cond:
  4971  	// result: (MOVWstorezero [off] {sym} ptr mem)
  4972  	for {
  4973  		off := v.AuxInt
  4974  		sym := v.Aux
  4975  		_ = v.Args[2]
  4976  		ptr := v.Args[0]
  4977  		v_1 := v.Args[1]
  4978  		if v_1.Op != OpMIPSMOVWconst {
  4979  			break
  4980  		}
  4981  		if v_1.AuxInt != 0 {
  4982  			break
  4983  		}
  4984  		mem := v.Args[2]
  4985  		v.reset(OpMIPSMOVWstorezero)
  4986  		v.AuxInt = off
  4987  		v.Aux = sym
  4988  		v.AddArg(ptr)
  4989  		v.AddArg(mem)
  4990  		return true
  4991  	}
  4992  	// match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
  4993  	// cond:
  4994  	// result: (MOVWstore [off] {sym} ptr x mem)
  4995  	for {
  4996  		off := v.AuxInt
  4997  		sym := v.Aux
  4998  		_ = v.Args[2]
  4999  		ptr := v.Args[0]
  5000  		v_1 := v.Args[1]
  5001  		if v_1.Op != OpMIPSMOVWreg {
  5002  			break
  5003  		}
  5004  		x := v_1.Args[0]
  5005  		mem := v.Args[2]
  5006  		v.reset(OpMIPSMOVWstore)
  5007  		v.AuxInt = off
  5008  		v.Aux = sym
  5009  		v.AddArg(ptr)
  5010  		v.AddArg(x)
  5011  		v.AddArg(mem)
  5012  		return true
  5013  	}
  5014  	return false
  5015  }
  5016  func rewriteValueMIPS_OpMIPSMOVWstorezero_0(v *Value) bool {
  5017  	// match: (MOVWstorezero [off1] {sym} x:(ADDconst [off2] ptr) mem)
  5018  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  5019  	// result: (MOVWstorezero [off1+off2] {sym} ptr mem)
  5020  	for {
  5021  		off1 := v.AuxInt
  5022  		sym := v.Aux
  5023  		_ = v.Args[1]
  5024  		x := v.Args[0]
  5025  		if x.Op != OpMIPSADDconst {
  5026  			break
  5027  		}
  5028  		off2 := x.AuxInt
  5029  		ptr := x.Args[0]
  5030  		mem := v.Args[1]
  5031  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  5032  			break
  5033  		}
  5034  		v.reset(OpMIPSMOVWstorezero)
  5035  		v.AuxInt = off1 + off2
  5036  		v.Aux = sym
  5037  		v.AddArg(ptr)
  5038  		v.AddArg(mem)
  5039  		return true
  5040  	}
  5041  	// match: (MOVWstorezero [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  5042  	// cond: canMergeSym(sym1,sym2)
  5043  	// result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  5044  	for {
  5045  		off1 := v.AuxInt
  5046  		sym1 := v.Aux
  5047  		_ = v.Args[1]
  5048  		v_0 := v.Args[0]
  5049  		if v_0.Op != OpMIPSMOVWaddr {
  5050  			break
  5051  		}
  5052  		off2 := v_0.AuxInt
  5053  		sym2 := v_0.Aux
  5054  		ptr := v_0.Args[0]
  5055  		mem := v.Args[1]
  5056  		if !(canMergeSym(sym1, sym2)) {
  5057  			break
  5058  		}
  5059  		v.reset(OpMIPSMOVWstorezero)
  5060  		v.AuxInt = off1 + off2
  5061  		v.Aux = mergeSym(sym1, sym2)
  5062  		v.AddArg(ptr)
  5063  		v.AddArg(mem)
  5064  		return true
  5065  	}
  5066  	return false
  5067  }
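        // The MUL rules do strength reduction on constant operands: a zero operand
        // yields 0, ±1 yields the other operand (negated for -1), a power of two
        // becomes a left shift (e.g. x*8 is rewritten to SLLconst [3] x), and two
        // constant operands are folded with 32-bit wraparound.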
  5068  func rewriteValueMIPS_OpMIPSMUL_0(v *Value) bool {
  5069  	// match: (MUL (MOVWconst [0]) _)
  5070  	// cond:
  5071  	// result: (MOVWconst [0])
  5072  	for {
  5073  		_ = v.Args[1]
  5074  		v_0 := v.Args[0]
  5075  		if v_0.Op != OpMIPSMOVWconst {
  5076  			break
  5077  		}
  5078  		if v_0.AuxInt != 0 {
  5079  			break
  5080  		}
  5081  		v.reset(OpMIPSMOVWconst)
  5082  		v.AuxInt = 0
  5083  		return true
  5084  	}
  5085  	// match: (MUL _ (MOVWconst [0]))
  5086  	// cond:
  5087  	// result: (MOVWconst [0])
  5088  	for {
  5089  		_ = v.Args[1]
  5090  		v_1 := v.Args[1]
  5091  		if v_1.Op != OpMIPSMOVWconst {
  5092  			break
  5093  		}
  5094  		if v_1.AuxInt != 0 {
  5095  			break
  5096  		}
  5097  		v.reset(OpMIPSMOVWconst)
  5098  		v.AuxInt = 0
  5099  		return true
  5100  	}
  5101  	// match: (MUL (MOVWconst [1]) x)
  5102  	// cond:
  5103  	// result: x
  5104  	for {
  5105  		_ = v.Args[1]
  5106  		v_0 := v.Args[0]
  5107  		if v_0.Op != OpMIPSMOVWconst {
  5108  			break
  5109  		}
  5110  		if v_0.AuxInt != 1 {
  5111  			break
  5112  		}
  5113  		x := v.Args[1]
  5114  		v.reset(OpCopy)
  5115  		v.Type = x.Type
  5116  		v.AddArg(x)
  5117  		return true
  5118  	}
  5119  	// match: (MUL x (MOVWconst [1]))
  5120  	// cond:
  5121  	// result: x
  5122  	for {
  5123  		_ = v.Args[1]
  5124  		x := v.Args[0]
  5125  		v_1 := v.Args[1]
  5126  		if v_1.Op != OpMIPSMOVWconst {
  5127  			break
  5128  		}
  5129  		if v_1.AuxInt != 1 {
  5130  			break
  5131  		}
  5132  		v.reset(OpCopy)
  5133  		v.Type = x.Type
  5134  		v.AddArg(x)
  5135  		return true
  5136  	}
  5137  	// match: (MUL (MOVWconst [-1]) x)
  5138  	// cond:
  5139  	// result: (NEG x)
  5140  	for {
  5141  		_ = v.Args[1]
  5142  		v_0 := v.Args[0]
  5143  		if v_0.Op != OpMIPSMOVWconst {
  5144  			break
  5145  		}
  5146  		if v_0.AuxInt != -1 {
  5147  			break
  5148  		}
  5149  		x := v.Args[1]
  5150  		v.reset(OpMIPSNEG)
  5151  		v.AddArg(x)
  5152  		return true
  5153  	}
  5154  	// match: (MUL x (MOVWconst [-1]))
  5155  	// cond:
  5156  	// result: (NEG x)
  5157  	for {
  5158  		_ = v.Args[1]
  5159  		x := v.Args[0]
  5160  		v_1 := v.Args[1]
  5161  		if v_1.Op != OpMIPSMOVWconst {
  5162  			break
  5163  		}
  5164  		if v_1.AuxInt != -1 {
  5165  			break
  5166  		}
  5167  		v.reset(OpMIPSNEG)
  5168  		v.AddArg(x)
  5169  		return true
  5170  	}
  5171  	// match: (MUL (MOVWconst [c]) x)
  5172  	// cond: isPowerOfTwo(int64(uint32(c)))
  5173  	// result: (SLLconst [log2(int64(uint32(c)))] x)
  5174  	for {
  5175  		_ = v.Args[1]
  5176  		v_0 := v.Args[0]
  5177  		if v_0.Op != OpMIPSMOVWconst {
  5178  			break
  5179  		}
  5180  		c := v_0.AuxInt
  5181  		x := v.Args[1]
  5182  		if !(isPowerOfTwo(int64(uint32(c)))) {
  5183  			break
  5184  		}
  5185  		v.reset(OpMIPSSLLconst)
  5186  		v.AuxInt = log2(int64(uint32(c)))
  5187  		v.AddArg(x)
  5188  		return true
  5189  	}
  5190  	// match: (MUL x (MOVWconst [c]))
  5191  	// cond: isPowerOfTwo(int64(uint32(c)))
  5192  	// result: (SLLconst [log2(int64(uint32(c)))] x)
  5193  	for {
  5194  		_ = v.Args[1]
  5195  		x := v.Args[0]
  5196  		v_1 := v.Args[1]
  5197  		if v_1.Op != OpMIPSMOVWconst {
  5198  			break
  5199  		}
  5200  		c := v_1.AuxInt
  5201  		if !(isPowerOfTwo(int64(uint32(c)))) {
  5202  			break
  5203  		}
  5204  		v.reset(OpMIPSSLLconst)
  5205  		v.AuxInt = log2(int64(uint32(c)))
  5206  		v.AddArg(x)
  5207  		return true
  5208  	}
  5209  	// match: (MUL (MOVWconst [c]) (MOVWconst [d]))
  5210  	// cond:
  5211  	// result: (MOVWconst [int64(int32(c)*int32(d))])
  5212  	for {
  5213  		_ = v.Args[1]
  5214  		v_0 := v.Args[0]
  5215  		if v_0.Op != OpMIPSMOVWconst {
  5216  			break
  5217  		}
  5218  		c := v_0.AuxInt
  5219  		v_1 := v.Args[1]
  5220  		if v_1.Op != OpMIPSMOVWconst {
  5221  			break
  5222  		}
  5223  		d := v_1.AuxInt
  5224  		v.reset(OpMIPSMOVWconst)
  5225  		v.AuxInt = int64(int32(c) * int32(d))
  5226  		return true
  5227  	}
  5228  	// match: (MUL (MOVWconst [d]) (MOVWconst [c]))
  5229  	// cond:
  5230  	// result: (MOVWconst [int64(int32(c)*int32(d))])
  5231  	for {
  5232  		_ = v.Args[1]
  5233  		v_0 := v.Args[0]
  5234  		if v_0.Op != OpMIPSMOVWconst {
  5235  			break
  5236  		}
  5237  		d := v_0.AuxInt
  5238  		v_1 := v.Args[1]
  5239  		if v_1.Op != OpMIPSMOVWconst {
  5240  			break
  5241  		}
  5242  		c := v_1.AuxInt
  5243  		v.reset(OpMIPSMOVWconst)
  5244  		v.AuxInt = int64(int32(c) * int32(d))
  5245  		return true
  5246  	}
  5247  	return false
  5248  }
  5249  func rewriteValueMIPS_OpMIPSNEG_0(v *Value) bool {
  5250  	// match: (NEG (MOVWconst [c]))
  5251  	// cond:
  5252  	// result: (MOVWconst [int64(int32(-c))])
  5253  	for {
  5254  		v_0 := v.Args[0]
  5255  		if v_0.Op != OpMIPSMOVWconst {
  5256  			break
  5257  		}
  5258  		c := v_0.AuxInt
  5259  		v.reset(OpMIPSMOVWconst)
  5260  		v.AuxInt = int64(int32(-c))
  5261  		return true
  5262  	}
  5263  	return false
  5264  }
  5265  func rewriteValueMIPS_OpMIPSNOR_0(v *Value) bool {
  5266  	// match: (NOR x (MOVWconst [c]))
  5267  	// cond:
  5268  	// result: (NORconst [c] x)
  5269  	for {
  5270  		_ = v.Args[1]
  5271  		x := v.Args[0]
  5272  		v_1 := v.Args[1]
  5273  		if v_1.Op != OpMIPSMOVWconst {
  5274  			break
  5275  		}
  5276  		c := v_1.AuxInt
  5277  		v.reset(OpMIPSNORconst)
  5278  		v.AuxInt = c
  5279  		v.AddArg(x)
  5280  		return true
  5281  	}
  5282  	// match: (NOR (MOVWconst [c]) x)
  5283  	// cond:
  5284  	// result: (NORconst [c] x)
  5285  	for {
  5286  		_ = v.Args[1]
  5287  		v_0 := v.Args[0]
  5288  		if v_0.Op != OpMIPSMOVWconst {
  5289  			break
  5290  		}
  5291  		c := v_0.AuxInt
  5292  		x := v.Args[1]
  5293  		v.reset(OpMIPSNORconst)
  5294  		v.AuxInt = c
  5295  		v.AddArg(x)
  5296  		return true
  5297  	}
  5298  	return false
  5299  }
  5300  func rewriteValueMIPS_OpMIPSNORconst_0(v *Value) bool {
  5301  	// match: (NORconst [c] (MOVWconst [d]))
  5302  	// cond:
  5303  	// result: (MOVWconst [^(c|d)])
  5304  	for {
  5305  		c := v.AuxInt
  5306  		v_0 := v.Args[0]
  5307  		if v_0.Op != OpMIPSMOVWconst {
  5308  			break
  5309  		}
  5310  		d := v_0.AuxInt
  5311  		v.reset(OpMIPSMOVWconst)
  5312  		v.AuxInt = ^(c | d)
  5313  		return true
  5314  	}
  5315  	return false
  5316  }
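        // OR with a constant operand becomes ORconst, OR x x is x, and an OR of two
        // SGTUzero values (each 1 if its argument is nonzero) is combined into a
        // single SGTUzero of the OR of the arguments, since a|b != 0 exactly when
        // a != 0 or b != 0.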
  5317  func rewriteValueMIPS_OpMIPSOR_0(v *Value) bool {
  5318  	b := v.Block
  5319  	_ = b
  5320  	// match: (OR x (MOVWconst [c]))
  5321  	// cond:
  5322  	// result: (ORconst [c] x)
  5323  	for {
  5324  		_ = v.Args[1]
  5325  		x := v.Args[0]
  5326  		v_1 := v.Args[1]
  5327  		if v_1.Op != OpMIPSMOVWconst {
  5328  			break
  5329  		}
  5330  		c := v_1.AuxInt
  5331  		v.reset(OpMIPSORconst)
  5332  		v.AuxInt = c
  5333  		v.AddArg(x)
  5334  		return true
  5335  	}
  5336  	// match: (OR (MOVWconst [c]) x)
  5337  	// cond:
  5338  	// result: (ORconst [c] x)
  5339  	for {
  5340  		_ = v.Args[1]
  5341  		v_0 := v.Args[0]
  5342  		if v_0.Op != OpMIPSMOVWconst {
  5343  			break
  5344  		}
  5345  		c := v_0.AuxInt
  5346  		x := v.Args[1]
  5347  		v.reset(OpMIPSORconst)
  5348  		v.AuxInt = c
  5349  		v.AddArg(x)
  5350  		return true
  5351  	}
  5352  	// match: (OR x x)
  5353  	// cond:
  5354  	// result: x
  5355  	for {
  5356  		_ = v.Args[1]
  5357  		x := v.Args[0]
  5358  		if x != v.Args[1] {
  5359  			break
  5360  		}
  5361  		v.reset(OpCopy)
  5362  		v.Type = x.Type
  5363  		v.AddArg(x)
  5364  		return true
  5365  	}
  5366  	// match: (OR (SGTUzero x) (SGTUzero y))
  5367  	// cond:
  5368  	// result: (SGTUzero (OR <x.Type> x y))
  5369  	for {
  5370  		_ = v.Args[1]
  5371  		v_0 := v.Args[0]
  5372  		if v_0.Op != OpMIPSSGTUzero {
  5373  			break
  5374  		}
  5375  		x := v_0.Args[0]
  5376  		v_1 := v.Args[1]
  5377  		if v_1.Op != OpMIPSSGTUzero {
  5378  			break
  5379  		}
  5380  		y := v_1.Args[0]
  5381  		v.reset(OpMIPSSGTUzero)
  5382  		v0 := b.NewValue0(v.Pos, OpMIPSOR, x.Type)
  5383  		v0.AddArg(x)
  5384  		v0.AddArg(y)
  5385  		v.AddArg(v0)
  5386  		return true
  5387  	}
  5388  	// match: (OR (SGTUzero y) (SGTUzero x))
  5389  	// cond:
  5390  	// result: (SGTUzero (OR <x.Type> x y))
  5391  	for {
  5392  		_ = v.Args[1]
  5393  		v_0 := v.Args[0]
  5394  		if v_0.Op != OpMIPSSGTUzero {
  5395  			break
  5396  		}
  5397  		y := v_0.Args[0]
  5398  		v_1 := v.Args[1]
  5399  		if v_1.Op != OpMIPSSGTUzero {
  5400  			break
  5401  		}
  5402  		x := v_1.Args[0]
  5403  		v.reset(OpMIPSSGTUzero)
  5404  		v0 := b.NewValue0(v.Pos, OpMIPSOR, x.Type)
  5405  		v0.AddArg(x)
  5406  		v0.AddArg(y)
  5407  		v.AddArg(v0)
  5408  		return true
  5409  	}
  5410  	return false
  5411  }
  5412  func rewriteValueMIPS_OpMIPSORconst_0(v *Value) bool {
  5413  	// match: (ORconst [0] x)
  5414  	// cond:
  5415  	// result: x
  5416  	for {
  5417  		if v.AuxInt != 0 {
  5418  			break
  5419  		}
  5420  		x := v.Args[0]
  5421  		v.reset(OpCopy)
  5422  		v.Type = x.Type
  5423  		v.AddArg(x)
  5424  		return true
  5425  	}
  5426  	// match: (ORconst [-1] _)
  5427  	// cond:
  5428  	// result: (MOVWconst [-1])
  5429  	for {
  5430  		if v.AuxInt != -1 {
  5431  			break
  5432  		}
  5433  		v.reset(OpMIPSMOVWconst)
  5434  		v.AuxInt = -1
  5435  		return true
  5436  	}
  5437  	// match: (ORconst [c] (MOVWconst [d]))
  5438  	// cond:
  5439  	// result: (MOVWconst [c|d])
  5440  	for {
  5441  		c := v.AuxInt
  5442  		v_0 := v.Args[0]
  5443  		if v_0.Op != OpMIPSMOVWconst {
  5444  			break
  5445  		}
  5446  		d := v_0.AuxInt
  5447  		v.reset(OpMIPSMOVWconst)
  5448  		v.AuxInt = c | d
  5449  		return true
  5450  	}
  5451  	// match: (ORconst [c] (ORconst [d] x))
  5452  	// cond:
  5453  	// result: (ORconst [c|d] x)
  5454  	for {
  5455  		c := v.AuxInt
  5456  		v_0 := v.Args[0]
  5457  		if v_0.Op != OpMIPSORconst {
  5458  			break
  5459  		}
  5460  		d := v_0.AuxInt
  5461  		x := v_0.Args[0]
  5462  		v.reset(OpMIPSORconst)
  5463  		v.AuxInt = c | d
  5464  		v.AddArg(x)
  5465  		return true
  5466  	}
  5467  	return false
  5468  }
  5469  func rewriteValueMIPS_OpMIPSSGT_0(v *Value) bool {
  5470  	// match: (SGT (MOVWconst [c]) x)
  5471  	// cond:
  5472  	// result: (SGTconst [c] x)
  5473  	for {
  5474  		_ = v.Args[1]
  5475  		v_0 := v.Args[0]
  5476  		if v_0.Op != OpMIPSMOVWconst {
  5477  			break
  5478  		}
  5479  		c := v_0.AuxInt
  5480  		x := v.Args[1]
  5481  		v.reset(OpMIPSSGTconst)
  5482  		v.AuxInt = c
  5483  		v.AddArg(x)
  5484  		return true
  5485  	}
  5486  	// match: (SGT x (MOVWconst [0]))
  5487  	// cond:
  5488  	// result: (SGTzero x)
  5489  	for {
  5490  		_ = v.Args[1]
  5491  		x := v.Args[0]
  5492  		v_1 := v.Args[1]
  5493  		if v_1.Op != OpMIPSMOVWconst {
  5494  			break
  5495  		}
  5496  		if v_1.AuxInt != 0 {
  5497  			break
  5498  		}
  5499  		v.reset(OpMIPSSGTzero)
  5500  		v.AddArg(x)
  5501  		return true
  5502  	}
  5503  	return false
  5504  }
  5505  func rewriteValueMIPS_OpMIPSSGTU_0(v *Value) bool {
  5506  	// match: (SGTU (MOVWconst [c]) x)
  5507  	// cond:
  5508  	// result: (SGTUconst [c] x)
  5509  	for {
  5510  		_ = v.Args[1]
  5511  		v_0 := v.Args[0]
  5512  		if v_0.Op != OpMIPSMOVWconst {
  5513  			break
  5514  		}
  5515  		c := v_0.AuxInt
  5516  		x := v.Args[1]
  5517  		v.reset(OpMIPSSGTUconst)
  5518  		v.AuxInt = c
  5519  		v.AddArg(x)
  5520  		return true
  5521  	}
  5522  	// match: (SGTU x (MOVWconst [0]))
  5523  	// cond:
  5524  	// result: (SGTUzero x)
  5525  	for {
  5526  		_ = v.Args[1]
  5527  		x := v.Args[0]
  5528  		v_1 := v.Args[1]
  5529  		if v_1.Op != OpMIPSMOVWconst {
  5530  			break
  5531  		}
  5532  		if v_1.AuxInt != 0 {
  5533  			break
  5534  		}
  5535  		v.reset(OpMIPSSGTUzero)
  5536  		v.AddArg(x)
  5537  		return true
  5538  	}
  5539  	return false
  5540  }
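        // SGTUconst [c] x computes the unsigned comparison c > x (1 or 0). It folds
        // completely when x is a constant, and folds to 1 when x is structurally
        // bounded below c: a zero-extended byte or halfword, an ANDconst mask smaller
        // than c, or an SRLconst whose shift count caps the result below c.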
  5541  func rewriteValueMIPS_OpMIPSSGTUconst_0(v *Value) bool {
  5542  	// match: (SGTUconst [c] (MOVWconst [d]))
  5543  	// cond: uint32(c)>uint32(d)
  5544  	// result: (MOVWconst [1])
  5545  	for {
  5546  		c := v.AuxInt
  5547  		v_0 := v.Args[0]
  5548  		if v_0.Op != OpMIPSMOVWconst {
  5549  			break
  5550  		}
  5551  		d := v_0.AuxInt
  5552  		if !(uint32(c) > uint32(d)) {
  5553  			break
  5554  		}
  5555  		v.reset(OpMIPSMOVWconst)
  5556  		v.AuxInt = 1
  5557  		return true
  5558  	}
  5559  	// match: (SGTUconst [c] (MOVWconst [d]))
  5560  	// cond: uint32(c)<=uint32(d)
  5561  	// result: (MOVWconst [0])
  5562  	for {
  5563  		c := v.AuxInt
  5564  		v_0 := v.Args[0]
  5565  		if v_0.Op != OpMIPSMOVWconst {
  5566  			break
  5567  		}
  5568  		d := v_0.AuxInt
  5569  		if !(uint32(c) <= uint32(d)) {
  5570  			break
  5571  		}
  5572  		v.reset(OpMIPSMOVWconst)
  5573  		v.AuxInt = 0
  5574  		return true
  5575  	}
  5576  	// match: (SGTUconst [c] (MOVBUreg _))
  5577  	// cond: 0xff < uint32(c)
  5578  	// result: (MOVWconst [1])
  5579  	for {
  5580  		c := v.AuxInt
  5581  		v_0 := v.Args[0]
  5582  		if v_0.Op != OpMIPSMOVBUreg {
  5583  			break
  5584  		}
  5585  		if !(0xff < uint32(c)) {
  5586  			break
  5587  		}
  5588  		v.reset(OpMIPSMOVWconst)
  5589  		v.AuxInt = 1
  5590  		return true
  5591  	}
  5592  	// match: (SGTUconst [c] (MOVHUreg _))
  5593  	// cond: 0xffff < uint32(c)
  5594  	// result: (MOVWconst [1])
  5595  	for {
  5596  		c := v.AuxInt
  5597  		v_0 := v.Args[0]
  5598  		if v_0.Op != OpMIPSMOVHUreg {
  5599  			break
  5600  		}
  5601  		if !(0xffff < uint32(c)) {
  5602  			break
  5603  		}
  5604  		v.reset(OpMIPSMOVWconst)
  5605  		v.AuxInt = 1
  5606  		return true
  5607  	}
  5608  	// match: (SGTUconst [c] (ANDconst [m] _))
  5609  	// cond: uint32(m) < uint32(c)
  5610  	// result: (MOVWconst [1])
  5611  	for {
  5612  		c := v.AuxInt
  5613  		v_0 := v.Args[0]
  5614  		if v_0.Op != OpMIPSANDconst {
  5615  			break
  5616  		}
  5617  		m := v_0.AuxInt
  5618  		if !(uint32(m) < uint32(c)) {
  5619  			break
  5620  		}
  5621  		v.reset(OpMIPSMOVWconst)
  5622  		v.AuxInt = 1
  5623  		return true
  5624  	}
  5625  	// match: (SGTUconst [c] (SRLconst _ [d]))
  5626  	// cond: uint32(d) <= 31 && 1<<(32-uint32(d)) <= uint32(c)
  5627  	// result: (MOVWconst [1])
  5628  	for {
  5629  		c := v.AuxInt
  5630  		v_0 := v.Args[0]
  5631  		if v_0.Op != OpMIPSSRLconst {
  5632  			break
  5633  		}
  5634  		d := v_0.AuxInt
  5635  		if !(uint32(d) <= 31 && 1<<(32-uint32(d)) <= uint32(c)) {
  5636  			break
  5637  		}
  5638  		v.reset(OpMIPSMOVWconst)
  5639  		v.AuxInt = 1
  5640  		return true
  5641  	}
  5642  	return false
  5643  }
  5644  func rewriteValueMIPS_OpMIPSSGTUzero_0(v *Value) bool {
  5645  	// match: (SGTUzero (MOVWconst [d]))
  5646  	// cond: uint32(d) != 0
  5647  	// result: (MOVWconst [1])
  5648  	for {
  5649  		v_0 := v.Args[0]
  5650  		if v_0.Op != OpMIPSMOVWconst {
  5651  			break
  5652  		}
  5653  		d := v_0.AuxInt
  5654  		if !(uint32(d) != 0) {
  5655  			break
  5656  		}
  5657  		v.reset(OpMIPSMOVWconst)
  5658  		v.AuxInt = 1
  5659  		return true
  5660  	}
  5661  	// match: (SGTUzero (MOVWconst [d]))
  5662  	// cond: uint32(d) == 0
  5663  	// result: (MOVWconst [0])
  5664  	for {
  5665  		v_0 := v.Args[0]
  5666  		if v_0.Op != OpMIPSMOVWconst {
  5667  			break
  5668  		}
  5669  		d := v_0.AuxInt
  5670  		if !(uint32(d) == 0) {
  5671  			break
  5672  		}
  5673  		v.reset(OpMIPSMOVWconst)
  5674  		v.AuxInt = 0
  5675  		return true
  5676  	}
  5677  	return false
  5678  }
  5679  func rewriteValueMIPS_OpMIPSSGTconst_0(v *Value) bool {
  5680  	// match: (SGTconst [c] (MOVWconst [d]))
  5681  	// cond: int32(c) > int32(d)
  5682  	// result: (MOVWconst [1])
  5683  	for {
  5684  		c := v.AuxInt
  5685  		v_0 := v.Args[0]
  5686  		if v_0.Op != OpMIPSMOVWconst {
  5687  			break
  5688  		}
  5689  		d := v_0.AuxInt
  5690  		if !(int32(c) > int32(d)) {
  5691  			break
  5692  		}
  5693  		v.reset(OpMIPSMOVWconst)
  5694  		v.AuxInt = 1
  5695  		return true
  5696  	}
  5697  	// match: (SGTconst [c] (MOVWconst [d]))
  5698  	// cond: int32(c) <= int32(d)
  5699  	// result: (MOVWconst [0])
  5700  	for {
  5701  		c := v.AuxInt
  5702  		v_0 := v.Args[0]
  5703  		if v_0.Op != OpMIPSMOVWconst {
  5704  			break
  5705  		}
  5706  		d := v_0.AuxInt
  5707  		if !(int32(c) <= int32(d)) {
  5708  			break
  5709  		}
  5710  		v.reset(OpMIPSMOVWconst)
  5711  		v.AuxInt = 0
  5712  		return true
  5713  	}
  5714  	// match: (SGTconst [c] (MOVBreg _))
  5715  	// cond: 0x7f < int32(c)
  5716  	// result: (MOVWconst [1])
  5717  	for {
  5718  		c := v.AuxInt
  5719  		v_0 := v.Args[0]
  5720  		if v_0.Op != OpMIPSMOVBreg {
  5721  			break
  5722  		}
  5723  		if !(0x7f < int32(c)) {
  5724  			break
  5725  		}
  5726  		v.reset(OpMIPSMOVWconst)
  5727  		v.AuxInt = 1
  5728  		return true
  5729  	}
  5730  	// match: (SGTconst [c] (MOVBreg _))
  5731  	// cond: int32(c) <= -0x80
  5732  	// result: (MOVWconst [0])
  5733  	for {
  5734  		c := v.AuxInt
  5735  		v_0 := v.Args[0]
  5736  		if v_0.Op != OpMIPSMOVBreg {
  5737  			break
  5738  		}
  5739  		if !(int32(c) <= -0x80) {
  5740  			break
  5741  		}
  5742  		v.reset(OpMIPSMOVWconst)
  5743  		v.AuxInt = 0
  5744  		return true
  5745  	}
  5746  	// match: (SGTconst [c] (MOVBUreg _))
  5747  	// cond: 0xff < int32(c)
  5748  	// result: (MOVWconst [1])
  5749  	for {
  5750  		c := v.AuxInt
  5751  		v_0 := v.Args[0]
  5752  		if v_0.Op != OpMIPSMOVBUreg {
  5753  			break
  5754  		}
  5755  		if !(0xff < int32(c)) {
  5756  			break
  5757  		}
  5758  		v.reset(OpMIPSMOVWconst)
  5759  		v.AuxInt = 1
  5760  		return true
  5761  	}
  5762  	// match: (SGTconst [c] (MOVBUreg _))
  5763  	// cond: int32(c) < 0
  5764  	// result: (MOVWconst [0])
  5765  	for {
  5766  		c := v.AuxInt
  5767  		v_0 := v.Args[0]
  5768  		if v_0.Op != OpMIPSMOVBUreg {
  5769  			break
  5770  		}
  5771  		if !(int32(c) < 0) {
  5772  			break
  5773  		}
  5774  		v.reset(OpMIPSMOVWconst)
  5775  		v.AuxInt = 0
  5776  		return true
  5777  	}
  5778  	// match: (SGTconst [c] (MOVHreg _))
  5779  	// cond: 0x7fff < int32(c)
  5780  	// result: (MOVWconst [1])
  5781  	for {
  5782  		c := v.AuxInt
  5783  		v_0 := v.Args[0]
  5784  		if v_0.Op != OpMIPSMOVHreg {
  5785  			break
  5786  		}
  5787  		if !(0x7fff < int32(c)) {
  5788  			break
  5789  		}
  5790  		v.reset(OpMIPSMOVWconst)
  5791  		v.AuxInt = 1
  5792  		return true
  5793  	}
  5794  	// match: (SGTconst [c] (MOVHreg _))
  5795  	// cond: int32(c) <= -0x8000
  5796  	// result: (MOVWconst [0])
  5797  	for {
  5798  		c := v.AuxInt
  5799  		v_0 := v.Args[0]
  5800  		if v_0.Op != OpMIPSMOVHreg {
  5801  			break
  5802  		}
  5803  		if !(int32(c) <= -0x8000) {
  5804  			break
  5805  		}
  5806  		v.reset(OpMIPSMOVWconst)
  5807  		v.AuxInt = 0
  5808  		return true
  5809  	}
  5810  	// match: (SGTconst [c] (MOVHUreg _))
  5811  	// cond: 0xffff < int32(c)
  5812  	// result: (MOVWconst [1])
  5813  	for {
  5814  		c := v.AuxInt
  5815  		v_0 := v.Args[0]
  5816  		if v_0.Op != OpMIPSMOVHUreg {
  5817  			break
  5818  		}
  5819  		if !(0xffff < int32(c)) {
  5820  			break
  5821  		}
  5822  		v.reset(OpMIPSMOVWconst)
  5823  		v.AuxInt = 1
  5824  		return true
  5825  	}
  5826  	// match: (SGTconst [c] (MOVHUreg _))
  5827  	// cond: int32(c) < 0
  5828  	// result: (MOVWconst [0])
  5829  	for {
  5830  		c := v.AuxInt
  5831  		v_0 := v.Args[0]
  5832  		if v_0.Op != OpMIPSMOVHUreg {
  5833  			break
  5834  		}
  5835  		if !(int32(c) < 0) {
  5836  			break
  5837  		}
  5838  		v.reset(OpMIPSMOVWconst)
  5839  		v.AuxInt = 0
  5840  		return true
  5841  	}
  5842  	return false
  5843  }
  5844  func rewriteValueMIPS_OpMIPSSGTconst_10(v *Value) bool {
  5845  	// match: (SGTconst [c] (ANDconst [m] _))
  5846  	// cond: 0 <= int32(m) && int32(m) < int32(c)
  5847  	// result: (MOVWconst [1])
  5848  	for {
  5849  		c := v.AuxInt
  5850  		v_0 := v.Args[0]
  5851  		if v_0.Op != OpMIPSANDconst {
  5852  			break
  5853  		}
  5854  		m := v_0.AuxInt
  5855  		if !(0 <= int32(m) && int32(m) < int32(c)) {
  5856  			break
  5857  		}
  5858  		v.reset(OpMIPSMOVWconst)
  5859  		v.AuxInt = 1
  5860  		return true
  5861  	}
  5862  	// match: (SGTconst [c] (SRLconst _ [d]))
  5863  	// cond: 0 <= int32(c) && uint32(d) <= 31 && 1<<(32-uint32(d)) <= int32(c)
  5864  	// result: (MOVWconst [1])
  5865  	for {
  5866  		c := v.AuxInt
  5867  		v_0 := v.Args[0]
  5868  		if v_0.Op != OpMIPSSRLconst {
  5869  			break
  5870  		}
  5871  		d := v_0.AuxInt
  5872  		if !(0 <= int32(c) && uint32(d) <= 31 && 1<<(32-uint32(d)) <= int32(c)) {
  5873  			break
  5874  		}
  5875  		v.reset(OpMIPSMOVWconst)
  5876  		v.AuxInt = 1
  5877  		return true
  5878  	}
  5879  	return false
  5880  }
  5881  func rewriteValueMIPS_OpMIPSSGTzero_0(v *Value) bool {
  5882  	// match: (SGTzero (MOVWconst [d]))
  5883  	// cond: int32(d) > 0
  5884  	// result: (MOVWconst [1])
  5885  	for {
  5886  		v_0 := v.Args[0]
  5887  		if v_0.Op != OpMIPSMOVWconst {
  5888  			break
  5889  		}
  5890  		d := v_0.AuxInt
  5891  		if !(int32(d) > 0) {
  5892  			break
  5893  		}
  5894  		v.reset(OpMIPSMOVWconst)
  5895  		v.AuxInt = 1
  5896  		return true
  5897  	}
  5898  	// match: (SGTzero (MOVWconst [d]))
  5899  	// cond: int32(d) <= 0
  5900  	// result: (MOVWconst [0])
  5901  	for {
  5902  		v_0 := v.Args[0]
  5903  		if v_0.Op != OpMIPSMOVWconst {
  5904  			break
  5905  		}
  5906  		d := v_0.AuxInt
  5907  		if !(int32(d) <= 0) {
  5908  			break
  5909  		}
  5910  		v.reset(OpMIPSMOVWconst)
  5911  		v.AuxInt = 0
  5912  		return true
  5913  	}
  5914  	return false
  5915  }
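        // SLL with a constant shift amount of 32 or more is folded to the constant 0,
        // and any other constant amount is absorbed into SLLconst, whose own rule
        // below folds the shift entirely when the shifted value is also constant.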
  5916  func rewriteValueMIPS_OpMIPSSLL_0(v *Value) bool {
  5917  	// match: (SLL _ (MOVWconst [c]))
  5918  	// cond: uint32(c)>=32
  5919  	// result: (MOVWconst [0])
  5920  	for {
  5921  		_ = v.Args[1]
  5922  		v_1 := v.Args[1]
  5923  		if v_1.Op != OpMIPSMOVWconst {
  5924  			break
  5925  		}
  5926  		c := v_1.AuxInt
  5927  		if !(uint32(c) >= 32) {
  5928  			break
  5929  		}
  5930  		v.reset(OpMIPSMOVWconst)
  5931  		v.AuxInt = 0
  5932  		return true
  5933  	}
  5934  	// match: (SLL x (MOVWconst [c]))
  5935  	// cond:
  5936  	// result: (SLLconst x [c])
  5937  	for {
  5938  		_ = v.Args[1]
  5939  		x := v.Args[0]
  5940  		v_1 := v.Args[1]
  5941  		if v_1.Op != OpMIPSMOVWconst {
  5942  			break
  5943  		}
  5944  		c := v_1.AuxInt
  5945  		v.reset(OpMIPSSLLconst)
  5946  		v.AuxInt = c
  5947  		v.AddArg(x)
  5948  		return true
  5949  	}
  5950  	return false
  5951  }
  5952  func rewriteValueMIPS_OpMIPSSLLconst_0(v *Value) bool {
  5953  	// match: (SLLconst [c] (MOVWconst [d]))
  5954  	// cond:
  5955  	// result: (MOVWconst [int64(int32(uint32(d)<<uint32(c)))])
  5956  	for {
  5957  		c := v.AuxInt
  5958  		v_0 := v.Args[0]
  5959  		if v_0.Op != OpMIPSMOVWconst {
  5960  			break
  5961  		}
  5962  		d := v_0.AuxInt
  5963  		v.reset(OpMIPSMOVWconst)
  5964  		v.AuxInt = int64(int32(uint32(d) << uint32(c)))
  5965  		return true
  5966  	}
  5967  	return false
  5968  }
  5969  func rewriteValueMIPS_OpMIPSSRA_0(v *Value) bool {
  5970  	// match: (SRA x (MOVWconst [c]))
  5971  	// cond: uint32(c)>=32
  5972  	// result: (SRAconst x [31])
  5973  	for {
  5974  		_ = v.Args[1]
  5975  		x := v.Args[0]
  5976  		v_1 := v.Args[1]
  5977  		if v_1.Op != OpMIPSMOVWconst {
  5978  			break
  5979  		}
  5980  		c := v_1.AuxInt
  5981  		if !(uint32(c) >= 32) {
  5982  			break
  5983  		}
  5984  		v.reset(OpMIPSSRAconst)
  5985  		v.AuxInt = 31
  5986  		v.AddArg(x)
  5987  		return true
  5988  	}
  5989  	// match: (SRA x (MOVWconst [c]))
  5990  	// cond:
  5991  	// result: (SRAconst x [c])
  5992  	for {
  5993  		_ = v.Args[1]
  5994  		x := v.Args[0]
  5995  		v_1 := v.Args[1]
  5996  		if v_1.Op != OpMIPSMOVWconst {
  5997  			break
  5998  		}
  5999  		c := v_1.AuxInt
  6000  		v.reset(OpMIPSSRAconst)
  6001  		v.AuxInt = c
  6002  		v.AddArg(x)
  6003  		return true
  6004  	}
  6005  	return false
  6006  }
  6007  func rewriteValueMIPS_OpMIPSSRAconst_0(v *Value) bool {
  6008  	// match: (SRAconst [c] (MOVWconst [d]))
  6009  	// cond:
  6010  	// result: (MOVWconst [int64(int32(d)>>uint32(c))])
  6011  	for {
  6012  		c := v.AuxInt
  6013  		v_0 := v.Args[0]
  6014  		if v_0.Op != OpMIPSMOVWconst {
  6015  			break
  6016  		}
  6017  		d := v_0.AuxInt
  6018  		v.reset(OpMIPSMOVWconst)
  6019  		v.AuxInt = int64(int32(d) >> uint32(c))
  6020  		return true
  6021  	}
  6022  	return false
  6023  }
  6024  func rewriteValueMIPS_OpMIPSSRL_0(v *Value) bool {
  6025  	// match: (SRL _ (MOVWconst [c]))
  6026  	// cond: uint32(c)>=32
  6027  	// result: (MOVWconst [0])
  6028  	for {
  6029  		_ = v.Args[1]
  6030  		v_1 := v.Args[1]
  6031  		if v_1.Op != OpMIPSMOVWconst {
  6032  			break
  6033  		}
  6034  		c := v_1.AuxInt
  6035  		if !(uint32(c) >= 32) {
  6036  			break
  6037  		}
  6038  		v.reset(OpMIPSMOVWconst)
  6039  		v.AuxInt = 0
  6040  		return true
  6041  	}
  6042  	// match: (SRL x (MOVWconst [c]))
  6043  	// cond:
  6044  	// result: (SRLconst x [c])
  6045  	for {
  6046  		_ = v.Args[1]
  6047  		x := v.Args[0]
  6048  		v_1 := v.Args[1]
  6049  		if v_1.Op != OpMIPSMOVWconst {
  6050  			break
  6051  		}
  6052  		c := v_1.AuxInt
  6053  		v.reset(OpMIPSSRLconst)
  6054  		v.AuxInt = c
  6055  		v.AddArg(x)
  6056  		return true
  6057  	}
  6058  	return false
  6059  }
  6060  func rewriteValueMIPS_OpMIPSSRLconst_0(v *Value) bool {
  6061  	// match: (SRLconst [c] (MOVWconst [d]))
  6062  	// cond:
  6063  	// result: (MOVWconst [int64(uint32(d)>>uint32(c))])
  6064  	for {
  6065  		c := v.AuxInt
  6066  		v_0 := v.Args[0]
  6067  		if v_0.Op != OpMIPSMOVWconst {
  6068  			break
  6069  		}
  6070  		d := v_0.AuxInt
  6071  		v.reset(OpMIPSMOVWconst)
  6072  		v.AuxInt = int64(uint32(d) >> uint32(c))
  6073  		return true
  6074  	}
  6075  	return false
  6076  }
  6077  func rewriteValueMIPS_OpMIPSSUB_0(v *Value) bool {
  6078  	// match: (SUB x (MOVWconst [c]))
  6079  	// cond:
  6080  	// result: (SUBconst [c] x)
  6081  	for {
  6082  		_ = v.Args[1]
  6083  		x := v.Args[0]
  6084  		v_1 := v.Args[1]
  6085  		if v_1.Op != OpMIPSMOVWconst {
  6086  			break
  6087  		}
  6088  		c := v_1.AuxInt
  6089  		v.reset(OpMIPSSUBconst)
  6090  		v.AuxInt = c
  6091  		v.AddArg(x)
  6092  		return true
  6093  	}
  6094  	// match: (SUB x x)
  6095  	// cond:
  6096  	// result: (MOVWconst [0])
  6097  	for {
  6098  		_ = v.Args[1]
  6099  		x := v.Args[0]
  6100  		if x != v.Args[1] {
  6101  			break
  6102  		}
  6103  		v.reset(OpMIPSMOVWconst)
  6104  		v.AuxInt = 0
  6105  		return true
  6106  	}
  6107  	// match: (SUB (MOVWconst [0]) x)
  6108  	// cond:
  6109  	// result: (NEG x)
  6110  	for {
  6111  		_ = v.Args[1]
  6112  		v_0 := v.Args[0]
  6113  		if v_0.Op != OpMIPSMOVWconst {
  6114  			break
  6115  		}
  6116  		if v_0.AuxInt != 0 {
  6117  			break
  6118  		}
  6119  		x := v.Args[1]
  6120  		v.reset(OpMIPSNEG)
  6121  		v.AddArg(x)
  6122  		return true
  6123  	}
  6124  	return false
  6125  }
  6126  func rewriteValueMIPS_OpMIPSSUBconst_0(v *Value) bool {
  6127  	// match: (SUBconst [0] x)
  6128  	// cond:
  6129  	// result: x
  6130  	for {
  6131  		if v.AuxInt != 0 {
  6132  			break
  6133  		}
  6134  		x := v.Args[0]
  6135  		v.reset(OpCopy)
  6136  		v.Type = x.Type
  6137  		v.AddArg(x)
  6138  		return true
  6139  	}
  6140  	// match: (SUBconst [c] (MOVWconst [d]))
  6141  	// cond:
  6142  	// result: (MOVWconst [int64(int32(d-c))])
  6143  	for {
  6144  		c := v.AuxInt
  6145  		v_0 := v.Args[0]
  6146  		if v_0.Op != OpMIPSMOVWconst {
  6147  			break
  6148  		}
  6149  		d := v_0.AuxInt
  6150  		v.reset(OpMIPSMOVWconst)
  6151  		v.AuxInt = int64(int32(d - c))
  6152  		return true
  6153  	}
  6154  	// match: (SUBconst [c] (SUBconst [d] x))
  6155  	// cond:
  6156  	// result: (ADDconst [int64(int32(-c-d))] x)
  6157  	for {
  6158  		c := v.AuxInt
  6159  		v_0 := v.Args[0]
  6160  		if v_0.Op != OpMIPSSUBconst {
  6161  			break
  6162  		}
  6163  		d := v_0.AuxInt
  6164  		x := v_0.Args[0]
  6165  		v.reset(OpMIPSADDconst)
  6166  		v.AuxInt = int64(int32(-c - d))
  6167  		v.AddArg(x)
  6168  		return true
  6169  	}
  6170  	// match: (SUBconst [c] (ADDconst [d] x))
  6171  	// cond:
  6172  	// result: (ADDconst [int64(int32(-c+d))] x)
  6173  	for {
  6174  		c := v.AuxInt
  6175  		v_0 := v.Args[0]
  6176  		if v_0.Op != OpMIPSADDconst {
  6177  			break
  6178  		}
  6179  		d := v_0.AuxInt
  6180  		x := v_0.Args[0]
  6181  		v.reset(OpMIPSADDconst)
  6182  		v.AuxInt = int64(int32(-c + d))
  6183  		v.AddArg(x)
  6184  		return true
  6185  	}
  6186  	return false
  6187  }
  6188  func rewriteValueMIPS_OpMIPSXOR_0(v *Value) bool {
  6189  	// match: (XOR x (MOVWconst [c]))
  6190  	// cond:
  6191  	// result: (XORconst [c] x)
  6192  	for {
  6193  		_ = v.Args[1]
  6194  		x := v.Args[0]
  6195  		v_1 := v.Args[1]
  6196  		if v_1.Op != OpMIPSMOVWconst {
  6197  			break
  6198  		}
  6199  		c := v_1.AuxInt
  6200  		v.reset(OpMIPSXORconst)
  6201  		v.AuxInt = c
  6202  		v.AddArg(x)
  6203  		return true
  6204  	}
  6205  	// match: (XOR (MOVWconst [c]) x)
  6206  	// cond:
  6207  	// result: (XORconst [c] x)
  6208  	for {
  6209  		_ = v.Args[1]
  6210  		v_0 := v.Args[0]
  6211  		if v_0.Op != OpMIPSMOVWconst {
  6212  			break
  6213  		}
  6214  		c := v_0.AuxInt
  6215  		x := v.Args[1]
  6216  		v.reset(OpMIPSXORconst)
  6217  		v.AuxInt = c
  6218  		v.AddArg(x)
  6219  		return true
  6220  	}
  6221  	// match: (XOR x x)
  6222  	// cond:
  6223  	// result: (MOVWconst [0])
  6224  	for {
  6225  		_ = v.Args[1]
  6226  		x := v.Args[0]
  6227  		if x != v.Args[1] {
  6228  			break
  6229  		}
  6230  		v.reset(OpMIPSMOVWconst)
  6231  		v.AuxInt = 0
  6232  		return true
  6233  	}
  6234  	return false
  6235  }
  6236  func rewriteValueMIPS_OpMIPSXORconst_0(v *Value) bool {
  6237  	// match: (XORconst [0] x)
  6238  	// cond:
  6239  	// result: x
  6240  	for {
  6241  		if v.AuxInt != 0 {
  6242  			break
  6243  		}
  6244  		x := v.Args[0]
  6245  		v.reset(OpCopy)
  6246  		v.Type = x.Type
  6247  		v.AddArg(x)
  6248  		return true
  6249  	}
  6250  	// match: (XORconst [-1] x)
  6251  	// cond:
  6252  	// result: (NORconst [0] x)
  6253  	for {
  6254  		if v.AuxInt != -1 {
  6255  			break
  6256  		}
  6257  		x := v.Args[0]
  6258  		v.reset(OpMIPSNORconst)
  6259  		v.AuxInt = 0
  6260  		v.AddArg(x)
  6261  		return true
  6262  	}
  6263  	// match: (XORconst [c] (MOVWconst [d]))
  6264  	// cond:
  6265  	// result: (MOVWconst [c^d])
  6266  	for {
  6267  		c := v.AuxInt
  6268  		v_0 := v.Args[0]
  6269  		if v_0.Op != OpMIPSMOVWconst {
  6270  			break
  6271  		}
  6272  		d := v_0.AuxInt
  6273  		v.reset(OpMIPSMOVWconst)
  6274  		v.AuxInt = c ^ d
  6275  		return true
  6276  	}
  6277  	// match: (XORconst [c] (XORconst [d] x))
  6278  	// cond:
  6279  	// result: (XORconst [c^d] x)
  6280  	for {
  6281  		c := v.AuxInt
  6282  		v_0 := v.Args[0]
  6283  		if v_0.Op != OpMIPSXORconst {
  6284  			break
  6285  		}
  6286  		d := v_0.AuxInt
  6287  		x := v_0.Args[0]
  6288  		v.reset(OpMIPSXORconst)
  6289  		v.AuxInt = c ^ d
  6290  		v.AddArg(x)
  6291  		return true
  6292  	}
  6293  	return false
  6294  }
  6295  func rewriteValueMIPS_OpMod16_0(v *Value) bool {
  6296  	b := v.Block
  6297  	_ = b
  6298  	typ := &b.Func.Config.Types
  6299  	_ = typ
  6300  	// match: (Mod16 x y)
  6301  	// cond:
  6302  	// result: (Select0 (DIV (SignExt16to32 x) (SignExt16to32 y)))
  6303  	for {
  6304  		_ = v.Args[1]
  6305  		x := v.Args[0]
  6306  		y := v.Args[1]
  6307  		v.reset(OpSelect0)
  6308  		v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32))
  6309  		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  6310  		v1.AddArg(x)
  6311  		v0.AddArg(v1)
  6312  		v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  6313  		v2.AddArg(y)
  6314  		v0.AddArg(v2)
  6315  		v.AddArg(v0)
  6316  		return true
  6317  	}
  6318  }
  6319  func rewriteValueMIPS_OpMod16u_0(v *Value) bool {
  6320  	b := v.Block
  6321  	_ = b
  6322  	typ := &b.Func.Config.Types
  6323  	_ = typ
  6324  	// match: (Mod16u x y)
  6325  	// cond:
  6326  	// result: (Select0 (DIVU (ZeroExt16to32 x) (ZeroExt16to32 y)))
  6327  	for {
  6328  		_ = v.Args[1]
  6329  		x := v.Args[0]
  6330  		y := v.Args[1]
  6331  		v.reset(OpSelect0)
  6332  		v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32))
  6333  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  6334  		v1.AddArg(x)
  6335  		v0.AddArg(v1)
  6336  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  6337  		v2.AddArg(y)
  6338  		v0.AddArg(v2)
  6339  		v.AddArg(v0)
  6340  		return true
  6341  	}
  6342  }
  6343  func rewriteValueMIPS_OpMod32_0(v *Value) bool {
  6344  	b := v.Block
  6345  	_ = b
  6346  	typ := &b.Func.Config.Types
  6347  	_ = typ
  6348  	// match: (Mod32 x y)
  6349  	// cond:
  6350  	// result: (Select0 (DIV x y))
  6351  	for {
  6352  		_ = v.Args[1]
  6353  		x := v.Args[0]
  6354  		y := v.Args[1]
  6355  		v.reset(OpSelect0)
  6356  		v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32))
  6357  		v0.AddArg(x)
  6358  		v0.AddArg(y)
  6359  		v.AddArg(v0)
  6360  		return true
  6361  	}
  6362  }
  6363  func rewriteValueMIPS_OpMod32u_0(v *Value) bool {
  6364  	b := v.Block
  6365  	_ = b
  6366  	typ := &b.Func.Config.Types
  6367  	_ = typ
  6368  	// match: (Mod32u x y)
  6369  	// cond:
  6370  	// result: (Select0 (DIVU x y))
  6371  	for {
  6372  		_ = v.Args[1]
  6373  		x := v.Args[0]
  6374  		y := v.Args[1]
  6375  		v.reset(OpSelect0)
  6376  		v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32))
  6377  		v0.AddArg(x)
  6378  		v0.AddArg(y)
  6379  		v.AddArg(v0)
  6380  		return true
  6381  	}
  6382  }
  6383  func rewriteValueMIPS_OpMod8_0(v *Value) bool {
  6384  	b := v.Block
  6385  	_ = b
  6386  	typ := &b.Func.Config.Types
  6387  	_ = typ
  6388  	// match: (Mod8 x y)
  6389  	// cond:
  6390  	// result: (Select0 (DIV (SignExt8to32 x) (SignExt8to32 y)))
  6391  	for {
  6392  		_ = v.Args[1]
  6393  		x := v.Args[0]
  6394  		y := v.Args[1]
  6395  		v.reset(OpSelect0)
  6396  		v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32))
  6397  		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  6398  		v1.AddArg(x)
  6399  		v0.AddArg(v1)
  6400  		v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  6401  		v2.AddArg(y)
  6402  		v0.AddArg(v2)
  6403  		v.AddArg(v0)
  6404  		return true
  6405  	}
  6406  }
  6407  func rewriteValueMIPS_OpMod8u_0(v *Value) bool {
  6408  	b := v.Block
  6409  	_ = b
  6410  	typ := &b.Func.Config.Types
  6411  	_ = typ
  6412  	// match: (Mod8u x y)
  6413  	// cond:
  6414  	// result: (Select0 (DIVU (ZeroExt8to32 x) (ZeroExt8to32 y)))
  6415  	for {
  6416  		_ = v.Args[1]
  6417  		x := v.Args[0]
  6418  		y := v.Args[1]
  6419  		v.reset(OpSelect0)
  6420  		v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32))
  6421  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  6422  		v1.AddArg(x)
  6423  		v0.AddArg(v1)
  6424  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  6425  		v2.AddArg(y)
  6426  		v0.AddArg(v2)
  6427  		v.AddArg(v0)
  6428  		return true
  6429  	}
  6430  }
  6431  func rewriteValueMIPS_OpMove_0(v *Value) bool {
  6432  	b := v.Block
  6433  	_ = b
  6434  	typ := &b.Func.Config.Types
  6435  	_ = typ
  6436  	// match: (Move [0] _ _ mem)
  6437  	// cond:
  6438  	// result: mem
  6439  	for {
  6440  		if v.AuxInt != 0 {
  6441  			break
  6442  		}
  6443  		_ = v.Args[2]
  6444  		mem := v.Args[2]
  6445  		v.reset(OpCopy)
  6446  		v.Type = mem.Type
  6447  		v.AddArg(mem)
  6448  		return true
  6449  	}
  6450  	// match: (Move [1] dst src mem)
  6451  	// cond:
  6452  	// result: (MOVBstore dst (MOVBUload src mem) mem)
  6453  	for {
  6454  		if v.AuxInt != 1 {
  6455  			break
  6456  		}
  6457  		_ = v.Args[2]
  6458  		dst := v.Args[0]
  6459  		src := v.Args[1]
  6460  		mem := v.Args[2]
  6461  		v.reset(OpMIPSMOVBstore)
  6462  		v.AddArg(dst)
  6463  		v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
  6464  		v0.AddArg(src)
  6465  		v0.AddArg(mem)
  6466  		v.AddArg(v0)
  6467  		v.AddArg(mem)
  6468  		return true
  6469  	}
  6470  	// match: (Move [2] {t} dst src mem)
  6471  	// cond: t.(*types.Type).Alignment()%2 == 0
  6472  	// result: (MOVHstore dst (MOVHUload src mem) mem)
  6473  	for {
  6474  		if v.AuxInt != 2 {
  6475  			break
  6476  		}
  6477  		t := v.Aux
  6478  		_ = v.Args[2]
  6479  		dst := v.Args[0]
  6480  		src := v.Args[1]
  6481  		mem := v.Args[2]
  6482  		if !(t.(*types.Type).Alignment()%2 == 0) {
  6483  			break
  6484  		}
  6485  		v.reset(OpMIPSMOVHstore)
  6486  		v.AddArg(dst)
  6487  		v0 := b.NewValue0(v.Pos, OpMIPSMOVHUload, typ.UInt16)
  6488  		v0.AddArg(src)
  6489  		v0.AddArg(mem)
  6490  		v.AddArg(v0)
  6491  		v.AddArg(mem)
  6492  		return true
  6493  	}
  6494  	// match: (Move [2] dst src mem)
  6495  	// cond:
  6496  	// result: (MOVBstore [1] dst (MOVBUload [1] src mem) (MOVBstore dst (MOVBUload src mem) mem))
  6497  	for {
  6498  		if v.AuxInt != 2 {
  6499  			break
  6500  		}
  6501  		_ = v.Args[2]
  6502  		dst := v.Args[0]
  6503  		src := v.Args[1]
  6504  		mem := v.Args[2]
  6505  		v.reset(OpMIPSMOVBstore)
  6506  		v.AuxInt = 1
  6507  		v.AddArg(dst)
  6508  		v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
  6509  		v0.AuxInt = 1
  6510  		v0.AddArg(src)
  6511  		v0.AddArg(mem)
  6512  		v.AddArg(v0)
  6513  		v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  6514  		v1.AddArg(dst)
  6515  		v2 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
  6516  		v2.AddArg(src)
  6517  		v2.AddArg(mem)
  6518  		v1.AddArg(v2)
  6519  		v1.AddArg(mem)
  6520  		v.AddArg(v1)
  6521  		return true
  6522  	}
  6523  	// match: (Move [4] {t} dst src mem)
  6524  	// cond: t.(*types.Type).Alignment()%4 == 0
  6525  	// result: (MOVWstore dst (MOVWload src mem) mem)
  6526  	for {
  6527  		if v.AuxInt != 4 {
  6528  			break
  6529  		}
  6530  		t := v.Aux
  6531  		_ = v.Args[2]
  6532  		dst := v.Args[0]
  6533  		src := v.Args[1]
  6534  		mem := v.Args[2]
  6535  		if !(t.(*types.Type).Alignment()%4 == 0) {
  6536  			break
  6537  		}
  6538  		v.reset(OpMIPSMOVWstore)
  6539  		v.AddArg(dst)
  6540  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
  6541  		v0.AddArg(src)
  6542  		v0.AddArg(mem)
  6543  		v.AddArg(v0)
  6544  		v.AddArg(mem)
  6545  		return true
  6546  	}
  6547  	// match: (Move [4] {t} dst src mem)
  6548  	// cond: t.(*types.Type).Alignment()%2 == 0
  6549  	// result: (MOVHstore [2] dst (MOVHUload [2] src mem) (MOVHstore dst (MOVHUload src mem) mem))
  6550  	for {
  6551  		if v.AuxInt != 4 {
  6552  			break
  6553  		}
  6554  		t := v.Aux
  6555  		_ = v.Args[2]
  6556  		dst := v.Args[0]
  6557  		src := v.Args[1]
  6558  		mem := v.Args[2]
  6559  		if !(t.(*types.Type).Alignment()%2 == 0) {
  6560  			break
  6561  		}
  6562  		v.reset(OpMIPSMOVHstore)
  6563  		v.AuxInt = 2
  6564  		v.AddArg(dst)
  6565  		v0 := b.NewValue0(v.Pos, OpMIPSMOVHUload, typ.UInt16)
  6566  		v0.AuxInt = 2
  6567  		v0.AddArg(src)
  6568  		v0.AddArg(mem)
  6569  		v.AddArg(v0)
  6570  		v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
  6571  		v1.AddArg(dst)
  6572  		v2 := b.NewValue0(v.Pos, OpMIPSMOVHUload, typ.UInt16)
  6573  		v2.AddArg(src)
  6574  		v2.AddArg(mem)
  6575  		v1.AddArg(v2)
  6576  		v1.AddArg(mem)
  6577  		v.AddArg(v1)
  6578  		return true
  6579  	}
  6580  	// match: (Move [4] dst src mem)
  6581  	// cond:
  6582  	// result: (MOVBstore [3] dst (MOVBUload [3] src mem) (MOVBstore [2] dst (MOVBUload [2] src mem) (MOVBstore [1] dst (MOVBUload [1] src mem) (MOVBstore dst (MOVBUload src mem) mem))))
  6583  	for {
  6584  		if v.AuxInt != 4 {
  6585  			break
  6586  		}
  6587  		_ = v.Args[2]
  6588  		dst := v.Args[0]
  6589  		src := v.Args[1]
  6590  		mem := v.Args[2]
  6591  		v.reset(OpMIPSMOVBstore)
  6592  		v.AuxInt = 3
  6593  		v.AddArg(dst)
  6594  		v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
  6595  		v0.AuxInt = 3
  6596  		v0.AddArg(src)
  6597  		v0.AddArg(mem)
  6598  		v.AddArg(v0)
  6599  		v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  6600  		v1.AuxInt = 2
  6601  		v1.AddArg(dst)
  6602  		v2 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
  6603  		v2.AuxInt = 2
  6604  		v2.AddArg(src)
  6605  		v2.AddArg(mem)
  6606  		v1.AddArg(v2)
  6607  		v3 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  6608  		v3.AuxInt = 1
  6609  		v3.AddArg(dst)
  6610  		v4 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
  6611  		v4.AuxInt = 1
  6612  		v4.AddArg(src)
  6613  		v4.AddArg(mem)
  6614  		v3.AddArg(v4)
  6615  		v5 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  6616  		v5.AddArg(dst)
  6617  		v6 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
  6618  		v6.AddArg(src)
  6619  		v6.AddArg(mem)
  6620  		v5.AddArg(v6)
  6621  		v5.AddArg(mem)
  6622  		v3.AddArg(v5)
  6623  		v1.AddArg(v3)
  6624  		v.AddArg(v1)
  6625  		return true
  6626  	}
  6627  	// match: (Move [3] dst src mem)
  6628  	// cond:
  6629  	// result: (MOVBstore [2] dst (MOVBUload [2] src mem) (MOVBstore [1] dst (MOVBUload [1] src mem) (MOVBstore dst (MOVBUload src mem) mem)))
  6630  	for {
  6631  		if v.AuxInt != 3 {
  6632  			break
  6633  		}
  6634  		_ = v.Args[2]
  6635  		dst := v.Args[0]
  6636  		src := v.Args[1]
  6637  		mem := v.Args[2]
  6638  		v.reset(OpMIPSMOVBstore)
  6639  		v.AuxInt = 2
  6640  		v.AddArg(dst)
  6641  		v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
  6642  		v0.AuxInt = 2
  6643  		v0.AddArg(src)
  6644  		v0.AddArg(mem)
  6645  		v.AddArg(v0)
  6646  		v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  6647  		v1.AuxInt = 1
  6648  		v1.AddArg(dst)
  6649  		v2 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
  6650  		v2.AuxInt = 1
  6651  		v2.AddArg(src)
  6652  		v2.AddArg(mem)
  6653  		v1.AddArg(v2)
  6654  		v3 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  6655  		v3.AddArg(dst)
  6656  		v4 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
  6657  		v4.AddArg(src)
  6658  		v4.AddArg(mem)
  6659  		v3.AddArg(v4)
  6660  		v3.AddArg(mem)
  6661  		v1.AddArg(v3)
  6662  		v.AddArg(v1)
  6663  		return true
  6664  	}
  6665  	// match: (Move [8] {t} dst src mem)
  6666  	// cond: t.(*types.Type).Alignment()%4 == 0
  6667  	// result: (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))
  6668  	for {
  6669  		if v.AuxInt != 8 {
  6670  			break
  6671  		}
  6672  		t := v.Aux
  6673  		_ = v.Args[2]
  6674  		dst := v.Args[0]
  6675  		src := v.Args[1]
  6676  		mem := v.Args[2]
  6677  		if !(t.(*types.Type).Alignment()%4 == 0) {
  6678  			break
  6679  		}
  6680  		v.reset(OpMIPSMOVWstore)
  6681  		v.AuxInt = 4
  6682  		v.AddArg(dst)
  6683  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
  6684  		v0.AuxInt = 4
  6685  		v0.AddArg(src)
  6686  		v0.AddArg(mem)
  6687  		v.AddArg(v0)
  6688  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  6689  		v1.AddArg(dst)
  6690  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
  6691  		v2.AddArg(src)
  6692  		v2.AddArg(mem)
  6693  		v1.AddArg(v2)
  6694  		v1.AddArg(mem)
  6695  		v.AddArg(v1)
  6696  		return true
  6697  	}
  6698  	// match: (Move [8] {t} dst src mem)
  6699  	// cond: t.(*types.Type).Alignment()%2 == 0
  6700  	// result: (MOVHstore [6] dst (MOVHload [6] src mem) (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))))
  6701  	for {
  6702  		if v.AuxInt != 8 {
  6703  			break
  6704  		}
  6705  		t := v.Aux
  6706  		_ = v.Args[2]
  6707  		dst := v.Args[0]
  6708  		src := v.Args[1]
  6709  		mem := v.Args[2]
  6710  		if !(t.(*types.Type).Alignment()%2 == 0) {
  6711  			break
  6712  		}
  6713  		v.reset(OpMIPSMOVHstore)
  6714  		v.AuxInt = 6
  6715  		v.AddArg(dst)
  6716  		v0 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16)
  6717  		v0.AuxInt = 6
  6718  		v0.AddArg(src)
  6719  		v0.AddArg(mem)
  6720  		v.AddArg(v0)
  6721  		v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
  6722  		v1.AuxInt = 4
  6723  		v1.AddArg(dst)
  6724  		v2 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16)
  6725  		v2.AuxInt = 4
  6726  		v2.AddArg(src)
  6727  		v2.AddArg(mem)
  6728  		v1.AddArg(v2)
  6729  		v3 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
  6730  		v3.AuxInt = 2
  6731  		v3.AddArg(dst)
  6732  		v4 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16)
  6733  		v4.AuxInt = 2
  6734  		v4.AddArg(src)
  6735  		v4.AddArg(mem)
  6736  		v3.AddArg(v4)
  6737  		v5 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
  6738  		v5.AddArg(dst)
  6739  		v6 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16)
  6740  		v6.AddArg(src)
  6741  		v6.AddArg(mem)
  6742  		v5.AddArg(v6)
  6743  		v5.AddArg(mem)
  6744  		v3.AddArg(v5)
  6745  		v1.AddArg(v3)
  6746  		v.AddArg(v1)
  6747  		return true
  6748  	}
  6749  	return false
  6750  }
  6751  func rewriteValueMIPS_OpMove_10(v *Value) bool {
  6752  	b := v.Block
  6753  	_ = b
  6754  	config := b.Func.Config
  6755  	_ = config
  6756  	typ := &b.Func.Config.Types
  6757  	_ = typ
  6758  	// match: (Move [6] {t} dst src mem)
  6759  	// cond: t.(*types.Type).Alignment()%2 == 0
  6760  	// result: (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)))
  6761  	for {
  6762  		if v.AuxInt != 6 {
  6763  			break
  6764  		}
  6765  		t := v.Aux
  6766  		_ = v.Args[2]
  6767  		dst := v.Args[0]
  6768  		src := v.Args[1]
  6769  		mem := v.Args[2]
  6770  		if !(t.(*types.Type).Alignment()%2 == 0) {
  6771  			break
  6772  		}
  6773  		v.reset(OpMIPSMOVHstore)
  6774  		v.AuxInt = 4
  6775  		v.AddArg(dst)
  6776  		v0 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16)
  6777  		v0.AuxInt = 4
  6778  		v0.AddArg(src)
  6779  		v0.AddArg(mem)
  6780  		v.AddArg(v0)
  6781  		v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
  6782  		v1.AuxInt = 2
  6783  		v1.AddArg(dst)
  6784  		v2 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16)
  6785  		v2.AuxInt = 2
  6786  		v2.AddArg(src)
  6787  		v2.AddArg(mem)
  6788  		v1.AddArg(v2)
  6789  		v3 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
  6790  		v3.AddArg(dst)
  6791  		v4 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16)
  6792  		v4.AddArg(src)
  6793  		v4.AddArg(mem)
  6794  		v3.AddArg(v4)
  6795  		v3.AddArg(mem)
  6796  		v1.AddArg(v3)
  6797  		v.AddArg(v1)
  6798  		return true
  6799  	}
  6800  	// match: (Move [12] {t} dst src mem)
  6801  	// cond: t.(*types.Type).Alignment()%4 == 0
  6802  	// result: (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem)))
  6803  	for {
  6804  		if v.AuxInt != 12 {
  6805  			break
  6806  		}
  6807  		t := v.Aux
  6808  		_ = v.Args[2]
  6809  		dst := v.Args[0]
  6810  		src := v.Args[1]
  6811  		mem := v.Args[2]
  6812  		if !(t.(*types.Type).Alignment()%4 == 0) {
  6813  			break
  6814  		}
  6815  		v.reset(OpMIPSMOVWstore)
  6816  		v.AuxInt = 8
  6817  		v.AddArg(dst)
  6818  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
  6819  		v0.AuxInt = 8
  6820  		v0.AddArg(src)
  6821  		v0.AddArg(mem)
  6822  		v.AddArg(v0)
  6823  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  6824  		v1.AuxInt = 4
  6825  		v1.AddArg(dst)
  6826  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
  6827  		v2.AuxInt = 4
  6828  		v2.AddArg(src)
  6829  		v2.AddArg(mem)
  6830  		v1.AddArg(v2)
  6831  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  6832  		v3.AddArg(dst)
  6833  		v4 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
  6834  		v4.AddArg(src)
  6835  		v4.AddArg(mem)
  6836  		v3.AddArg(v4)
  6837  		v3.AddArg(mem)
  6838  		v1.AddArg(v3)
  6839  		v.AddArg(v1)
  6840  		return true
  6841  	}
  6842  	// match: (Move [16] {t} dst src mem)
  6843  	// cond: t.(*types.Type).Alignment()%4 == 0
  6844  	// result: (MOVWstore [12] dst (MOVWload [12] src mem) (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))))
  6845  	for {
  6846  		if v.AuxInt != 16 {
  6847  			break
  6848  		}
  6849  		t := v.Aux
  6850  		_ = v.Args[2]
  6851  		dst := v.Args[0]
  6852  		src := v.Args[1]
  6853  		mem := v.Args[2]
  6854  		if !(t.(*types.Type).Alignment()%4 == 0) {
  6855  			break
  6856  		}
  6857  		v.reset(OpMIPSMOVWstore)
  6858  		v.AuxInt = 12
  6859  		v.AddArg(dst)
  6860  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
  6861  		v0.AuxInt = 12
  6862  		v0.AddArg(src)
  6863  		v0.AddArg(mem)
  6864  		v.AddArg(v0)
  6865  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  6866  		v1.AuxInt = 8
  6867  		v1.AddArg(dst)
  6868  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
  6869  		v2.AuxInt = 8
  6870  		v2.AddArg(src)
  6871  		v2.AddArg(mem)
  6872  		v1.AddArg(v2)
  6873  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  6874  		v3.AuxInt = 4
  6875  		v3.AddArg(dst)
  6876  		v4 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
  6877  		v4.AuxInt = 4
  6878  		v4.AddArg(src)
  6879  		v4.AddArg(mem)
  6880  		v3.AddArg(v4)
  6881  		v5 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  6882  		v5.AddArg(dst)
  6883  		v6 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
  6884  		v6.AddArg(src)
  6885  		v6.AddArg(mem)
  6886  		v5.AddArg(v6)
  6887  		v5.AddArg(mem)
  6888  		v3.AddArg(v5)
  6889  		v1.AddArg(v3)
  6890  		v.AddArg(v1)
  6891  		return true
  6892  	}
  6893  	// match: (Move [s] {t} dst src mem)
  6894  	// cond: (s > 16 || t.(*types.Type).Alignment()%4 != 0)
  6895  	// result: (LoweredMove [t.(*types.Type).Alignment()] dst src (ADDconst <src.Type> src [s-moveSize(t.(*types.Type).Alignment(), config)]) mem)
  6896  	for {
  6897  		s := v.AuxInt
  6898  		t := v.Aux
  6899  		_ = v.Args[2]
  6900  		dst := v.Args[0]
  6901  		src := v.Args[1]
  6902  		mem := v.Args[2]
  6903  		if !(s > 16 || t.(*types.Type).Alignment()%4 != 0) {
  6904  			break
  6905  		}
  6906  		v.reset(OpMIPSLoweredMove)
  6907  		v.AuxInt = t.(*types.Type).Alignment()
  6908  		v.AddArg(dst)
  6909  		v.AddArg(src)
  6910  		v0 := b.NewValue0(v.Pos, OpMIPSADDconst, src.Type)
  6911  		v0.AuxInt = s - moveSize(t.(*types.Type).Alignment(), config)
  6912  		v0.AddArg(src)
  6913  		v.AddArg(v0)
  6914  		v.AddArg(mem)
  6915  		return true
  6916  	}
  6917  	return false
  6918  }
  6919  func rewriteValueMIPS_OpMul16_0(v *Value) bool {
  6920  	// match: (Mul16 x y)
  6921  	// cond:
  6922  	// result: (MUL x y)
  6923  	for {
  6924  		_ = v.Args[1]
  6925  		x := v.Args[0]
  6926  		y := v.Args[1]
  6927  		v.reset(OpMIPSMUL)
  6928  		v.AddArg(x)
  6929  		v.AddArg(y)
  6930  		return true
  6931  	}
  6932  }
  6933  func rewriteValueMIPS_OpMul32_0(v *Value) bool {
  6934  	// match: (Mul32 x y)
  6935  	// cond:
  6936  	// result: (MUL x y)
  6937  	for {
  6938  		_ = v.Args[1]
  6939  		x := v.Args[0]
  6940  		y := v.Args[1]
  6941  		v.reset(OpMIPSMUL)
  6942  		v.AddArg(x)
  6943  		v.AddArg(y)
  6944  		return true
  6945  	}
  6946  }
  6947  func rewriteValueMIPS_OpMul32F_0(v *Value) bool {
  6948  	// match: (Mul32F x y)
  6949  	// cond:
  6950  	// result: (MULF x y)
  6951  	for {
  6952  		_ = v.Args[1]
  6953  		x := v.Args[0]
  6954  		y := v.Args[1]
  6955  		v.reset(OpMIPSMULF)
  6956  		v.AddArg(x)
  6957  		v.AddArg(y)
  6958  		return true
  6959  	}
  6960  }
  6961  func rewriteValueMIPS_OpMul32uhilo_0(v *Value) bool {
  6962  	// match: (Mul32uhilo x y)
  6963  	// cond:
  6964  	// result: (MULTU x y)
  6965  	for {
  6966  		_ = v.Args[1]
  6967  		x := v.Args[0]
  6968  		y := v.Args[1]
  6969  		v.reset(OpMIPSMULTU)
  6970  		v.AddArg(x)
  6971  		v.AddArg(y)
  6972  		return true
  6973  	}
  6974  }
  6975  func rewriteValueMIPS_OpMul64F_0(v *Value) bool {
  6976  	// match: (Mul64F x y)
  6977  	// cond:
  6978  	// result: (MULD x y)
  6979  	for {
  6980  		_ = v.Args[1]
  6981  		x := v.Args[0]
  6982  		y := v.Args[1]
  6983  		v.reset(OpMIPSMULD)
  6984  		v.AddArg(x)
  6985  		v.AddArg(y)
  6986  		return true
  6987  	}
  6988  }
  6989  func rewriteValueMIPS_OpMul8_0(v *Value) bool {
  6990  	// match: (Mul8 x y)
  6991  	// cond:
  6992  	// result: (MUL x y)
  6993  	for {
  6994  		_ = v.Args[1]
  6995  		x := v.Args[0]
  6996  		y := v.Args[1]
  6997  		v.reset(OpMIPSMUL)
  6998  		v.AddArg(x)
  6999  		v.AddArg(y)
  7000  		return true
  7001  	}
  7002  }
  7003  func rewriteValueMIPS_OpNeg16_0(v *Value) bool {
  7004  	// match: (Neg16 x)
  7005  	// cond:
  7006  	// result: (NEG x)
  7007  	for {
  7008  		x := v.Args[0]
  7009  		v.reset(OpMIPSNEG)
  7010  		v.AddArg(x)
  7011  		return true
  7012  	}
  7013  }
  7014  func rewriteValueMIPS_OpNeg32_0(v *Value) bool {
  7015  	// match: (Neg32 x)
  7016  	// cond:
  7017  	// result: (NEG x)
  7018  	for {
  7019  		x := v.Args[0]
  7020  		v.reset(OpMIPSNEG)
  7021  		v.AddArg(x)
  7022  		return true
  7023  	}
  7024  }
  7025  func rewriteValueMIPS_OpNeg32F_0(v *Value) bool {
  7026  	// match: (Neg32F x)
  7027  	// cond:
  7028  	// result: (NEGF x)
  7029  	for {
  7030  		x := v.Args[0]
  7031  		v.reset(OpMIPSNEGF)
  7032  		v.AddArg(x)
  7033  		return true
  7034  	}
  7035  }
  7036  func rewriteValueMIPS_OpNeg64F_0(v *Value) bool {
  7037  	// match: (Neg64F x)
  7038  	// cond:
  7039  	// result: (NEGD x)
  7040  	for {
  7041  		x := v.Args[0]
  7042  		v.reset(OpMIPSNEGD)
  7043  		v.AddArg(x)
  7044  		return true
  7045  	}
  7046  }
  7047  func rewriteValueMIPS_OpNeg8_0(v *Value) bool {
  7048  	// match: (Neg8 x)
  7049  	// cond:
  7050  	// result: (NEG x)
  7051  	for {
  7052  		x := v.Args[0]
  7053  		v.reset(OpMIPSNEG)
  7054  		v.AddArg(x)
  7055  		return true
  7056  	}
  7057  }
  7058  func rewriteValueMIPS_OpNeq16_0(v *Value) bool {
  7059  	b := v.Block
  7060  	_ = b
  7061  	typ := &b.Func.Config.Types
  7062  	_ = typ
  7063  	// match: (Neq16 x y)
  7064  	// cond:
  7065  	// result: (SGTU (XOR (ZeroExt16to32 x) (ZeroExt16to32 y)) (MOVWconst [0]))
  7066  	for {
  7067  		_ = v.Args[1]
  7068  		x := v.Args[0]
  7069  		y := v.Args[1]
  7070  		v.reset(OpMIPSSGTU)
  7071  		v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32)
  7072  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7073  		v1.AddArg(x)
  7074  		v0.AddArg(v1)
  7075  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7076  		v2.AddArg(y)
  7077  		v0.AddArg(v2)
  7078  		v.AddArg(v0)
  7079  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7080  		v3.AuxInt = 0
  7081  		v.AddArg(v3)
  7082  		return true
  7083  	}
  7084  }
  7085  func rewriteValueMIPS_OpNeq32_0(v *Value) bool {
  7086  	b := v.Block
  7087  	_ = b
  7088  	typ := &b.Func.Config.Types
  7089  	_ = typ
  7090  	// match: (Neq32 x y)
  7091  	// cond:
  7092  	// result: (SGTU (XOR x y) (MOVWconst [0]))
  7093  	for {
  7094  		_ = v.Args[1]
  7095  		x := v.Args[0]
  7096  		y := v.Args[1]
  7097  		v.reset(OpMIPSSGTU)
  7098  		v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32)
  7099  		v0.AddArg(x)
  7100  		v0.AddArg(y)
  7101  		v.AddArg(v0)
  7102  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7103  		v1.AuxInt = 0
  7104  		v.AddArg(v1)
  7105  		return true
  7106  	}
  7107  }
  7108  func rewriteValueMIPS_OpNeq32F_0(v *Value) bool {
  7109  	b := v.Block
  7110  	_ = b
  7111  	// match: (Neq32F x y)
  7112  	// cond:
  7113  	// result: (FPFlagFalse (CMPEQF x y))
  7114  	for {
  7115  		_ = v.Args[1]
  7116  		x := v.Args[0]
  7117  		y := v.Args[1]
  7118  		v.reset(OpMIPSFPFlagFalse)
  7119  		v0 := b.NewValue0(v.Pos, OpMIPSCMPEQF, types.TypeFlags)
  7120  		v0.AddArg(x)
  7121  		v0.AddArg(y)
  7122  		v.AddArg(v0)
  7123  		return true
  7124  	}
  7125  }
  7126  func rewriteValueMIPS_OpNeq64F_0(v *Value) bool {
  7127  	b := v.Block
  7128  	_ = b
  7129  	// match: (Neq64F x y)
  7130  	// cond:
  7131  	// result: (FPFlagFalse (CMPEQD x y))
  7132  	for {
  7133  		_ = v.Args[1]
  7134  		x := v.Args[0]
  7135  		y := v.Args[1]
  7136  		v.reset(OpMIPSFPFlagFalse)
  7137  		v0 := b.NewValue0(v.Pos, OpMIPSCMPEQD, types.TypeFlags)
  7138  		v0.AddArg(x)
  7139  		v0.AddArg(y)
  7140  		v.AddArg(v0)
  7141  		return true
  7142  	}
  7143  }
  7144  func rewriteValueMIPS_OpNeq8_0(v *Value) bool {
  7145  	b := v.Block
  7146  	_ = b
  7147  	typ := &b.Func.Config.Types
  7148  	_ = typ
  7149  	// match: (Neq8 x y)
  7150  	// cond:
  7151  	// result: (SGTU (XOR (ZeroExt8to32 x) (ZeroExt8to32 y)) (MOVWconst [0]))
  7152  	for {
  7153  		_ = v.Args[1]
  7154  		x := v.Args[0]
  7155  		y := v.Args[1]
  7156  		v.reset(OpMIPSSGTU)
  7157  		v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32)
  7158  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7159  		v1.AddArg(x)
  7160  		v0.AddArg(v1)
  7161  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7162  		v2.AddArg(y)
  7163  		v0.AddArg(v2)
  7164  		v.AddArg(v0)
  7165  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7166  		v3.AuxInt = 0
  7167  		v.AddArg(v3)
  7168  		return true
  7169  	}
  7170  }
  7171  func rewriteValueMIPS_OpNeqB_0(v *Value) bool {
  7172  	// match: (NeqB x y)
  7173  	// cond:
  7174  	// result: (XOR x y)
  7175  	for {
  7176  		_ = v.Args[1]
  7177  		x := v.Args[0]
  7178  		y := v.Args[1]
  7179  		v.reset(OpMIPSXOR)
  7180  		v.AddArg(x)
  7181  		v.AddArg(y)
  7182  		return true
  7183  	}
  7184  }
  7185  func rewriteValueMIPS_OpNeqPtr_0(v *Value) bool {
  7186  	b := v.Block
  7187  	_ = b
  7188  	typ := &b.Func.Config.Types
  7189  	_ = typ
  7190  	// match: (NeqPtr x y)
  7191  	// cond:
  7192  	// result: (SGTU (XOR x y) (MOVWconst [0]))
  7193  	for {
  7194  		_ = v.Args[1]
  7195  		x := v.Args[0]
  7196  		y := v.Args[1]
  7197  		v.reset(OpMIPSSGTU)
  7198  		v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32)
  7199  		v0.AddArg(x)
  7200  		v0.AddArg(y)
  7201  		v.AddArg(v0)
  7202  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7203  		v1.AuxInt = 0
  7204  		v.AddArg(v1)
  7205  		return true
  7206  	}
  7207  }
  7208  func rewriteValueMIPS_OpNilCheck_0(v *Value) bool {
  7209  	// match: (NilCheck ptr mem)
  7210  	// cond:
  7211  	// result: (LoweredNilCheck ptr mem)
  7212  	for {
  7213  		_ = v.Args[1]
  7214  		ptr := v.Args[0]
  7215  		mem := v.Args[1]
  7216  		v.reset(OpMIPSLoweredNilCheck)
  7217  		v.AddArg(ptr)
  7218  		v.AddArg(mem)
  7219  		return true
  7220  	}
  7221  }
  7222  func rewriteValueMIPS_OpNot_0(v *Value) bool {
  7223  	// match: (Not x)
  7224  	// cond:
  7225  	// result: (XORconst [1] x)
  7226  	for {
  7227  		x := v.Args[0]
  7228  		v.reset(OpMIPSXORconst)
  7229  		v.AuxInt = 1
  7230  		v.AddArg(x)
  7231  		return true
  7232  	}
  7233  }
  7234  func rewriteValueMIPS_OpOffPtr_0(v *Value) bool {
  7235  	// match: (OffPtr [off] ptr:(SP))
  7236  	// cond:
  7237  	// result: (MOVWaddr [off] ptr)
  7238  	for {
  7239  		off := v.AuxInt
  7240  		ptr := v.Args[0]
  7241  		if ptr.Op != OpSP {
  7242  			break
  7243  		}
  7244  		v.reset(OpMIPSMOVWaddr)
  7245  		v.AuxInt = off
  7246  		v.AddArg(ptr)
  7247  		return true
  7248  	}
  7249  	// match: (OffPtr [off] ptr)
  7250  	// cond:
  7251  	// result: (ADDconst [off] ptr)
  7252  	for {
  7253  		off := v.AuxInt
  7254  		ptr := v.Args[0]
  7255  		v.reset(OpMIPSADDconst)
  7256  		v.AuxInt = off
  7257  		v.AddArg(ptr)
  7258  		return true
  7259  	}
  7260  }
  7261  func rewriteValueMIPS_OpOr16_0(v *Value) bool {
  7262  	// match: (Or16 x y)
  7263  	// cond:
  7264  	// result: (OR x y)
  7265  	for {
  7266  		_ = v.Args[1]
  7267  		x := v.Args[0]
  7268  		y := v.Args[1]
  7269  		v.reset(OpMIPSOR)
  7270  		v.AddArg(x)
  7271  		v.AddArg(y)
  7272  		return true
  7273  	}
  7274  }
  7275  func rewriteValueMIPS_OpOr32_0(v *Value) bool {
  7276  	// match: (Or32 x y)
  7277  	// cond:
  7278  	// result: (OR x y)
  7279  	for {
  7280  		_ = v.Args[1]
  7281  		x := v.Args[0]
  7282  		y := v.Args[1]
  7283  		v.reset(OpMIPSOR)
  7284  		v.AddArg(x)
  7285  		v.AddArg(y)
  7286  		return true
  7287  	}
  7288  }
  7289  func rewriteValueMIPS_OpOr8_0(v *Value) bool {
  7290  	// match: (Or8 x y)
  7291  	// cond:
  7292  	// result: (OR x y)
  7293  	for {
  7294  		_ = v.Args[1]
  7295  		x := v.Args[0]
  7296  		y := v.Args[1]
  7297  		v.reset(OpMIPSOR)
  7298  		v.AddArg(x)
  7299  		v.AddArg(y)
  7300  		return true
  7301  	}
  7302  }
  7303  func rewriteValueMIPS_OpOrB_0(v *Value) bool {
  7304  	// match: (OrB x y)
  7305  	// cond:
  7306  	// result: (OR x y)
  7307  	for {
  7308  		_ = v.Args[1]
  7309  		x := v.Args[0]
  7310  		y := v.Args[1]
  7311  		v.reset(OpMIPSOR)
  7312  		v.AddArg(x)
  7313  		v.AddArg(y)
  7314  		return true
  7315  	}
  7316  }
  7317  func rewriteValueMIPS_OpRound32F_0(v *Value) bool {
  7318  	// match: (Round32F x)
  7319  	// cond:
  7320  	// result: x
  7321  	for {
  7322  		x := v.Args[0]
  7323  		v.reset(OpCopy)
  7324  		v.Type = x.Type
  7325  		v.AddArg(x)
  7326  		return true
  7327  	}
  7328  }
  7329  func rewriteValueMIPS_OpRound64F_0(v *Value) bool {
  7330  	// match: (Round64F x)
  7331  	// cond:
  7332  	// result: x
  7333  	for {
  7334  		x := v.Args[0]
  7335  		v.reset(OpCopy)
  7336  		v.Type = x.Type
  7337  		v.AddArg(x)
  7338  		return true
  7339  	}
  7340  }
  7341  func rewriteValueMIPS_OpRsh16Ux16_0(v *Value) bool {
  7342  	b := v.Block
  7343  	_ = b
  7344  	typ := &b.Func.Config.Types
  7345  	_ = typ
  7346  	// match: (Rsh16Ux16 <t> x y)
  7347  	// cond:
  7348  	// result: (CMOVZ (SRL <t> (ZeroExt16to32 x) (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
  7349  	for {
  7350  		t := v.Type
  7351  		_ = v.Args[1]
  7352  		x := v.Args[0]
  7353  		y := v.Args[1]
  7354  		v.reset(OpMIPSCMOVZ)
  7355  		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7356  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7357  		v1.AddArg(x)
  7358  		v0.AddArg(v1)
  7359  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7360  		v2.AddArg(y)
  7361  		v0.AddArg(v2)
  7362  		v.AddArg(v0)
  7363  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7364  		v3.AuxInt = 0
  7365  		v.AddArg(v3)
  7366  		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7367  		v4.AuxInt = 32
  7368  		v5 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7369  		v5.AddArg(y)
  7370  		v4.AddArg(v5)
  7371  		v.AddArg(v4)
  7372  		return true
  7373  	}
  7374  }
  7375  func rewriteValueMIPS_OpRsh16Ux32_0(v *Value) bool {
  7376  	b := v.Block
  7377  	_ = b
  7378  	typ := &b.Func.Config.Types
  7379  	_ = typ
  7380  	// match: (Rsh16Ux32 <t> x y)
  7381  	// cond:
  7382  	// result: (CMOVZ (SRL <t> (ZeroExt16to32 x) y) (MOVWconst [0]) (SGTUconst [32] y))
  7383  	for {
  7384  		t := v.Type
  7385  		_ = v.Args[1]
  7386  		x := v.Args[0]
  7387  		y := v.Args[1]
  7388  		v.reset(OpMIPSCMOVZ)
  7389  		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7390  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7391  		v1.AddArg(x)
  7392  		v0.AddArg(v1)
  7393  		v0.AddArg(y)
  7394  		v.AddArg(v0)
  7395  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7396  		v2.AuxInt = 0
  7397  		v.AddArg(v2)
  7398  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7399  		v3.AuxInt = 32
  7400  		v3.AddArg(y)
  7401  		v.AddArg(v3)
  7402  		return true
  7403  	}
  7404  }
  7405  func rewriteValueMIPS_OpRsh16Ux64_0(v *Value) bool {
  7406  	b := v.Block
  7407  	_ = b
  7408  	typ := &b.Func.Config.Types
  7409  	_ = typ
  7410  	// match: (Rsh16Ux64 x (Const64 [c]))
  7411  	// cond: uint32(c) < 16
  7412  	// result: (SRLconst (SLLconst <typ.UInt32> x [16]) [c+16])
  7413  	for {
  7414  		_ = v.Args[1]
  7415  		x := v.Args[0]
  7416  		v_1 := v.Args[1]
  7417  		if v_1.Op != OpConst64 {
  7418  			break
  7419  		}
  7420  		c := v_1.AuxInt
  7421  		if !(uint32(c) < 16) {
  7422  			break
  7423  		}
  7424  		v.reset(OpMIPSSRLconst)
  7425  		v.AuxInt = c + 16
  7426  		v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
  7427  		v0.AuxInt = 16
  7428  		v0.AddArg(x)
  7429  		v.AddArg(v0)
  7430  		return true
  7431  	}
  7432  	// match: (Rsh16Ux64 _ (Const64 [c]))
  7433  	// cond: uint32(c) >= 16
  7434  	// result: (MOVWconst [0])
  7435  	for {
  7436  		_ = v.Args[1]
  7437  		v_1 := v.Args[1]
  7438  		if v_1.Op != OpConst64 {
  7439  			break
  7440  		}
  7441  		c := v_1.AuxInt
  7442  		if !(uint32(c) >= 16) {
  7443  			break
  7444  		}
  7445  		v.reset(OpMIPSMOVWconst)
  7446  		v.AuxInt = 0
  7447  		return true
  7448  	}
  7449  	return false
  7450  }
  7451  func rewriteValueMIPS_OpRsh16Ux8_0(v *Value) bool {
  7452  	b := v.Block
  7453  	_ = b
  7454  	typ := &b.Func.Config.Types
  7455  	_ = typ
  7456  	// match: (Rsh16Ux8 <t> x y)
  7457  	// cond:
  7458  	// result: (CMOVZ (SRL <t> (ZeroExt16to32 x) (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
  7459  	for {
  7460  		t := v.Type
  7461  		_ = v.Args[1]
  7462  		x := v.Args[0]
  7463  		y := v.Args[1]
  7464  		v.reset(OpMIPSCMOVZ)
  7465  		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7466  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7467  		v1.AddArg(x)
  7468  		v0.AddArg(v1)
  7469  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7470  		v2.AddArg(y)
  7471  		v0.AddArg(v2)
  7472  		v.AddArg(v0)
  7473  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7474  		v3.AuxInt = 0
  7475  		v.AddArg(v3)
  7476  		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7477  		v4.AuxInt = 32
  7478  		v5 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7479  		v5.AddArg(y)
  7480  		v4.AddArg(v5)
  7481  		v.AddArg(v4)
  7482  		return true
  7483  	}
  7484  }
  7485  func rewriteValueMIPS_OpRsh16x16_0(v *Value) bool {
  7486  	b := v.Block
  7487  	_ = b
  7488  	typ := &b.Func.Config.Types
  7489  	_ = typ
  7490  	// match: (Rsh16x16 x y)
  7491  	// cond:
  7492  	// result: (SRA (SignExt16to32 x) ( CMOVZ <typ.UInt32> (ZeroExt16to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt16to32 y))))
  7493  	for {
  7494  		_ = v.Args[1]
  7495  		x := v.Args[0]
  7496  		y := v.Args[1]
  7497  		v.reset(OpMIPSSRA)
  7498  		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  7499  		v0.AddArg(x)
  7500  		v.AddArg(v0)
  7501  		v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
  7502  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7503  		v2.AddArg(y)
  7504  		v1.AddArg(v2)
  7505  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7506  		v3.AuxInt = -1
  7507  		v1.AddArg(v3)
  7508  		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7509  		v4.AuxInt = 32
  7510  		v5 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7511  		v5.AddArg(y)
  7512  		v4.AddArg(v5)
  7513  		v1.AddArg(v4)
  7514  		v.AddArg(v1)
  7515  		return true
  7516  	}
  7517  }
  7518  func rewriteValueMIPS_OpRsh16x32_0(v *Value) bool {
  7519  	b := v.Block
  7520  	_ = b
  7521  	typ := &b.Func.Config.Types
  7522  	_ = typ
  7523  	// match: (Rsh16x32 x y)
  7524  	// cond:
  7525  	// result: (SRA (SignExt16to32 x) ( CMOVZ <typ.UInt32> y (MOVWconst [-1]) (SGTUconst [32] y)))
  7526  	for {
  7527  		_ = v.Args[1]
  7528  		x := v.Args[0]
  7529  		y := v.Args[1]
  7530  		v.reset(OpMIPSSRA)
  7531  		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  7532  		v0.AddArg(x)
  7533  		v.AddArg(v0)
  7534  		v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
  7535  		v1.AddArg(y)
  7536  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7537  		v2.AuxInt = -1
  7538  		v1.AddArg(v2)
  7539  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7540  		v3.AuxInt = 32
  7541  		v3.AddArg(y)
  7542  		v1.AddArg(v3)
  7543  		v.AddArg(v1)
  7544  		return true
  7545  	}
  7546  }
  7547  func rewriteValueMIPS_OpRsh16x64_0(v *Value) bool {
  7548  	b := v.Block
  7549  	_ = b
  7550  	typ := &b.Func.Config.Types
  7551  	_ = typ
  7552  	// match: (Rsh16x64 x (Const64 [c]))
  7553  	// cond: uint32(c) < 16
  7554  	// result: (SRAconst (SLLconst <typ.UInt32> x [16]) [c+16])
  7555  	for {
  7556  		_ = v.Args[1]
  7557  		x := v.Args[0]
  7558  		v_1 := v.Args[1]
  7559  		if v_1.Op != OpConst64 {
  7560  			break
  7561  		}
  7562  		c := v_1.AuxInt
  7563  		if !(uint32(c) < 16) {
  7564  			break
  7565  		}
  7566  		v.reset(OpMIPSSRAconst)
  7567  		v.AuxInt = c + 16
  7568  		v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
  7569  		v0.AuxInt = 16
  7570  		v0.AddArg(x)
  7571  		v.AddArg(v0)
  7572  		return true
  7573  	}
  7574  	// match: (Rsh16x64 x (Const64 [c]))
  7575  	// cond: uint32(c) >= 16
  7576  	// result: (SRAconst (SLLconst <typ.UInt32> x [16]) [31])
  7577  	for {
  7578  		_ = v.Args[1]
  7579  		x := v.Args[0]
  7580  		v_1 := v.Args[1]
  7581  		if v_1.Op != OpConst64 {
  7582  			break
  7583  		}
  7584  		c := v_1.AuxInt
  7585  		if !(uint32(c) >= 16) {
  7586  			break
  7587  		}
  7588  		v.reset(OpMIPSSRAconst)
  7589  		v.AuxInt = 31
  7590  		v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
  7591  		v0.AuxInt = 16
  7592  		v0.AddArg(x)
  7593  		v.AddArg(v0)
  7594  		return true
  7595  	}
  7596  	return false
  7597  }
  7598  func rewriteValueMIPS_OpRsh16x8_0(v *Value) bool {
  7599  	b := v.Block
  7600  	_ = b
  7601  	typ := &b.Func.Config.Types
  7602  	_ = typ
  7603  	// match: (Rsh16x8 x y)
  7604  	// cond:
  7605  	// result: (SRA (SignExt16to32 x) ( CMOVZ <typ.UInt32> (ZeroExt8to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt8to32 y))))
  7606  	for {
  7607  		_ = v.Args[1]
  7608  		x := v.Args[0]
  7609  		y := v.Args[1]
  7610  		v.reset(OpMIPSSRA)
  7611  		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  7612  		v0.AddArg(x)
  7613  		v.AddArg(v0)
  7614  		v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
  7615  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7616  		v2.AddArg(y)
  7617  		v1.AddArg(v2)
  7618  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7619  		v3.AuxInt = -1
  7620  		v1.AddArg(v3)
  7621  		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7622  		v4.AuxInt = 32
  7623  		v5 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7624  		v5.AddArg(y)
  7625  		v4.AddArg(v5)
  7626  		v1.AddArg(v4)
  7627  		v.AddArg(v1)
  7628  		return true
  7629  	}
  7630  }
  7631  func rewriteValueMIPS_OpRsh32Ux16_0(v *Value) bool {
  7632  	b := v.Block
  7633  	_ = b
  7634  	typ := &b.Func.Config.Types
  7635  	_ = typ
  7636  	// match: (Rsh32Ux16 <t> x y)
  7637  	// cond:
  7638  	// result: (CMOVZ (SRL <t> x (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
  7639  	for {
  7640  		t := v.Type
  7641  		_ = v.Args[1]
  7642  		x := v.Args[0]
  7643  		y := v.Args[1]
  7644  		v.reset(OpMIPSCMOVZ)
  7645  		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7646  		v0.AddArg(x)
  7647  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7648  		v1.AddArg(y)
  7649  		v0.AddArg(v1)
  7650  		v.AddArg(v0)
  7651  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7652  		v2.AuxInt = 0
  7653  		v.AddArg(v2)
  7654  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7655  		v3.AuxInt = 32
  7656  		v4 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7657  		v4.AddArg(y)
  7658  		v3.AddArg(v4)
  7659  		v.AddArg(v3)
  7660  		return true
  7661  	}
  7662  }
  7663  func rewriteValueMIPS_OpRsh32Ux32_0(v *Value) bool {
  7664  	b := v.Block
  7665  	_ = b
  7666  	typ := &b.Func.Config.Types
  7667  	_ = typ
  7668  	// match: (Rsh32Ux32 <t> x y)
  7669  	// cond:
  7670  	// result: (CMOVZ (SRL <t> x y) (MOVWconst [0]) (SGTUconst [32] y))
  7671  	for {
  7672  		t := v.Type
  7673  		_ = v.Args[1]
  7674  		x := v.Args[0]
  7675  		y := v.Args[1]
  7676  		v.reset(OpMIPSCMOVZ)
  7677  		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7678  		v0.AddArg(x)
  7679  		v0.AddArg(y)
  7680  		v.AddArg(v0)
  7681  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7682  		v1.AuxInt = 0
  7683  		v.AddArg(v1)
  7684  		v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7685  		v2.AuxInt = 32
  7686  		v2.AddArg(y)
  7687  		v.AddArg(v2)
  7688  		return true
  7689  	}
  7690  }
  7691  func rewriteValueMIPS_OpRsh32Ux64_0(v *Value) bool {
  7692  	// match: (Rsh32Ux64 x (Const64 [c]))
  7693  	// cond: uint32(c) < 32
  7694  	// result: (SRLconst x [c])
  7695  	for {
  7696  		_ = v.Args[1]
  7697  		x := v.Args[0]
  7698  		v_1 := v.Args[1]
  7699  		if v_1.Op != OpConst64 {
  7700  			break
  7701  		}
  7702  		c := v_1.AuxInt
  7703  		if !(uint32(c) < 32) {
  7704  			break
  7705  		}
  7706  		v.reset(OpMIPSSRLconst)
  7707  		v.AuxInt = c
  7708  		v.AddArg(x)
  7709  		return true
  7710  	}
  7711  	// match: (Rsh32Ux64 _ (Const64 [c]))
  7712  	// cond: uint32(c) >= 32
  7713  	// result: (MOVWconst [0])
  7714  	for {
  7715  		_ = v.Args[1]
  7716  		v_1 := v.Args[1]
  7717  		if v_1.Op != OpConst64 {
  7718  			break
  7719  		}
  7720  		c := v_1.AuxInt
  7721  		if !(uint32(c) >= 32) {
  7722  			break
  7723  		}
  7724  		v.reset(OpMIPSMOVWconst)
  7725  		v.AuxInt = 0
  7726  		return true
  7727  	}
  7728  	return false
  7729  }
  7730  func rewriteValueMIPS_OpRsh32Ux8_0(v *Value) bool {
  7731  	b := v.Block
  7732  	_ = b
  7733  	typ := &b.Func.Config.Types
  7734  	_ = typ
  7735  	// match: (Rsh32Ux8 <t> x y)
  7736  	// cond:
  7737  	// result: (CMOVZ (SRL <t> x (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
  7738  	for {
  7739  		t := v.Type
  7740  		_ = v.Args[1]
  7741  		x := v.Args[0]
  7742  		y := v.Args[1]
  7743  		v.reset(OpMIPSCMOVZ)
  7744  		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7745  		v0.AddArg(x)
  7746  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7747  		v1.AddArg(y)
  7748  		v0.AddArg(v1)
  7749  		v.AddArg(v0)
  7750  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7751  		v2.AuxInt = 0
  7752  		v.AddArg(v2)
  7753  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7754  		v3.AuxInt = 32
  7755  		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7756  		v4.AddArg(y)
  7757  		v3.AddArg(v4)
  7758  		v.AddArg(v3)
  7759  		return true
  7760  	}
  7761  }
  7762  func rewriteValueMIPS_OpRsh32x16_0(v *Value) bool {
  7763  	b := v.Block
  7764  	_ = b
  7765  	typ := &b.Func.Config.Types
  7766  	_ = typ
  7767  	// match: (Rsh32x16 x y)
  7768  	// cond:
  7769  	// result: (SRA x ( CMOVZ <typ.UInt32> (ZeroExt16to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt16to32 y))))
  7770  	for {
  7771  		_ = v.Args[1]
  7772  		x := v.Args[0]
  7773  		y := v.Args[1]
  7774  		v.reset(OpMIPSSRA)
  7775  		v.AddArg(x)
  7776  		v0 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
  7777  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7778  		v1.AddArg(y)
  7779  		v0.AddArg(v1)
  7780  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7781  		v2.AuxInt = -1
  7782  		v0.AddArg(v2)
  7783  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7784  		v3.AuxInt = 32
  7785  		v4 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7786  		v4.AddArg(y)
  7787  		v3.AddArg(v4)
  7788  		v0.AddArg(v3)
  7789  		v.AddArg(v0)
  7790  		return true
  7791  	}
  7792  }
  7793  func rewriteValueMIPS_OpRsh32x32_0(v *Value) bool {
  7794  	b := v.Block
  7795  	_ = b
  7796  	typ := &b.Func.Config.Types
  7797  	_ = typ
  7798  	// match: (Rsh32x32 x y)
  7799  	// cond:
  7800  	// result: (SRA x ( CMOVZ <typ.UInt32> y (MOVWconst [-1]) (SGTUconst [32] y)))
  7801  	for {
  7802  		_ = v.Args[1]
  7803  		x := v.Args[0]
  7804  		y := v.Args[1]
  7805  		v.reset(OpMIPSSRA)
  7806  		v.AddArg(x)
  7807  		v0 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
  7808  		v0.AddArg(y)
  7809  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7810  		v1.AuxInt = -1
  7811  		v0.AddArg(v1)
  7812  		v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7813  		v2.AuxInt = 32
  7814  		v2.AddArg(y)
  7815  		v0.AddArg(v2)
  7816  		v.AddArg(v0)
  7817  		return true
  7818  	}
  7819  }
  7820  func rewriteValueMIPS_OpRsh32x64_0(v *Value) bool {
  7821  	// match: (Rsh32x64 x (Const64 [c]))
  7822  	// cond: uint32(c) < 32
  7823  	// result: (SRAconst x [c])
  7824  	for {
  7825  		_ = v.Args[1]
  7826  		x := v.Args[0]
  7827  		v_1 := v.Args[1]
  7828  		if v_1.Op != OpConst64 {
  7829  			break
  7830  		}
  7831  		c := v_1.AuxInt
  7832  		if !(uint32(c) < 32) {
  7833  			break
  7834  		}
  7835  		v.reset(OpMIPSSRAconst)
  7836  		v.AuxInt = c
  7837  		v.AddArg(x)
  7838  		return true
  7839  	}
  7840  	// match: (Rsh32x64 x (Const64 [c]))
  7841  	// cond: uint32(c) >= 32
  7842  	// result: (SRAconst x [31])
  7843  	for {
  7844  		_ = v.Args[1]
  7845  		x := v.Args[0]
  7846  		v_1 := v.Args[1]
  7847  		if v_1.Op != OpConst64 {
  7848  			break
  7849  		}
  7850  		c := v_1.AuxInt
  7851  		if !(uint32(c) >= 32) {
  7852  			break
  7853  		}
  7854  		v.reset(OpMIPSSRAconst)
  7855  		v.AuxInt = 31
  7856  		v.AddArg(x)
  7857  		return true
  7858  	}
  7859  	return false
  7860  }
  7861  func rewriteValueMIPS_OpRsh32x8_0(v *Value) bool {
  7862  	b := v.Block
  7863  	_ = b
  7864  	typ := &b.Func.Config.Types
  7865  	_ = typ
  7866  	// match: (Rsh32x8 x y)
  7867  	// cond:
  7868  	// result: (SRA x ( CMOVZ <typ.UInt32> (ZeroExt8to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt8to32 y))))
  7869  	for {
  7870  		_ = v.Args[1]
  7871  		x := v.Args[0]
  7872  		y := v.Args[1]
  7873  		v.reset(OpMIPSSRA)
  7874  		v.AddArg(x)
  7875  		v0 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
  7876  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7877  		v1.AddArg(y)
  7878  		v0.AddArg(v1)
  7879  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7880  		v2.AuxInt = -1
  7881  		v0.AddArg(v2)
  7882  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7883  		v3.AuxInt = 32
  7884  		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7885  		v4.AddArg(y)
  7886  		v3.AddArg(v4)
  7887  		v0.AddArg(v3)
  7888  		v.AddArg(v0)
  7889  		return true
  7890  	}
  7891  }
  7892  func rewriteValueMIPS_OpRsh8Ux16_0(v *Value) bool {
  7893  	b := v.Block
  7894  	_ = b
  7895  	typ := &b.Func.Config.Types
  7896  	_ = typ
  7897  	// match: (Rsh8Ux16 <t> x y)
  7898  	// cond:
  7899  	// result: (CMOVZ (SRL <t> (ZeroExt8to32 x) (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
  7900  	for {
  7901  		t := v.Type
  7902  		_ = v.Args[1]
  7903  		x := v.Args[0]
  7904  		y := v.Args[1]
  7905  		v.reset(OpMIPSCMOVZ)
  7906  		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7907  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7908  		v1.AddArg(x)
  7909  		v0.AddArg(v1)
  7910  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7911  		v2.AddArg(y)
  7912  		v0.AddArg(v2)
  7913  		v.AddArg(v0)
  7914  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7915  		v3.AuxInt = 0
  7916  		v.AddArg(v3)
  7917  		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7918  		v4.AuxInt = 32
  7919  		v5 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7920  		v5.AddArg(y)
  7921  		v4.AddArg(v5)
  7922  		v.AddArg(v4)
  7923  		return true
  7924  	}
  7925  }
  7926  func rewriteValueMIPS_OpRsh8Ux32_0(v *Value) bool {
  7927  	b := v.Block
  7928  	_ = b
  7929  	typ := &b.Func.Config.Types
  7930  	_ = typ
  7931  	// match: (Rsh8Ux32 <t> x y)
  7932  	// cond:
  7933  	// result: (CMOVZ (SRL <t> (ZeroExt8to32 x) y) (MOVWconst [0]) (SGTUconst [32] y))
  7934  	for {
  7935  		t := v.Type
  7936  		_ = v.Args[1]
  7937  		x := v.Args[0]
  7938  		y := v.Args[1]
  7939  		v.reset(OpMIPSCMOVZ)
  7940  		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7941  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7942  		v1.AddArg(x)
  7943  		v0.AddArg(v1)
  7944  		v0.AddArg(y)
  7945  		v.AddArg(v0)
  7946  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7947  		v2.AuxInt = 0
  7948  		v.AddArg(v2)
  7949  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7950  		v3.AuxInt = 32
  7951  		v3.AddArg(y)
  7952  		v.AddArg(v3)
  7953  		return true
  7954  	}
  7955  }
  7956  func rewriteValueMIPS_OpRsh8Ux64_0(v *Value) bool {
  7957  	b := v.Block
  7958  	_ = b
  7959  	typ := &b.Func.Config.Types
  7960  	_ = typ
  7961  	// match: (Rsh8Ux64 x (Const64 [c]))
  7962  	// cond: uint32(c) < 8
  7963  	// result: (SRLconst (SLLconst <typ.UInt32> x [24]) [c+24])
  7964  	for {
  7965  		_ = v.Args[1]
  7966  		x := v.Args[0]
  7967  		v_1 := v.Args[1]
  7968  		if v_1.Op != OpConst64 {
  7969  			break
  7970  		}
  7971  		c := v_1.AuxInt
  7972  		if !(uint32(c) < 8) {
  7973  			break
  7974  		}
  7975  		v.reset(OpMIPSSRLconst)
  7976  		v.AuxInt = c + 24
  7977  		v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
  7978  		v0.AuxInt = 24
  7979  		v0.AddArg(x)
  7980  		v.AddArg(v0)
  7981  		return true
  7982  	}
  7983  	// match: (Rsh8Ux64 _ (Const64 [c]))
  7984  	// cond: uint32(c) >= 8
  7985  	// result: (MOVWconst [0])
  7986  	for {
  7987  		_ = v.Args[1]
  7988  		v_1 := v.Args[1]
  7989  		if v_1.Op != OpConst64 {
  7990  			break
  7991  		}
  7992  		c := v_1.AuxInt
  7993  		if !(uint32(c) >= 8) {
  7994  			break
  7995  		}
  7996  		v.reset(OpMIPSMOVWconst)
  7997  		v.AuxInt = 0
  7998  		return true
  7999  	}
  8000  	return false
  8001  }
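        // Rsh8Ux64 with a constant count cannot shift the byte in place,
        // because the upper 24 bits of an 8-bit SSA value's register are not
        // guaranteed to be zero. The rule above first moves the byte to the
        // top of the word with SLLconst [24] and then shifts right logically
        // by c+24; a right shift by 3, for example, becomes
        // (SRLconst [27] (SLLconst [24] x)). Counts of 8 or more zero the
        // result outright via (MOVWconst [0]).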
  8002  func rewriteValueMIPS_OpRsh8Ux8_0(v *Value) bool {
  8003  	b := v.Block
  8004  	_ = b
  8005  	typ := &b.Func.Config.Types
  8006  	_ = typ
  8007  	// match: (Rsh8Ux8 <t> x y)
  8008  	// cond:
  8009  	// result: (CMOVZ (SRL <t> (ZeroExt8to32 x) (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
  8010  	for {
  8011  		t := v.Type
  8012  		_ = v.Args[1]
  8013  		x := v.Args[0]
  8014  		y := v.Args[1]
  8015  		v.reset(OpMIPSCMOVZ)
  8016  		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  8017  		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  8018  		v1.AddArg(x)
  8019  		v0.AddArg(v1)
  8020  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  8021  		v2.AddArg(y)
  8022  		v0.AddArg(v2)
  8023  		v.AddArg(v0)
  8024  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  8025  		v3.AuxInt = 0
  8026  		v.AddArg(v3)
  8027  		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  8028  		v4.AuxInt = 32
  8029  		v5 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  8030  		v5.AddArg(y)
  8031  		v4.AddArg(v5)
  8032  		v.AddArg(v4)
  8033  		return true
  8034  	}
  8035  }
  8036  func rewriteValueMIPS_OpRsh8x16_0(v *Value) bool {
  8037  	b := v.Block
  8038  	_ = b
  8039  	typ := &b.Func.Config.Types
  8040  	_ = typ
  8041  	// match: (Rsh8x16 x y)
  8042  	// cond:
  8043  	// result: (SRA (SignExt8to32 x) ( CMOVZ <typ.UInt32> (ZeroExt16to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt16to32 y))))
  8044  	for {
  8045  		_ = v.Args[1]
  8046  		x := v.Args[0]
  8047  		y := v.Args[1]
  8048  		v.reset(OpMIPSSRA)
  8049  		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  8050  		v0.AddArg(x)
  8051  		v.AddArg(v0)
  8052  		v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
  8053  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  8054  		v2.AddArg(y)
  8055  		v1.AddArg(v2)
  8056  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  8057  		v3.AuxInt = -1
  8058  		v1.AddArg(v3)
  8059  		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  8060  		v4.AuxInt = 32
  8061  		v5 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  8062  		v5.AddArg(y)
  8063  		v4.AddArg(v5)
  8064  		v1.AddArg(v4)
  8065  		v.AddArg(v1)
  8066  		return true
  8067  	}
  8068  }
  8069  func rewriteValueMIPS_OpRsh8x32_0(v *Value) bool {
  8070  	b := v.Block
  8071  	_ = b
  8072  	typ := &b.Func.Config.Types
  8073  	_ = typ
  8074  	// match: (Rsh8x32 x y)
  8075  	// cond:
  8076  	// result: (SRA (SignExt8to32 x) ( CMOVZ <typ.UInt32> y (MOVWconst [-1]) (SGTUconst [32] y)))
  8077  	for {
  8078  		_ = v.Args[1]
  8079  		x := v.Args[0]
  8080  		y := v.Args[1]
  8081  		v.reset(OpMIPSSRA)
  8082  		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  8083  		v0.AddArg(x)
  8084  		v.AddArg(v0)
  8085  		v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
  8086  		v1.AddArg(y)
  8087  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  8088  		v2.AuxInt = -1
  8089  		v1.AddArg(v2)
  8090  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  8091  		v3.AuxInt = 32
  8092  		v3.AddArg(y)
  8093  		v1.AddArg(v3)
  8094  		v.AddArg(v1)
  8095  		return true
  8096  	}
  8097  }
  8098  func rewriteValueMIPS_OpRsh8x64_0(v *Value) bool {
  8099  	b := v.Block
  8100  	_ = b
  8101  	typ := &b.Func.Config.Types
  8102  	_ = typ
  8103  	// match: (Rsh8x64 x (Const64 [c]))
  8104  	// cond: uint32(c) < 8
  8105  	// result: (SRAconst (SLLconst <typ.UInt32> x [24]) [c+24])
  8106  	for {
  8107  		_ = v.Args[1]
  8108  		x := v.Args[0]
  8109  		v_1 := v.Args[1]
  8110  		if v_1.Op != OpConst64 {
  8111  			break
  8112  		}
  8113  		c := v_1.AuxInt
  8114  		if !(uint32(c) < 8) {
  8115  			break
  8116  		}
  8117  		v.reset(OpMIPSSRAconst)
  8118  		v.AuxInt = c + 24
  8119  		v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
  8120  		v0.AuxInt = 24
  8121  		v0.AddArg(x)
  8122  		v.AddArg(v0)
  8123  		return true
  8124  	}
  8125  	// match: (Rsh8x64 x (Const64 [c]))
  8126  	// cond: uint32(c) >= 8
  8127  	// result: (SRAconst (SLLconst <typ.UInt32> x [24]) [31])
  8128  	for {
  8129  		_ = v.Args[1]
  8130  		x := v.Args[0]
  8131  		v_1 := v.Args[1]
  8132  		if v_1.Op != OpConst64 {
  8133  			break
  8134  		}
  8135  		c := v_1.AuxInt
  8136  		if !(uint32(c) >= 8) {
  8137  			break
  8138  		}
  8139  		v.reset(OpMIPSSRAconst)
  8140  		v.AuxInt = 31
  8141  		v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
  8142  		v0.AuxInt = 24
  8143  		v0.AddArg(x)
  8144  		v.AddArg(v0)
  8145  		return true
  8146  	}
  8147  	return false
  8148  }
  8149  func rewriteValueMIPS_OpRsh8x8_0(v *Value) bool {
  8150  	b := v.Block
  8151  	_ = b
  8152  	typ := &b.Func.Config.Types
  8153  	_ = typ
  8154  	// match: (Rsh8x8 x y)
  8155  	// cond:
  8156  	// result: (SRA (SignExt8to32 x) ( CMOVZ <typ.UInt32> (ZeroExt8to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt8to32 y))))
  8157  	for {
  8158  		_ = v.Args[1]
  8159  		x := v.Args[0]
  8160  		y := v.Args[1]
  8161  		v.reset(OpMIPSSRA)
  8162  		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  8163  		v0.AddArg(x)
  8164  		v.AddArg(v0)
  8165  		v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
  8166  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  8167  		v2.AddArg(y)
  8168  		v1.AddArg(v2)
  8169  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  8170  		v3.AuxInt = -1
  8171  		v1.AddArg(v3)
  8172  		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  8173  		v4.AuxInt = 32
  8174  		v5 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  8175  		v5.AddArg(y)
  8176  		v4.AddArg(v5)
  8177  		v1.AddArg(v4)
  8178  		v.AddArg(v1)
  8179  		return true
  8180  	}
  8181  }
  8182  func rewriteValueMIPS_OpSelect0_0(v *Value) bool {
  8183  	b := v.Block
  8184  	_ = b
  8185  	typ := &b.Func.Config.Types
  8186  	_ = typ
  8187  	// match: (Select0 (Add32carry <t> x y))
  8188  	// cond:
  8189  	// result: (ADD <t.FieldType(0)> x y)
  8190  	for {
  8191  		v_0 := v.Args[0]
  8192  		if v_0.Op != OpAdd32carry {
  8193  			break
  8194  		}
  8195  		t := v_0.Type
  8196  		_ = v_0.Args[1]
  8197  		x := v_0.Args[0]
  8198  		y := v_0.Args[1]
  8199  		v.reset(OpMIPSADD)
  8200  		v.Type = t.FieldType(0)
  8201  		v.AddArg(x)
  8202  		v.AddArg(y)
  8203  		return true
  8204  	}
  8205  	// match: (Select0 (Sub32carry <t> x y))
  8206  	// cond:
  8207  	// result: (SUB <t.FieldType(0)> x y)
  8208  	for {
  8209  		v_0 := v.Args[0]
  8210  		if v_0.Op != OpSub32carry {
  8211  			break
  8212  		}
  8213  		t := v_0.Type
  8214  		_ = v_0.Args[1]
  8215  		x := v_0.Args[0]
  8216  		y := v_0.Args[1]
  8217  		v.reset(OpMIPSSUB)
  8218  		v.Type = t.FieldType(0)
  8219  		v.AddArg(x)
  8220  		v.AddArg(y)
  8221  		return true
  8222  	}
  8223  	// match: (Select0 (MULTU (MOVWconst [0]) _))
  8224  	// cond:
  8225  	// result: (MOVWconst [0])
  8226  	for {
  8227  		v_0 := v.Args[0]
  8228  		if v_0.Op != OpMIPSMULTU {
  8229  			break
  8230  		}
  8231  		_ = v_0.Args[1]
  8232  		v_0_0 := v_0.Args[0]
  8233  		if v_0_0.Op != OpMIPSMOVWconst {
  8234  			break
  8235  		}
  8236  		if v_0_0.AuxInt != 0 {
  8237  			break
  8238  		}
  8239  		v.reset(OpMIPSMOVWconst)
  8240  		v.AuxInt = 0
  8241  		return true
  8242  	}
  8243  	// match: (Select0 (MULTU _ (MOVWconst [0])))
  8244  	// cond:
  8245  	// result: (MOVWconst [0])
  8246  	for {
  8247  		v_0 := v.Args[0]
  8248  		if v_0.Op != OpMIPSMULTU {
  8249  			break
  8250  		}
  8251  		_ = v_0.Args[1]
  8252  		v_0_1 := v_0.Args[1]
  8253  		if v_0_1.Op != OpMIPSMOVWconst {
  8254  			break
  8255  		}
  8256  		if v_0_1.AuxInt != 0 {
  8257  			break
  8258  		}
  8259  		v.reset(OpMIPSMOVWconst)
  8260  		v.AuxInt = 0
  8261  		return true
  8262  	}
  8263  	// match: (Select0 (MULTU (MOVWconst [1]) _))
  8264  	// cond:
  8265  	// result: (MOVWconst [0])
  8266  	for {
  8267  		v_0 := v.Args[0]
  8268  		if v_0.Op != OpMIPSMULTU {
  8269  			break
  8270  		}
  8271  		_ = v_0.Args[1]
  8272  		v_0_0 := v_0.Args[0]
  8273  		if v_0_0.Op != OpMIPSMOVWconst {
  8274  			break
  8275  		}
  8276  		if v_0_0.AuxInt != 1 {
  8277  			break
  8278  		}
  8279  		v.reset(OpMIPSMOVWconst)
  8280  		v.AuxInt = 0
  8281  		return true
  8282  	}
  8283  	// match: (Select0 (MULTU _ (MOVWconst [1])))
  8284  	// cond:
  8285  	// result: (MOVWconst [0])
  8286  	for {
  8287  		v_0 := v.Args[0]
  8288  		if v_0.Op != OpMIPSMULTU {
  8289  			break
  8290  		}
  8291  		_ = v_0.Args[1]
  8292  		v_0_1 := v_0.Args[1]
  8293  		if v_0_1.Op != OpMIPSMOVWconst {
  8294  			break
  8295  		}
  8296  		if v_0_1.AuxInt != 1 {
  8297  			break
  8298  		}
  8299  		v.reset(OpMIPSMOVWconst)
  8300  		v.AuxInt = 0
  8301  		return true
  8302  	}
  8303  	// match: (Select0 (MULTU (MOVWconst [-1]) x))
  8304  	// cond:
  8305  	// result: (CMOVZ (ADDconst <x.Type> [-1] x) (MOVWconst [0]) x)
  8306  	for {
  8307  		v_0 := v.Args[0]
  8308  		if v_0.Op != OpMIPSMULTU {
  8309  			break
  8310  		}
  8311  		_ = v_0.Args[1]
  8312  		v_0_0 := v_0.Args[0]
  8313  		if v_0_0.Op != OpMIPSMOVWconst {
  8314  			break
  8315  		}
  8316  		if v_0_0.AuxInt != -1 {
  8317  			break
  8318  		}
  8319  		x := v_0.Args[1]
  8320  		v.reset(OpMIPSCMOVZ)
  8321  		v0 := b.NewValue0(v.Pos, OpMIPSADDconst, x.Type)
  8322  		v0.AuxInt = -1
  8323  		v0.AddArg(x)
  8324  		v.AddArg(v0)
  8325  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  8326  		v1.AuxInt = 0
  8327  		v.AddArg(v1)
  8328  		v.AddArg(x)
  8329  		return true
  8330  	}
  8331  	// match: (Select0 (MULTU x (MOVWconst [-1])))
  8332  	// cond:
  8333  	// result: (CMOVZ (ADDconst <x.Type> [-1] x) (MOVWconst [0]) x)
  8334  	for {
  8335  		v_0 := v.Args[0]
  8336  		if v_0.Op != OpMIPSMULTU {
  8337  			break
  8338  		}
  8339  		_ = v_0.Args[1]
  8340  		x := v_0.Args[0]
  8341  		v_0_1 := v_0.Args[1]
  8342  		if v_0_1.Op != OpMIPSMOVWconst {
  8343  			break
  8344  		}
  8345  		if v_0_1.AuxInt != -1 {
  8346  			break
  8347  		}
  8348  		v.reset(OpMIPSCMOVZ)
  8349  		v0 := b.NewValue0(v.Pos, OpMIPSADDconst, x.Type)
  8350  		v0.AuxInt = -1
  8351  		v0.AddArg(x)
  8352  		v.AddArg(v0)
  8353  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  8354  		v1.AuxInt = 0
  8355  		v.AddArg(v1)
  8356  		v.AddArg(x)
  8357  		return true
  8358  	}
  8359  	// match: (Select0 (MULTU (MOVWconst [c]) x))
  8360  	// cond: isPowerOfTwo(int64(uint32(c)))
  8361  	// result: (SRLconst [32-log2(int64(uint32(c)))] x)
  8362  	for {
  8363  		v_0 := v.Args[0]
  8364  		if v_0.Op != OpMIPSMULTU {
  8365  			break
  8366  		}
  8367  		_ = v_0.Args[1]
  8368  		v_0_0 := v_0.Args[0]
  8369  		if v_0_0.Op != OpMIPSMOVWconst {
  8370  			break
  8371  		}
  8372  		c := v_0_0.AuxInt
  8373  		x := v_0.Args[1]
  8374  		if !(isPowerOfTwo(int64(uint32(c)))) {
  8375  			break
  8376  		}
  8377  		v.reset(OpMIPSSRLconst)
  8378  		v.AuxInt = 32 - log2(int64(uint32(c)))
  8379  		v.AddArg(x)
  8380  		return true
  8381  	}
  8382  	// match: (Select0 (MULTU x (MOVWconst [c])))
  8383  	// cond: isPowerOfTwo(int64(uint32(c)))
  8384  	// result: (SRLconst [32-log2(int64(uint32(c)))] x)
  8385  	for {
  8386  		v_0 := v.Args[0]
  8387  		if v_0.Op != OpMIPSMULTU {
  8388  			break
  8389  		}
  8390  		_ = v_0.Args[1]
  8391  		x := v_0.Args[0]
  8392  		v_0_1 := v_0.Args[1]
  8393  		if v_0_1.Op != OpMIPSMOVWconst {
  8394  			break
  8395  		}
  8396  		c := v_0_1.AuxInt
  8397  		if !(isPowerOfTwo(int64(uint32(c)))) {
  8398  			break
  8399  		}
  8400  		v.reset(OpMIPSSRLconst)
  8401  		v.AuxInt = 32 - log2(int64(uint32(c)))
  8402  		v.AddArg(x)
  8403  		return true
  8404  	}
  8405  	return false
  8406  }
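        // Select0 and Select1 project the HI/LO register pair: for MULTU,
        // Select0 is the high 32 bits of the 64-bit product and Select1 the
        // low 32 bits; for DIV and DIVU, Select0 is the remainder and Select1
        // the quotient. The MULTU rules above use that to strength-reduce:
        // multiplying by 0 or 1 makes the high word 0, multiplying by -1
        // (that is, 2^32-1) makes it x-1 unless x is 0, and multiplying by a
        // power of two 2^k makes it x >> (32-k) (for c = 8, the high word of
        // 8*x is x>>29). The next function folds the fully constant operands.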
  8407  func rewriteValueMIPS_OpSelect0_10(v *Value) bool {
  8408  	// match: (Select0 (MULTU (MOVWconst [c]) (MOVWconst [d])))
  8409  	// cond:
  8410  	// result: (MOVWconst [(c*d)>>32])
  8411  	for {
  8412  		v_0 := v.Args[0]
  8413  		if v_0.Op != OpMIPSMULTU {
  8414  			break
  8415  		}
  8416  		_ = v_0.Args[1]
  8417  		v_0_0 := v_0.Args[0]
  8418  		if v_0_0.Op != OpMIPSMOVWconst {
  8419  			break
  8420  		}
  8421  		c := v_0_0.AuxInt
  8422  		v_0_1 := v_0.Args[1]
  8423  		if v_0_1.Op != OpMIPSMOVWconst {
  8424  			break
  8425  		}
  8426  		d := v_0_1.AuxInt
  8427  		v.reset(OpMIPSMOVWconst)
  8428  		v.AuxInt = (c * d) >> 32
  8429  		return true
  8430  	}
  8431  	// match: (Select0 (MULTU (MOVWconst [d]) (MOVWconst [c])))
  8432  	// cond:
  8433  	// result: (MOVWconst [(c*d)>>32])
  8434  	for {
  8435  		v_0 := v.Args[0]
  8436  		if v_0.Op != OpMIPSMULTU {
  8437  			break
  8438  		}
  8439  		_ = v_0.Args[1]
  8440  		v_0_0 := v_0.Args[0]
  8441  		if v_0_0.Op != OpMIPSMOVWconst {
  8442  			break
  8443  		}
  8444  		d := v_0_0.AuxInt
  8445  		v_0_1 := v_0.Args[1]
  8446  		if v_0_1.Op != OpMIPSMOVWconst {
  8447  			break
  8448  		}
  8449  		c := v_0_1.AuxInt
  8450  		v.reset(OpMIPSMOVWconst)
  8451  		v.AuxInt = (c * d) >> 32
  8452  		return true
  8453  	}
  8454  	// match: (Select0 (DIV (MOVWconst [c]) (MOVWconst [d])))
  8455  	// cond:
  8456  	// result: (MOVWconst [int64(int32(c)%int32(d))])
  8457  	for {
  8458  		v_0 := v.Args[0]
  8459  		if v_0.Op != OpMIPSDIV {
  8460  			break
  8461  		}
  8462  		_ = v_0.Args[1]
  8463  		v_0_0 := v_0.Args[0]
  8464  		if v_0_0.Op != OpMIPSMOVWconst {
  8465  			break
  8466  		}
  8467  		c := v_0_0.AuxInt
  8468  		v_0_1 := v_0.Args[1]
  8469  		if v_0_1.Op != OpMIPSMOVWconst {
  8470  			break
  8471  		}
  8472  		d := v_0_1.AuxInt
  8473  		v.reset(OpMIPSMOVWconst)
  8474  		v.AuxInt = int64(int32(c) % int32(d))
  8475  		return true
  8476  	}
  8477  	// match: (Select0 (DIVU (MOVWconst [c]) (MOVWconst [d])))
  8478  	// cond:
  8479  	// result: (MOVWconst [int64(int32(uint32(c)%uint32(d)))])
  8480  	for {
  8481  		v_0 := v.Args[0]
  8482  		if v_0.Op != OpMIPSDIVU {
  8483  			break
  8484  		}
  8485  		_ = v_0.Args[1]
  8486  		v_0_0 := v_0.Args[0]
  8487  		if v_0_0.Op != OpMIPSMOVWconst {
  8488  			break
  8489  		}
  8490  		c := v_0_0.AuxInt
  8491  		v_0_1 := v_0.Args[1]
  8492  		if v_0_1.Op != OpMIPSMOVWconst {
  8493  			break
  8494  		}
  8495  		d := v_0_1.AuxInt
  8496  		v.reset(OpMIPSMOVWconst)
  8497  		v.AuxInt = int64(int32(uint32(c) % uint32(d)))
  8498  		return true
  8499  	}
  8500  	return false
  8501  }
  8502  func rewriteValueMIPS_OpSelect1_0(v *Value) bool {
  8503  	b := v.Block
  8504  	_ = b
  8505  	typ := &b.Func.Config.Types
  8506  	_ = typ
  8507  	// match: (Select1 (Add32carry <t> x y))
  8508  	// cond:
  8509  	// result: (SGTU <typ.Bool> x (ADD <t.FieldType(0)> x y))
  8510  	for {
  8511  		v_0 := v.Args[0]
  8512  		if v_0.Op != OpAdd32carry {
  8513  			break
  8514  		}
  8515  		t := v_0.Type
  8516  		_ = v_0.Args[1]
  8517  		x := v_0.Args[0]
  8518  		y := v_0.Args[1]
  8519  		v.reset(OpMIPSSGTU)
  8520  		v.Type = typ.Bool
  8521  		v.AddArg(x)
  8522  		v0 := b.NewValue0(v.Pos, OpMIPSADD, t.FieldType(0))
  8523  		v0.AddArg(x)
  8524  		v0.AddArg(y)
  8525  		v.AddArg(v0)
  8526  		return true
  8527  	}
  8528  	// match: (Select1 (Sub32carry <t> x y))
  8529  	// cond:
  8530  	// result: (SGTU <typ.Bool> (SUB <t.FieldType(0)> x y) x)
  8531  	for {
  8532  		v_0 := v.Args[0]
  8533  		if v_0.Op != OpSub32carry {
  8534  			break
  8535  		}
  8536  		t := v_0.Type
  8537  		_ = v_0.Args[1]
  8538  		x := v_0.Args[0]
  8539  		y := v_0.Args[1]
  8540  		v.reset(OpMIPSSGTU)
  8541  		v.Type = typ.Bool
  8542  		v0 := b.NewValue0(v.Pos, OpMIPSSUB, t.FieldType(0))
  8543  		v0.AddArg(x)
  8544  		v0.AddArg(y)
  8545  		v.AddArg(v0)
  8546  		v.AddArg(x)
  8547  		return true
  8548  	}
  8549  	// match: (Select1 (MULTU (MOVWconst [0]) _))
  8550  	// cond:
  8551  	// result: (MOVWconst [0])
  8552  	for {
  8553  		v_0 := v.Args[0]
  8554  		if v_0.Op != OpMIPSMULTU {
  8555  			break
  8556  		}
  8557  		_ = v_0.Args[1]
  8558  		v_0_0 := v_0.Args[0]
  8559  		if v_0_0.Op != OpMIPSMOVWconst {
  8560  			break
  8561  		}
  8562  		if v_0_0.AuxInt != 0 {
  8563  			break
  8564  		}
  8565  		v.reset(OpMIPSMOVWconst)
  8566  		v.AuxInt = 0
  8567  		return true
  8568  	}
  8569  	// match: (Select1 (MULTU _ (MOVWconst [0])))
  8570  	// cond:
  8571  	// result: (MOVWconst [0])
  8572  	for {
  8573  		v_0 := v.Args[0]
  8574  		if v_0.Op != OpMIPSMULTU {
  8575  			break
  8576  		}
  8577  		_ = v_0.Args[1]
  8578  		v_0_1 := v_0.Args[1]
  8579  		if v_0_1.Op != OpMIPSMOVWconst {
  8580  			break
  8581  		}
  8582  		if v_0_1.AuxInt != 0 {
  8583  			break
  8584  		}
  8585  		v.reset(OpMIPSMOVWconst)
  8586  		v.AuxInt = 0
  8587  		return true
  8588  	}
  8589  	// match: (Select1 (MULTU (MOVWconst [1]) x))
  8590  	// cond:
  8591  	// result: x
  8592  	for {
  8593  		v_0 := v.Args[0]
  8594  		if v_0.Op != OpMIPSMULTU {
  8595  			break
  8596  		}
  8597  		_ = v_0.Args[1]
  8598  		v_0_0 := v_0.Args[0]
  8599  		if v_0_0.Op != OpMIPSMOVWconst {
  8600  			break
  8601  		}
  8602  		if v_0_0.AuxInt != 1 {
  8603  			break
  8604  		}
  8605  		x := v_0.Args[1]
  8606  		v.reset(OpCopy)
  8607  		v.Type = x.Type
  8608  		v.AddArg(x)
  8609  		return true
  8610  	}
  8611  	// match: (Select1 (MULTU x (MOVWconst [1])))
  8612  	// cond:
  8613  	// result: x
  8614  	for {
  8615  		v_0 := v.Args[0]
  8616  		if v_0.Op != OpMIPSMULTU {
  8617  			break
  8618  		}
  8619  		_ = v_0.Args[1]
  8620  		x := v_0.Args[0]
  8621  		v_0_1 := v_0.Args[1]
  8622  		if v_0_1.Op != OpMIPSMOVWconst {
  8623  			break
  8624  		}
  8625  		if v_0_1.AuxInt != 1 {
  8626  			break
  8627  		}
  8628  		v.reset(OpCopy)
  8629  		v.Type = x.Type
  8630  		v.AddArg(x)
  8631  		return true
  8632  	}
  8633  	// match: (Select1 (MULTU (MOVWconst [-1]) x))
  8634  	// cond:
  8635  	// result: (NEG <x.Type> x)
  8636  	for {
  8637  		v_0 := v.Args[0]
  8638  		if v_0.Op != OpMIPSMULTU {
  8639  			break
  8640  		}
  8641  		_ = v_0.Args[1]
  8642  		v_0_0 := v_0.Args[0]
  8643  		if v_0_0.Op != OpMIPSMOVWconst {
  8644  			break
  8645  		}
  8646  		if v_0_0.AuxInt != -1 {
  8647  			break
  8648  		}
  8649  		x := v_0.Args[1]
  8650  		v.reset(OpMIPSNEG)
  8651  		v.Type = x.Type
  8652  		v.AddArg(x)
  8653  		return true
  8654  	}
  8655  	// match: (Select1 (MULTU x (MOVWconst [-1])))
  8656  	// cond:
  8657  	// result: (NEG <x.Type> x)
  8658  	for {
  8659  		v_0 := v.Args[0]
  8660  		if v_0.Op != OpMIPSMULTU {
  8661  			break
  8662  		}
  8663  		_ = v_0.Args[1]
  8664  		x := v_0.Args[0]
  8665  		v_0_1 := v_0.Args[1]
  8666  		if v_0_1.Op != OpMIPSMOVWconst {
  8667  			break
  8668  		}
  8669  		if v_0_1.AuxInt != -1 {
  8670  			break
  8671  		}
  8672  		v.reset(OpMIPSNEG)
  8673  		v.Type = x.Type
  8674  		v.AddArg(x)
  8675  		return true
  8676  	}
  8677  	// match: (Select1 (MULTU (MOVWconst [c]) x))
  8678  	// cond: isPowerOfTwo(int64(uint32(c)))
  8679  	// result: (SLLconst [log2(int64(uint32(c)))] x)
  8680  	for {
  8681  		v_0 := v.Args[0]
  8682  		if v_0.Op != OpMIPSMULTU {
  8683  			break
  8684  		}
  8685  		_ = v_0.Args[1]
  8686  		v_0_0 := v_0.Args[0]
  8687  		if v_0_0.Op != OpMIPSMOVWconst {
  8688  			break
  8689  		}
  8690  		c := v_0_0.AuxInt
  8691  		x := v_0.Args[1]
  8692  		if !(isPowerOfTwo(int64(uint32(c)))) {
  8693  			break
  8694  		}
  8695  		v.reset(OpMIPSSLLconst)
  8696  		v.AuxInt = log2(int64(uint32(c)))
  8697  		v.AddArg(x)
  8698  		return true
  8699  	}
  8700  	// match: (Select1 (MULTU x (MOVWconst [c])))
  8701  	// cond: isPowerOfTwo(int64(uint32(c)))
  8702  	// result: (SLLconst [log2(int64(uint32(c)))] x)
  8703  	for {
  8704  		v_0 := v.Args[0]
  8705  		if v_0.Op != OpMIPSMULTU {
  8706  			break
  8707  		}
  8708  		_ = v_0.Args[1]
  8709  		x := v_0.Args[0]
  8710  		v_0_1 := v_0.Args[1]
  8711  		if v_0_1.Op != OpMIPSMOVWconst {
  8712  			break
  8713  		}
  8714  		c := v_0_1.AuxInt
  8715  		if !(isPowerOfTwo(int64(uint32(c)))) {
  8716  			break
  8717  		}
  8718  		v.reset(OpMIPSSLLconst)
  8719  		v.AuxInt = log2(int64(uint32(c)))
  8720  		v.AddArg(x)
  8721  		return true
  8722  	}
  8723  	return false
  8724  }
  8725  func rewriteValueMIPS_OpSelect1_10(v *Value) bool {
  8726  	// match: (Select1 (MULTU (MOVWconst [c]) (MOVWconst [d])))
  8727  	// cond:
  8728  	// result: (MOVWconst [int64(int32(uint32(c)*uint32(d)))])
  8729  	for {
  8730  		v_0 := v.Args[0]
  8731  		if v_0.Op != OpMIPSMULTU {
  8732  			break
  8733  		}
  8734  		_ = v_0.Args[1]
  8735  		v_0_0 := v_0.Args[0]
  8736  		if v_0_0.Op != OpMIPSMOVWconst {
  8737  			break
  8738  		}
  8739  		c := v_0_0.AuxInt
  8740  		v_0_1 := v_0.Args[1]
  8741  		if v_0_1.Op != OpMIPSMOVWconst {
  8742  			break
  8743  		}
  8744  		d := v_0_1.AuxInt
  8745  		v.reset(OpMIPSMOVWconst)
  8746  		v.AuxInt = int64(int32(uint32(c) * uint32(d)))
  8747  		return true
  8748  	}
  8749  	// match: (Select1 (MULTU (MOVWconst [d]) (MOVWconst [c])))
  8750  	// cond:
  8751  	// result: (MOVWconst [int64(int32(uint32(c)*uint32(d)))])
  8752  	for {
  8753  		v_0 := v.Args[0]
  8754  		if v_0.Op != OpMIPSMULTU {
  8755  			break
  8756  		}
  8757  		_ = v_0.Args[1]
  8758  		v_0_0 := v_0.Args[0]
  8759  		if v_0_0.Op != OpMIPSMOVWconst {
  8760  			break
  8761  		}
  8762  		d := v_0_0.AuxInt
  8763  		v_0_1 := v_0.Args[1]
  8764  		if v_0_1.Op != OpMIPSMOVWconst {
  8765  			break
  8766  		}
  8767  		c := v_0_1.AuxInt
  8768  		v.reset(OpMIPSMOVWconst)
  8769  		v.AuxInt = int64(int32(uint32(c) * uint32(d)))
  8770  		return true
  8771  	}
  8772  	// match: (Select1 (DIV (MOVWconst [c]) (MOVWconst [d])))
  8773  	// cond:
  8774  	// result: (MOVWconst [int64(int32(c)/int32(d))])
  8775  	for {
  8776  		v_0 := v.Args[0]
  8777  		if v_0.Op != OpMIPSDIV {
  8778  			break
  8779  		}
  8780  		_ = v_0.Args[1]
  8781  		v_0_0 := v_0.Args[0]
  8782  		if v_0_0.Op != OpMIPSMOVWconst {
  8783  			break
  8784  		}
  8785  		c := v_0_0.AuxInt
  8786  		v_0_1 := v_0.Args[1]
  8787  		if v_0_1.Op != OpMIPSMOVWconst {
  8788  			break
  8789  		}
  8790  		d := v_0_1.AuxInt
  8791  		v.reset(OpMIPSMOVWconst)
  8792  		v.AuxInt = int64(int32(c) / int32(d))
  8793  		return true
  8794  	}
  8795  	// match: (Select1 (DIVU (MOVWconst [c]) (MOVWconst [d])))
  8796  	// cond:
  8797  	// result: (MOVWconst [int64(int32(uint32(c)/uint32(d)))])
  8798  	for {
  8799  		v_0 := v.Args[0]
  8800  		if v_0.Op != OpMIPSDIVU {
  8801  			break
  8802  		}
  8803  		_ = v_0.Args[1]
  8804  		v_0_0 := v_0.Args[0]
  8805  		if v_0_0.Op != OpMIPSMOVWconst {
  8806  			break
  8807  		}
  8808  		c := v_0_0.AuxInt
  8809  		v_0_1 := v_0.Args[1]
  8810  		if v_0_1.Op != OpMIPSMOVWconst {
  8811  			break
  8812  		}
  8813  		d := v_0_1.AuxInt
  8814  		v.reset(OpMIPSMOVWconst)
  8815  		v.AuxInt = int64(int32(uint32(c) / uint32(d)))
  8816  		return true
  8817  	}
  8818  	return false
  8819  }
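        // The constant folds above do their arithmetic at 32 bits and then
        // re-widen for AuxInt, which stores a sign-extended 32-bit constant in
        // an int64: the low word of a product is
        // int64(int32(uint32(c)*uint32(d))), signed division is
        // int64(int32(c)/int32(d)), and the unsigned cases narrow through
        // uint32 first. The order of the conversions is what keeps the folded
        // MOVWconst consistent with what MULTU, DIV and DIVU would compute at
        // run time.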
  8820  func rewriteValueMIPS_OpSignExt16to32_0(v *Value) bool {
  8821  	// match: (SignExt16to32 x)
  8822  	// cond:
  8823  	// result: (MOVHreg x)
  8824  	for {
  8825  		x := v.Args[0]
  8826  		v.reset(OpMIPSMOVHreg)
  8827  		v.AddArg(x)
  8828  		return true
  8829  	}
  8830  }
  8831  func rewriteValueMIPS_OpSignExt8to16_0(v *Value) bool {
  8832  	// match: (SignExt8to16 x)
  8833  	// cond:
  8834  	// result: (MOVBreg x)
  8835  	for {
  8836  		x := v.Args[0]
  8837  		v.reset(OpMIPSMOVBreg)
  8838  		v.AddArg(x)
  8839  		return true
  8840  	}
  8841  }
  8842  func rewriteValueMIPS_OpSignExt8to32_0(v *Value) bool {
  8843  	// match: (SignExt8to32 x)
  8844  	// cond:
  8845  	// result: (MOVBreg x)
  8846  	for {
  8847  		x := v.Args[0]
  8848  		v.reset(OpMIPSMOVBreg)
  8849  		v.AddArg(x)
  8850  		return true
  8851  	}
  8852  }
  8853  func rewriteValueMIPS_OpSignmask_0(v *Value) bool {
  8854  	// match: (Signmask x)
  8855  	// cond:
  8856  	// result: (SRAconst x [31])
  8857  	for {
  8858  		x := v.Args[0]
  8859  		v.reset(OpMIPSSRAconst)
  8860  		v.AuxInt = 31
  8861  		v.AddArg(x)
  8862  		return true
  8863  	}
  8864  }
  8865  func rewriteValueMIPS_OpSlicemask_0(v *Value) bool {
  8866  	b := v.Block
  8867  	_ = b
  8868  	// match: (Slicemask <t> x)
  8869  	// cond:
  8870  	// result: (SRAconst (NEG <t> x) [31])
  8871  	for {
  8872  		t := v.Type
  8873  		x := v.Args[0]
  8874  		v.reset(OpMIPSSRAconst)
  8875  		v.AuxInt = 31
  8876  		v0 := b.NewValue0(v.Pos, OpMIPSNEG, t)
  8877  		v0.AddArg(x)
  8878  		v.AddArg(v0)
  8879  		return true
  8880  	}
  8881  }
  8882  func rewriteValueMIPS_OpSqrt_0(v *Value) bool {
  8883  	// match: (Sqrt x)
  8884  	// cond:
  8885  	// result: (SQRTD x)
  8886  	for {
  8887  		x := v.Args[0]
  8888  		v.reset(OpMIPSSQRTD)
  8889  		v.AddArg(x)
  8890  		return true
  8891  	}
  8892  }
  8893  func rewriteValueMIPS_OpStaticCall_0(v *Value) bool {
  8894  	// match: (StaticCall [argwid] {target} mem)
  8895  	// cond:
  8896  	// result: (CALLstatic [argwid] {target} mem)
  8897  	for {
  8898  		argwid := v.AuxInt
  8899  		target := v.Aux
  8900  		mem := v.Args[0]
  8901  		v.reset(OpMIPSCALLstatic)
  8902  		v.AuxInt = argwid
  8903  		v.Aux = target
  8904  		v.AddArg(mem)
  8905  		return true
  8906  	}
  8907  }
  8908  func rewriteValueMIPS_OpStore_0(v *Value) bool {
  8909  	// match: (Store {t} ptr val mem)
  8910  	// cond: t.(*types.Type).Size() == 1
  8911  	// result: (MOVBstore ptr val mem)
  8912  	for {
  8913  		t := v.Aux
  8914  		_ = v.Args[2]
  8915  		ptr := v.Args[0]
  8916  		val := v.Args[1]
  8917  		mem := v.Args[2]
  8918  		if !(t.(*types.Type).Size() == 1) {
  8919  			break
  8920  		}
  8921  		v.reset(OpMIPSMOVBstore)
  8922  		v.AddArg(ptr)
  8923  		v.AddArg(val)
  8924  		v.AddArg(mem)
  8925  		return true
  8926  	}
  8927  	// match: (Store {t} ptr val mem)
  8928  	// cond: t.(*types.Type).Size() == 2
  8929  	// result: (MOVHstore ptr val mem)
  8930  	for {
  8931  		t := v.Aux
  8932  		_ = v.Args[2]
  8933  		ptr := v.Args[0]
  8934  		val := v.Args[1]
  8935  		mem := v.Args[2]
  8936  		if !(t.(*types.Type).Size() == 2) {
  8937  			break
  8938  		}
  8939  		v.reset(OpMIPSMOVHstore)
  8940  		v.AddArg(ptr)
  8941  		v.AddArg(val)
  8942  		v.AddArg(mem)
  8943  		return true
  8944  	}
  8945  	// match: (Store {t} ptr val mem)
  8946  	// cond: t.(*types.Type).Size() == 4 && !is32BitFloat(val.Type)
  8947  	// result: (MOVWstore ptr val mem)
  8948  	for {
  8949  		t := v.Aux
  8950  		_ = v.Args[2]
  8951  		ptr := v.Args[0]
  8952  		val := v.Args[1]
  8953  		mem := v.Args[2]
  8954  		if !(t.(*types.Type).Size() == 4 && !is32BitFloat(val.Type)) {
  8955  			break
  8956  		}
  8957  		v.reset(OpMIPSMOVWstore)
  8958  		v.AddArg(ptr)
  8959  		v.AddArg(val)
  8960  		v.AddArg(mem)
  8961  		return true
  8962  	}
  8963  	// match: (Store {t} ptr val mem)
  8964  	// cond: t.(*types.Type).Size() == 4 && is32BitFloat(val.Type)
  8965  	// result: (MOVFstore ptr val mem)
  8966  	for {
  8967  		t := v.Aux
  8968  		_ = v.Args[2]
  8969  		ptr := v.Args[0]
  8970  		val := v.Args[1]
  8971  		mem := v.Args[2]
  8972  		if !(t.(*types.Type).Size() == 4 && is32BitFloat(val.Type)) {
  8973  			break
  8974  		}
  8975  		v.reset(OpMIPSMOVFstore)
  8976  		v.AddArg(ptr)
  8977  		v.AddArg(val)
  8978  		v.AddArg(mem)
  8979  		return true
  8980  	}
  8981  	// match: (Store {t} ptr val mem)
  8982  	// cond: t.(*types.Type).Size() == 8 && is64BitFloat(val.Type)
  8983  	// result: (MOVDstore ptr val mem)
  8984  	for {
  8985  		t := v.Aux
  8986  		_ = v.Args[2]
  8987  		ptr := v.Args[0]
  8988  		val := v.Args[1]
  8989  		mem := v.Args[2]
  8990  		if !(t.(*types.Type).Size() == 8 && is64BitFloat(val.Type)) {
  8991  			break
  8992  		}
  8993  		v.reset(OpMIPSMOVDstore)
  8994  		v.AddArg(ptr)
  8995  		v.AddArg(val)
  8996  		v.AddArg(mem)
  8997  		return true
  8998  	}
  8999  	return false
  9000  }
  9001  func rewriteValueMIPS_OpSub16_0(v *Value) bool {
  9002  	// match: (Sub16 x y)
  9003  	// cond:
  9004  	// result: (SUB x y)
  9005  	for {
  9006  		_ = v.Args[1]
  9007  		x := v.Args[0]
  9008  		y := v.Args[1]
  9009  		v.reset(OpMIPSSUB)
  9010  		v.AddArg(x)
  9011  		v.AddArg(y)
  9012  		return true
  9013  	}
  9014  }
  9015  func rewriteValueMIPS_OpSub32_0(v *Value) bool {
  9016  	// match: (Sub32 x y)
  9017  	// cond:
  9018  	// result: (SUB x y)
  9019  	for {
  9020  		_ = v.Args[1]
  9021  		x := v.Args[0]
  9022  		y := v.Args[1]
  9023  		v.reset(OpMIPSSUB)
  9024  		v.AddArg(x)
  9025  		v.AddArg(y)
  9026  		return true
  9027  	}
  9028  }
  9029  func rewriteValueMIPS_OpSub32F_0(v *Value) bool {
  9030  	// match: (Sub32F x y)
  9031  	// cond:
  9032  	// result: (SUBF x y)
  9033  	for {
  9034  		_ = v.Args[1]
  9035  		x := v.Args[0]
  9036  		y := v.Args[1]
  9037  		v.reset(OpMIPSSUBF)
  9038  		v.AddArg(x)
  9039  		v.AddArg(y)
  9040  		return true
  9041  	}
  9042  }
  9043  func rewriteValueMIPS_OpSub32withcarry_0(v *Value) bool {
  9044  	b := v.Block
  9045  	_ = b
  9046  	// match: (Sub32withcarry <t> x y c)
  9047  	// cond:
  9048  	// result: (SUB (SUB <t> x y) c)
  9049  	for {
  9050  		t := v.Type
  9051  		_ = v.Args[2]
  9052  		x := v.Args[0]
  9053  		y := v.Args[1]
  9054  		c := v.Args[2]
  9055  		v.reset(OpMIPSSUB)
  9056  		v0 := b.NewValue0(v.Pos, OpMIPSSUB, t)
  9057  		v0.AddArg(x)
  9058  		v0.AddArg(y)
  9059  		v.AddArg(v0)
  9060  		v.AddArg(c)
  9061  		return true
  9062  	}
  9063  }
  9064  func rewriteValueMIPS_OpSub64F_0(v *Value) bool {
  9065  	// match: (Sub64F x y)
  9066  	// cond:
  9067  	// result: (SUBD x y)
  9068  	for {
  9069  		_ = v.Args[1]
  9070  		x := v.Args[0]
  9071  		y := v.Args[1]
  9072  		v.reset(OpMIPSSUBD)
  9073  		v.AddArg(x)
  9074  		v.AddArg(y)
  9075  		return true
  9076  	}
  9077  }
  9078  func rewriteValueMIPS_OpSub8_0(v *Value) bool {
  9079  	// match: (Sub8 x y)
  9080  	// cond:
  9081  	// result: (SUB x y)
  9082  	for {
  9083  		_ = v.Args[1]
  9084  		x := v.Args[0]
  9085  		y := v.Args[1]
  9086  		v.reset(OpMIPSSUB)
  9087  		v.AddArg(x)
  9088  		v.AddArg(y)
  9089  		return true
  9090  	}
  9091  }
  9092  func rewriteValueMIPS_OpSubPtr_0(v *Value) bool {
  9093  	// match: (SubPtr x y)
  9094  	// cond:
  9095  	// result: (SUB x y)
  9096  	for {
  9097  		_ = v.Args[1]
  9098  		x := v.Args[0]
  9099  		y := v.Args[1]
  9100  		v.reset(OpMIPSSUB)
  9101  		v.AddArg(x)
  9102  		v.AddArg(y)
  9103  		return true
  9104  	}
  9105  }
  9106  func rewriteValueMIPS_OpTrunc16to8_0(v *Value) bool {
  9107  	// match: (Trunc16to8 x)
  9108  	// cond:
  9109  	// result: x
  9110  	for {
  9111  		x := v.Args[0]
  9112  		v.reset(OpCopy)
  9113  		v.Type = x.Type
  9114  		v.AddArg(x)
  9115  		return true
  9116  	}
  9117  }
  9118  func rewriteValueMIPS_OpTrunc32to16_0(v *Value) bool {
  9119  	// match: (Trunc32to16 x)
  9120  	// cond:
  9121  	// result: x
  9122  	for {
  9123  		x := v.Args[0]
  9124  		v.reset(OpCopy)
  9125  		v.Type = x.Type
  9126  		v.AddArg(x)
  9127  		return true
  9128  	}
  9129  }
  9130  func rewriteValueMIPS_OpTrunc32to8_0(v *Value) bool {
  9131  	// match: (Trunc32to8 x)
  9132  	// cond:
  9133  	// result: x
  9134  	for {
  9135  		x := v.Args[0]
  9136  		v.reset(OpCopy)
  9137  		v.Type = x.Type
  9138  		v.AddArg(x)
  9139  		return true
  9140  	}
  9141  }
  9142  func rewriteValueMIPS_OpWB_0(v *Value) bool {
  9143  	// match: (WB {fn} destptr srcptr mem)
  9144  	// cond:
  9145  	// result: (LoweredWB {fn} destptr srcptr mem)
  9146  	for {
  9147  		fn := v.Aux
  9148  		_ = v.Args[2]
  9149  		destptr := v.Args[0]
  9150  		srcptr := v.Args[1]
  9151  		mem := v.Args[2]
  9152  		v.reset(OpMIPSLoweredWB)
  9153  		v.Aux = fn
  9154  		v.AddArg(destptr)
  9155  		v.AddArg(srcptr)
  9156  		v.AddArg(mem)
  9157  		return true
  9158  	}
  9159  }
  9160  func rewriteValueMIPS_OpXor16_0(v *Value) bool {
  9161  	// match: (Xor16 x y)
  9162  	// cond:
  9163  	// result: (XOR x y)
  9164  	for {
  9165  		_ = v.Args[1]
  9166  		x := v.Args[0]
  9167  		y := v.Args[1]
  9168  		v.reset(OpMIPSXOR)
  9169  		v.AddArg(x)
  9170  		v.AddArg(y)
  9171  		return true
  9172  	}
  9173  }
  9174  func rewriteValueMIPS_OpXor32_0(v *Value) bool {
  9175  	// match: (Xor32 x y)
  9176  	// cond:
  9177  	// result: (XOR x y)
  9178  	for {
  9179  		_ = v.Args[1]
  9180  		x := v.Args[0]
  9181  		y := v.Args[1]
  9182  		v.reset(OpMIPSXOR)
  9183  		v.AddArg(x)
  9184  		v.AddArg(y)
  9185  		return true
  9186  	}
  9187  }
  9188  func rewriteValueMIPS_OpXor8_0(v *Value) bool {
  9189  	// match: (Xor8 x y)
  9190  	// cond:
  9191  	// result: (XOR x y)
  9192  	for {
  9193  		_ = v.Args[1]
  9194  		x := v.Args[0]
  9195  		y := v.Args[1]
  9196  		v.reset(OpMIPSXOR)
  9197  		v.AddArg(x)
  9198  		v.AddArg(y)
  9199  		return true
  9200  	}
  9201  }
  9202  func rewriteValueMIPS_OpZero_0(v *Value) bool {
  9203  	b := v.Block
  9204  	_ = b
  9205  	typ := &b.Func.Config.Types
  9206  	_ = typ
  9207  	// match: (Zero [0] _ mem)
  9208  	// cond:
  9209  	// result: mem
  9210  	for {
  9211  		if v.AuxInt != 0 {
  9212  			break
  9213  		}
  9214  		_ = v.Args[1]
  9215  		mem := v.Args[1]
  9216  		v.reset(OpCopy)
  9217  		v.Type = mem.Type
  9218  		v.AddArg(mem)
  9219  		return true
  9220  	}
  9221  	// match: (Zero [1] ptr mem)
  9222  	// cond:
  9223  	// result: (MOVBstore ptr (MOVWconst [0]) mem)
  9224  	for {
  9225  		if v.AuxInt != 1 {
  9226  			break
  9227  		}
  9228  		_ = v.Args[1]
  9229  		ptr := v.Args[0]
  9230  		mem := v.Args[1]
  9231  		v.reset(OpMIPSMOVBstore)
  9232  		v.AddArg(ptr)
  9233  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9234  		v0.AuxInt = 0
  9235  		v.AddArg(v0)
  9236  		v.AddArg(mem)
  9237  		return true
  9238  	}
  9239  	// match: (Zero [2] {t} ptr mem)
  9240  	// cond: t.(*types.Type).Alignment()%2 == 0
  9241  	// result: (MOVHstore ptr (MOVWconst [0]) mem)
  9242  	for {
  9243  		if v.AuxInt != 2 {
  9244  			break
  9245  		}
  9246  		t := v.Aux
  9247  		_ = v.Args[1]
  9248  		ptr := v.Args[0]
  9249  		mem := v.Args[1]
  9250  		if !(t.(*types.Type).Alignment()%2 == 0) {
  9251  			break
  9252  		}
  9253  		v.reset(OpMIPSMOVHstore)
  9254  		v.AddArg(ptr)
  9255  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9256  		v0.AuxInt = 0
  9257  		v.AddArg(v0)
  9258  		v.AddArg(mem)
  9259  		return true
  9260  	}
  9261  	// match: (Zero [2] ptr mem)
  9262  	// cond:
  9263  	// result: (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem))
  9264  	for {
  9265  		if v.AuxInt != 2 {
  9266  			break
  9267  		}
  9268  		_ = v.Args[1]
  9269  		ptr := v.Args[0]
  9270  		mem := v.Args[1]
  9271  		v.reset(OpMIPSMOVBstore)
  9272  		v.AuxInt = 1
  9273  		v.AddArg(ptr)
  9274  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9275  		v0.AuxInt = 0
  9276  		v.AddArg(v0)
  9277  		v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  9278  		v1.AuxInt = 0
  9279  		v1.AddArg(ptr)
  9280  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9281  		v2.AuxInt = 0
  9282  		v1.AddArg(v2)
  9283  		v1.AddArg(mem)
  9284  		v.AddArg(v1)
  9285  		return true
  9286  	}
  9287  	// match: (Zero [4] {t} ptr mem)
  9288  	// cond: t.(*types.Type).Alignment()%4 == 0
  9289  	// result: (MOVWstore ptr (MOVWconst [0]) mem)
  9290  	for {
  9291  		if v.AuxInt != 4 {
  9292  			break
  9293  		}
  9294  		t := v.Aux
  9295  		_ = v.Args[1]
  9296  		ptr := v.Args[0]
  9297  		mem := v.Args[1]
  9298  		if !(t.(*types.Type).Alignment()%4 == 0) {
  9299  			break
  9300  		}
  9301  		v.reset(OpMIPSMOVWstore)
  9302  		v.AddArg(ptr)
  9303  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9304  		v0.AuxInt = 0
  9305  		v.AddArg(v0)
  9306  		v.AddArg(mem)
  9307  		return true
  9308  	}
  9309  	// match: (Zero [4] {t} ptr mem)
  9310  	// cond: t.(*types.Type).Alignment()%2 == 0
  9311  	// result: (MOVHstore [2] ptr (MOVWconst [0]) (MOVHstore [0] ptr (MOVWconst [0]) mem))
  9312  	for {
  9313  		if v.AuxInt != 4 {
  9314  			break
  9315  		}
  9316  		t := v.Aux
  9317  		_ = v.Args[1]
  9318  		ptr := v.Args[0]
  9319  		mem := v.Args[1]
  9320  		if !(t.(*types.Type).Alignment()%2 == 0) {
  9321  			break
  9322  		}
  9323  		v.reset(OpMIPSMOVHstore)
  9324  		v.AuxInt = 2
  9325  		v.AddArg(ptr)
  9326  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9327  		v0.AuxInt = 0
  9328  		v.AddArg(v0)
  9329  		v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
  9330  		v1.AuxInt = 0
  9331  		v1.AddArg(ptr)
  9332  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9333  		v2.AuxInt = 0
  9334  		v1.AddArg(v2)
  9335  		v1.AddArg(mem)
  9336  		v.AddArg(v1)
  9337  		return true
  9338  	}
  9339  	// match: (Zero [4] ptr mem)
  9340  	// cond:
  9341  	// result: (MOVBstore [3] ptr (MOVWconst [0]) (MOVBstore [2] ptr (MOVWconst [0]) (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem))))
  9342  	for {
  9343  		if v.AuxInt != 4 {
  9344  			break
  9345  		}
  9346  		_ = v.Args[1]
  9347  		ptr := v.Args[0]
  9348  		mem := v.Args[1]
  9349  		v.reset(OpMIPSMOVBstore)
  9350  		v.AuxInt = 3
  9351  		v.AddArg(ptr)
  9352  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9353  		v0.AuxInt = 0
  9354  		v.AddArg(v0)
  9355  		v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  9356  		v1.AuxInt = 2
  9357  		v1.AddArg(ptr)
  9358  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9359  		v2.AuxInt = 0
  9360  		v1.AddArg(v2)
  9361  		v3 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  9362  		v3.AuxInt = 1
  9363  		v3.AddArg(ptr)
  9364  		v4 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9365  		v4.AuxInt = 0
  9366  		v3.AddArg(v4)
  9367  		v5 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  9368  		v5.AuxInt = 0
  9369  		v5.AddArg(ptr)
  9370  		v6 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9371  		v6.AuxInt = 0
  9372  		v5.AddArg(v6)
  9373  		v5.AddArg(mem)
  9374  		v3.AddArg(v5)
  9375  		v1.AddArg(v3)
  9376  		v.AddArg(v1)
  9377  		return true
  9378  	}
  9379  	// match: (Zero [3] ptr mem)
  9380  	// cond:
  9381  	// result: (MOVBstore [2] ptr (MOVWconst [0]) (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem)))
  9382  	for {
  9383  		if v.AuxInt != 3 {
  9384  			break
  9385  		}
  9386  		_ = v.Args[1]
  9387  		ptr := v.Args[0]
  9388  		mem := v.Args[1]
  9389  		v.reset(OpMIPSMOVBstore)
  9390  		v.AuxInt = 2
  9391  		v.AddArg(ptr)
  9392  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9393  		v0.AuxInt = 0
  9394  		v.AddArg(v0)
  9395  		v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  9396  		v1.AuxInt = 1
  9397  		v1.AddArg(ptr)
  9398  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9399  		v2.AuxInt = 0
  9400  		v1.AddArg(v2)
  9401  		v3 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  9402  		v3.AuxInt = 0
  9403  		v3.AddArg(ptr)
  9404  		v4 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9405  		v4.AuxInt = 0
  9406  		v3.AddArg(v4)
  9407  		v3.AddArg(mem)
  9408  		v1.AddArg(v3)
  9409  		v.AddArg(v1)
  9410  		return true
  9411  	}
  9412  	// match: (Zero [6] {t} ptr mem)
  9413  	// cond: t.(*types.Type).Alignment()%2 == 0
  9414  	// result: (MOVHstore [4] ptr (MOVWconst [0]) (MOVHstore [2] ptr (MOVWconst [0]) (MOVHstore [0] ptr (MOVWconst [0]) mem)))
  9415  	for {
  9416  		if v.AuxInt != 6 {
  9417  			break
  9418  		}
  9419  		t := v.Aux
  9420  		_ = v.Args[1]
  9421  		ptr := v.Args[0]
  9422  		mem := v.Args[1]
  9423  		if !(t.(*types.Type).Alignment()%2 == 0) {
  9424  			break
  9425  		}
  9426  		v.reset(OpMIPSMOVHstore)
  9427  		v.AuxInt = 4
  9428  		v.AddArg(ptr)
  9429  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9430  		v0.AuxInt = 0
  9431  		v.AddArg(v0)
  9432  		v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
  9433  		v1.AuxInt = 2
  9434  		v1.AddArg(ptr)
  9435  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9436  		v2.AuxInt = 0
  9437  		v1.AddArg(v2)
  9438  		v3 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
  9439  		v3.AuxInt = 0
  9440  		v3.AddArg(ptr)
  9441  		v4 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9442  		v4.AuxInt = 0
  9443  		v3.AddArg(v4)
  9444  		v3.AddArg(mem)
  9445  		v1.AddArg(v3)
  9446  		v.AddArg(v1)
  9447  		return true
  9448  	}
  9449  	// match: (Zero [8] {t} ptr mem)
  9450  	// cond: t.(*types.Type).Alignment()%4 == 0
  9451  	// result: (MOVWstore [4] ptr (MOVWconst [0]) (MOVWstore [0] ptr (MOVWconst [0]) mem))
  9452  	for {
  9453  		if v.AuxInt != 8 {
  9454  			break
  9455  		}
  9456  		t := v.Aux
  9457  		_ = v.Args[1]
  9458  		ptr := v.Args[0]
  9459  		mem := v.Args[1]
  9460  		if !(t.(*types.Type).Alignment()%4 == 0) {
  9461  			break
  9462  		}
  9463  		v.reset(OpMIPSMOVWstore)
  9464  		v.AuxInt = 4
  9465  		v.AddArg(ptr)
  9466  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9467  		v0.AuxInt = 0
  9468  		v.AddArg(v0)
  9469  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  9470  		v1.AuxInt = 0
  9471  		v1.AddArg(ptr)
  9472  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9473  		v2.AuxInt = 0
  9474  		v1.AddArg(v2)
  9475  		v1.AddArg(mem)
  9476  		v.AddArg(v1)
  9477  		return true
  9478  	}
  9479  	return false
  9480  }
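        // Zeroing is unrolled for small sizes: byte stores when the alignment
        // is unknown, half-word stores when the type is 2-byte aligned, and
        // word stores when it is 4-byte aligned, each store threaded through
        // the memory result of the one before it. The next function extends
        // the unrolling to 12 and 16 bytes and then falls back to LoweredZero
        // for anything larger than 16 bytes or not 4-byte aligned; its AuxInt
        // records the alignment and its second argument is
        // ptr + size - moveSize(alignment, config), the address of the last
        // chunk to clear.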
  9481  func rewriteValueMIPS_OpZero_10(v *Value) bool {
  9482  	b := v.Block
  9483  	_ = b
  9484  	config := b.Func.Config
  9485  	_ = config
  9486  	typ := &b.Func.Config.Types
  9487  	_ = typ
  9488  	// match: (Zero [12] {t} ptr mem)
  9489  	// cond: t.(*types.Type).Alignment()%4 == 0
  9490  	// result: (MOVWstore [8] ptr (MOVWconst [0]) (MOVWstore [4] ptr (MOVWconst [0]) (MOVWstore [0] ptr (MOVWconst [0]) mem)))
  9491  	for {
  9492  		if v.AuxInt != 12 {
  9493  			break
  9494  		}
  9495  		t := v.Aux
  9496  		_ = v.Args[1]
  9497  		ptr := v.Args[0]
  9498  		mem := v.Args[1]
  9499  		if !(t.(*types.Type).Alignment()%4 == 0) {
  9500  			break
  9501  		}
  9502  		v.reset(OpMIPSMOVWstore)
  9503  		v.AuxInt = 8
  9504  		v.AddArg(ptr)
  9505  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9506  		v0.AuxInt = 0
  9507  		v.AddArg(v0)
  9508  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  9509  		v1.AuxInt = 4
  9510  		v1.AddArg(ptr)
  9511  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9512  		v2.AuxInt = 0
  9513  		v1.AddArg(v2)
  9514  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  9515  		v3.AuxInt = 0
  9516  		v3.AddArg(ptr)
  9517  		v4 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9518  		v4.AuxInt = 0
  9519  		v3.AddArg(v4)
  9520  		v3.AddArg(mem)
  9521  		v1.AddArg(v3)
  9522  		v.AddArg(v1)
  9523  		return true
  9524  	}
  9525  	// match: (Zero [16] {t} ptr mem)
  9526  	// cond: t.(*types.Type).Alignment()%4 == 0
  9527  	// result: (MOVWstore [12] ptr (MOVWconst [0]) (MOVWstore [8] ptr (MOVWconst [0]) (MOVWstore [4] ptr (MOVWconst [0]) (MOVWstore [0] ptr (MOVWconst [0]) mem))))
  9528  	for {
  9529  		if v.AuxInt != 16 {
  9530  			break
  9531  		}
  9532  		t := v.Aux
  9533  		_ = v.Args[1]
  9534  		ptr := v.Args[0]
  9535  		mem := v.Args[1]
  9536  		if !(t.(*types.Type).Alignment()%4 == 0) {
  9537  			break
  9538  		}
  9539  		v.reset(OpMIPSMOVWstore)
  9540  		v.AuxInt = 12
  9541  		v.AddArg(ptr)
  9542  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9543  		v0.AuxInt = 0
  9544  		v.AddArg(v0)
  9545  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  9546  		v1.AuxInt = 8
  9547  		v1.AddArg(ptr)
  9548  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9549  		v2.AuxInt = 0
  9550  		v1.AddArg(v2)
  9551  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  9552  		v3.AuxInt = 4
  9553  		v3.AddArg(ptr)
  9554  		v4 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9555  		v4.AuxInt = 0
  9556  		v3.AddArg(v4)
  9557  		v5 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  9558  		v5.AuxInt = 0
  9559  		v5.AddArg(ptr)
  9560  		v6 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9561  		v6.AuxInt = 0
  9562  		v5.AddArg(v6)
  9563  		v5.AddArg(mem)
  9564  		v3.AddArg(v5)
  9565  		v1.AddArg(v3)
  9566  		v.AddArg(v1)
  9567  		return true
  9568  	}
  9569  	// match: (Zero [s] {t} ptr mem)
  9570  	// cond: (s > 16 || t.(*types.Type).Alignment()%4 != 0)
  9571  	// result: (LoweredZero [t.(*types.Type).Alignment()] ptr (ADDconst <ptr.Type> ptr [s-moveSize(t.(*types.Type).Alignment(), config)]) mem)
  9572  	for {
  9573  		s := v.AuxInt
  9574  		t := v.Aux
  9575  		_ = v.Args[1]
  9576  		ptr := v.Args[0]
  9577  		mem := v.Args[1]
  9578  		if !(s > 16 || t.(*types.Type).Alignment()%4 != 0) {
  9579  			break
  9580  		}
  9581  		v.reset(OpMIPSLoweredZero)
  9582  		v.AuxInt = t.(*types.Type).Alignment()
  9583  		v.AddArg(ptr)
  9584  		v0 := b.NewValue0(v.Pos, OpMIPSADDconst, ptr.Type)
  9585  		v0.AuxInt = s - moveSize(t.(*types.Type).Alignment(), config)
  9586  		v0.AddArg(ptr)
  9587  		v.AddArg(v0)
  9588  		v.AddArg(mem)
  9589  		return true
  9590  	}
  9591  	return false
  9592  }
  9593  func rewriteValueMIPS_OpZeroExt16to32_0(v *Value) bool {
  9594  	// match: (ZeroExt16to32 x)
  9595  	// cond:
  9596  	// result: (MOVHUreg x)
  9597  	for {
  9598  		x := v.Args[0]
  9599  		v.reset(OpMIPSMOVHUreg)
  9600  		v.AddArg(x)
  9601  		return true
  9602  	}
  9603  }
  9604  func rewriteValueMIPS_OpZeroExt8to16_0(v *Value) bool {
  9605  	// match: (ZeroExt8to16 x)
  9606  	// cond:
  9607  	// result: (MOVBUreg x)
  9608  	for {
  9609  		x := v.Args[0]
  9610  		v.reset(OpMIPSMOVBUreg)
  9611  		v.AddArg(x)
  9612  		return true
  9613  	}
  9614  }
  9615  func rewriteValueMIPS_OpZeroExt8to32_0(v *Value) bool {
  9616  	// match: (ZeroExt8to32 x)
  9617  	// cond:
  9618  	// result: (MOVBUreg x)
  9619  	for {
  9620  		x := v.Args[0]
  9621  		v.reset(OpMIPSMOVBUreg)
  9622  		v.AddArg(x)
  9623  		return true
  9624  	}
  9625  }
  9626  func rewriteValueMIPS_OpZeromask_0(v *Value) bool {
  9627  	b := v.Block
  9628  	_ = b
  9629  	typ := &b.Func.Config.Types
  9630  	_ = typ
  9631  	// match: (Zeromask x)
  9632  	// cond:
  9633  	// result: (NEG (SGTU x (MOVWconst [0])))
  9634  	for {
  9635  		x := v.Args[0]
  9636  		v.reset(OpMIPSNEG)
  9637  		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
  9638  		v0.AddArg(x)
  9639  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9640  		v1.AuxInt = 0
  9641  		v0.AddArg(v1)
  9642  		v.AddArg(v0)
  9643  		return true
  9644  	}
  9645  }
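        // rewriteBlockMIPS applies the same style of rewriting to control
        // flow, dispatching on the block kind and its control value: boolean
        // comparisons from the SGT family are folded into the branch (EQ of
        // (XORconst [1] cmp) becomes NE of cmp, SGTUconst [1] x is an x == 0
        // test, SGTconst [0] x is a sign test), floating-point comparisons
        // branch on the FP condition flag via FPF/FPT, and a constant control
        // collapses the block to BlockFirst, calling swapSuccessors when the
        // branch is statically not taken.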
  9646  func rewriteBlockMIPS(b *Block) bool {
  9647  	config := b.Func.Config
  9648  	_ = config
  9649  	fe := b.Func.fe
  9650  	_ = fe
  9651  	typ := &config.Types
  9652  	_ = typ
  9653  	switch b.Kind {
  9654  	case BlockMIPSEQ:
  9655  		// match: (EQ (FPFlagTrue cmp) yes no)
  9656  		// cond:
  9657  		// result: (FPF cmp yes no)
  9658  		for {
  9659  			v := b.Control
  9660  			if v.Op != OpMIPSFPFlagTrue {
  9661  				break
  9662  			}
  9663  			cmp := v.Args[0]
  9664  			b.Kind = BlockMIPSFPF
  9665  			b.SetControl(cmp)
  9666  			b.Aux = nil
  9667  			return true
  9668  		}
  9669  		// match: (EQ (FPFlagFalse cmp) yes no)
  9670  		// cond:
  9671  		// result: (FPT cmp yes no)
  9672  		for {
  9673  			v := b.Control
  9674  			if v.Op != OpMIPSFPFlagFalse {
  9675  				break
  9676  			}
  9677  			cmp := v.Args[0]
  9678  			b.Kind = BlockMIPSFPT
  9679  			b.SetControl(cmp)
  9680  			b.Aux = nil
  9681  			return true
  9682  		}
  9683  		// match: (EQ (XORconst [1] cmp:(SGT _ _)) yes no)
  9684  		// cond:
  9685  		// result: (NE cmp yes no)
  9686  		for {
  9687  			v := b.Control
  9688  			if v.Op != OpMIPSXORconst {
  9689  				break
  9690  			}
  9691  			if v.AuxInt != 1 {
  9692  				break
  9693  			}
  9694  			cmp := v.Args[0]
  9695  			if cmp.Op != OpMIPSSGT {
  9696  				break
  9697  			}
  9698  			_ = cmp.Args[1]
  9699  			b.Kind = BlockMIPSNE
  9700  			b.SetControl(cmp)
  9701  			b.Aux = nil
  9702  			return true
  9703  		}
  9704  		// match: (EQ (XORconst [1] cmp:(SGTU _ _)) yes no)
  9705  		// cond:
  9706  		// result: (NE cmp yes no)
  9707  		for {
  9708  			v := b.Control
  9709  			if v.Op != OpMIPSXORconst {
  9710  				break
  9711  			}
  9712  			if v.AuxInt != 1 {
  9713  				break
  9714  			}
  9715  			cmp := v.Args[0]
  9716  			if cmp.Op != OpMIPSSGTU {
  9717  				break
  9718  			}
  9719  			_ = cmp.Args[1]
  9720  			b.Kind = BlockMIPSNE
  9721  			b.SetControl(cmp)
  9722  			b.Aux = nil
  9723  			return true
  9724  		}
  9725  		// match: (EQ (XORconst [1] cmp:(SGTconst _)) yes no)
  9726  		// cond:
  9727  		// result: (NE cmp yes no)
  9728  		for {
  9729  			v := b.Control
  9730  			if v.Op != OpMIPSXORconst {
  9731  				break
  9732  			}
  9733  			if v.AuxInt != 1 {
  9734  				break
  9735  			}
  9736  			cmp := v.Args[0]
  9737  			if cmp.Op != OpMIPSSGTconst {
  9738  				break
  9739  			}
  9740  			b.Kind = BlockMIPSNE
  9741  			b.SetControl(cmp)
  9742  			b.Aux = nil
  9743  			return true
  9744  		}
  9745  		// match: (EQ (XORconst [1] cmp:(SGTUconst _)) yes no)
  9746  		// cond:
  9747  		// result: (NE cmp yes no)
  9748  		for {
  9749  			v := b.Control
  9750  			if v.Op != OpMIPSXORconst {
  9751  				break
  9752  			}
  9753  			if v.AuxInt != 1 {
  9754  				break
  9755  			}
  9756  			cmp := v.Args[0]
  9757  			if cmp.Op != OpMIPSSGTUconst {
  9758  				break
  9759  			}
  9760  			b.Kind = BlockMIPSNE
  9761  			b.SetControl(cmp)
  9762  			b.Aux = nil
  9763  			return true
  9764  		}
  9765  		// match: (EQ (XORconst [1] cmp:(SGTzero _)) yes no)
  9766  		// cond:
  9767  		// result: (NE cmp yes no)
  9768  		for {
  9769  			v := b.Control
  9770  			if v.Op != OpMIPSXORconst {
  9771  				break
  9772  			}
  9773  			if v.AuxInt != 1 {
  9774  				break
  9775  			}
  9776  			cmp := v.Args[0]
  9777  			if cmp.Op != OpMIPSSGTzero {
  9778  				break
  9779  			}
  9780  			b.Kind = BlockMIPSNE
  9781  			b.SetControl(cmp)
  9782  			b.Aux = nil
  9783  			return true
  9784  		}
  9785  		// match: (EQ (XORconst [1] cmp:(SGTUzero _)) yes no)
  9786  		// cond:
  9787  		// result: (NE cmp yes no)
  9788  		for {
  9789  			v := b.Control
  9790  			if v.Op != OpMIPSXORconst {
  9791  				break
  9792  			}
  9793  			if v.AuxInt != 1 {
  9794  				break
  9795  			}
  9796  			cmp := v.Args[0]
  9797  			if cmp.Op != OpMIPSSGTUzero {
  9798  				break
  9799  			}
  9800  			b.Kind = BlockMIPSNE
  9801  			b.SetControl(cmp)
  9802  			b.Aux = nil
  9803  			return true
  9804  		}
  9805  		// match: (EQ (SGTUconst [1] x) yes no)
  9806  		// cond:
  9807  		// result: (NE x yes no)
  9808  		for {
  9809  			v := b.Control
  9810  			if v.Op != OpMIPSSGTUconst {
  9811  				break
  9812  			}
  9813  			if v.AuxInt != 1 {
  9814  				break
  9815  			}
  9816  			x := v.Args[0]
  9817  			b.Kind = BlockMIPSNE
  9818  			b.SetControl(x)
  9819  			b.Aux = nil
  9820  			return true
  9821  		}
  9822  		// match: (EQ (SGTUzero x) yes no)
  9823  		// cond:
  9824  		// result: (EQ x yes no)
  9825  		for {
  9826  			v := b.Control
  9827  			if v.Op != OpMIPSSGTUzero {
  9828  				break
  9829  			}
  9830  			x := v.Args[0]
  9831  			b.Kind = BlockMIPSEQ
  9832  			b.SetControl(x)
  9833  			b.Aux = nil
  9834  			return true
  9835  		}
  9836  		// match: (EQ (SGTconst [0] x) yes no)
  9837  		// cond:
  9838  		// result: (GEZ x yes no)
  9839  		for {
  9840  			v := b.Control
  9841  			if v.Op != OpMIPSSGTconst {
  9842  				break
  9843  			}
  9844  			if v.AuxInt != 0 {
  9845  				break
  9846  			}
  9847  			x := v.Args[0]
  9848  			b.Kind = BlockMIPSGEZ
  9849  			b.SetControl(x)
  9850  			b.Aux = nil
  9851  			return true
  9852  		}
  9853  		// match: (EQ (SGTzero x) yes no)
  9854  		// cond:
  9855  		// result: (LEZ x yes no)
  9856  		for {
  9857  			v := b.Control
  9858  			if v.Op != OpMIPSSGTzero {
  9859  				break
  9860  			}
  9861  			x := v.Args[0]
  9862  			b.Kind = BlockMIPSLEZ
  9863  			b.SetControl(x)
  9864  			b.Aux = nil
  9865  			return true
  9866  		}
  9867  		// match: (EQ (MOVWconst [0]) yes no)
  9868  		// cond:
  9869  		// result: (First nil yes no)
  9870  		for {
  9871  			v := b.Control
  9872  			if v.Op != OpMIPSMOVWconst {
  9873  				break
  9874  			}
  9875  			if v.AuxInt != 0 {
  9876  				break
  9877  			}
  9878  			b.Kind = BlockFirst
  9879  			b.SetControl(nil)
  9880  			b.Aux = nil
  9881  			return true
  9882  		}
  9883  		// match: (EQ (MOVWconst [c]) yes no)
  9884  		// cond: c != 0
  9885  		// result: (First nil no yes)
  9886  		for {
  9887  			v := b.Control
  9888  			if v.Op != OpMIPSMOVWconst {
  9889  				break
  9890  			}
  9891  			c := v.AuxInt
  9892  			if !(c != 0) {
  9893  				break
  9894  			}
  9895  			b.Kind = BlockFirst
  9896  			b.SetControl(nil)
  9897  			b.Aux = nil
  9898  			b.swapSuccessors()
  9899  			return true
  9900  		}
  9901  	case BlockMIPSGEZ:
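        		// GEZ (like GTZ, LEZ and LTZ below) folds away only for a constant
        		// control; the sign of int32(c) selects the successor directly.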
  9902  		// match: (GEZ (MOVWconst [c]) yes no)
  9903  		// cond: int32(c) >= 0
  9904  		// result: (First nil yes no)
  9905  		for {
  9906  			v := b.Control
  9907  			if v.Op != OpMIPSMOVWconst {
  9908  				break
  9909  			}
  9910  			c := v.AuxInt
  9911  			if !(int32(c) >= 0) {
  9912  				break
  9913  			}
  9914  			b.Kind = BlockFirst
  9915  			b.SetControl(nil)
  9916  			b.Aux = nil
  9917  			return true
  9918  		}
  9919  		// match: (GEZ (MOVWconst [c]) yes no)
  9920  		// cond: int32(c) < 0
  9921  		// result: (First nil no yes)
  9922  		for {
  9923  			v := b.Control
  9924  			if v.Op != OpMIPSMOVWconst {
  9925  				break
  9926  			}
  9927  			c := v.AuxInt
  9928  			if !(int32(c) < 0) {
  9929  				break
  9930  			}
  9931  			b.Kind = BlockFirst
  9932  			b.SetControl(nil)
  9933  			b.Aux = nil
  9934  			b.swapSuccessors()
  9935  			return true
  9936  		}
  9937  	case BlockMIPSGTZ:
  9938  		// match: (GTZ (MOVWconst [c]) yes no)
  9939  		// cond: int32(c) > 0
  9940  		// result: (First nil yes no)
  9941  		for {
  9942  			v := b.Control
  9943  			if v.Op != OpMIPSMOVWconst {
  9944  				break
  9945  			}
  9946  			c := v.AuxInt
  9947  			if !(int32(c) > 0) {
  9948  				break
  9949  			}
  9950  			b.Kind = BlockFirst
  9951  			b.SetControl(nil)
  9952  			b.Aux = nil
  9953  			return true
  9954  		}
  9955  		// match: (GTZ (MOVWconst [c]) yes no)
  9956  		// cond: int32(c) <= 0
  9957  		// result: (First nil no yes)
  9958  		for {
  9959  			v := b.Control
  9960  			if v.Op != OpMIPSMOVWconst {
  9961  				break
  9962  			}
  9963  			c := v.AuxInt
  9964  			if !(int32(c) <= 0) {
  9965  				break
  9966  			}
  9967  			b.Kind = BlockFirst
  9968  			b.SetControl(nil)
  9969  			b.Aux = nil
  9970  			b.swapSuccessors()
  9971  			return true
  9972  		}
  9973  	case BlockIf:
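        		// A generic If block carries a boolean (0 or 1) control value; on
        		// MIPS it lowers to NE, which branches to yes when the value is
        		// nonzero.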
  9974  		// match: (If cond yes no)
  9975  		// cond:
  9976  		// result: (NE cond yes no)
  9977  		for {
  9978  			v := b.Control
  9979  			_ = v
  9980  			cond := b.Control
  9981  			b.Kind = BlockMIPSNE
  9982  			b.SetControl(cond)
  9983  			b.Aux = nil
  9984  			return true
  9985  		}
  9986  	case BlockMIPSLEZ:
  9987  		// match: (LEZ (MOVWconst [c]) yes no)
  9988  		// cond: int32(c) <= 0
  9989  		// result: (First nil yes no)
  9990  		for {
  9991  			v := b.Control
  9992  			if v.Op != OpMIPSMOVWconst {
  9993  				break
  9994  			}
  9995  			c := v.AuxInt
  9996  			if !(int32(c) <= 0) {
  9997  				break
  9998  			}
  9999  			b.Kind = BlockFirst
 10000  			b.SetControl(nil)
 10001  			b.Aux = nil
 10002  			return true
 10003  		}
 10004  		// match: (LEZ (MOVWconst [c]) yes no)
 10005  		// cond: int32(c) > 0
 10006  		// result: (First nil no yes)
 10007  		for {
 10008  			v := b.Control
 10009  			if v.Op != OpMIPSMOVWconst {
 10010  				break
 10011  			}
 10012  			c := v.AuxInt
 10013  			if !(int32(c) > 0) {
 10014  				break
 10015  			}
 10016  			b.Kind = BlockFirst
 10017  			b.SetControl(nil)
 10018  			b.Aux = nil
 10019  			b.swapSuccessors()
 10020  			return true
 10021  		}
 10022  	case BlockMIPSLTZ:
 10023  		// match: (LTZ (MOVWconst [c]) yes no)
 10024  		// cond: int32(c) < 0
 10025  		// result: (First nil yes no)
 10026  		for {
 10027  			v := b.Control
 10028  			if v.Op != OpMIPSMOVWconst {
 10029  				break
 10030  			}
 10031  			c := v.AuxInt
 10032  			if !(int32(c) < 0) {
 10033  				break
 10034  			}
 10035  			b.Kind = BlockFirst
 10036  			b.SetControl(nil)
 10037  			b.Aux = nil
 10038  			return true
 10039  		}
 10040  		// match: (LTZ (MOVWconst [c]) yes no)
 10041  		// cond: int32(c) >= 0
 10042  		// result: (First nil no yes)
 10043  		for {
 10044  			v := b.Control
 10045  			if v.Op != OpMIPSMOVWconst {
 10046  				break
 10047  			}
 10048  			c := v.AuxInt
 10049  			if !(int32(c) >= 0) {
 10050  				break
 10051  			}
 10052  			b.Kind = BlockFirst
 10053  			b.SetControl(nil)
 10054  			b.Aux = nil
 10055  			b.swapSuccessors()
 10056  			return true
 10057  		}
 10058  	case BlockMIPSNE:
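        		// NE branches to yes when its control is nonzero. FPFlag controls
        		// lower to the dedicated FPT/FPF blocks, negated (XORconst [1])
        		// comparisons flip to EQ, and the sign tests map to LTZ and GTZ.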
 10059  		// match: (NE (FPFlagTrue cmp) yes no)
 10060  		// cond:
 10061  		// result: (FPT cmp yes no)
 10062  		for {
 10063  			v := b.Control
 10064  			if v.Op != OpMIPSFPFlagTrue {
 10065  				break
 10066  			}
 10067  			cmp := v.Args[0]
 10068  			b.Kind = BlockMIPSFPT
 10069  			b.SetControl(cmp)
 10070  			b.Aux = nil
 10071  			return true
 10072  		}
 10073  		// match: (NE (FPFlagFalse cmp) yes no)
 10074  		// cond:
 10075  		// result: (FPF cmp yes no)
 10076  		for {
 10077  			v := b.Control
 10078  			if v.Op != OpMIPSFPFlagFalse {
 10079  				break
 10080  			}
 10081  			cmp := v.Args[0]
 10082  			b.Kind = BlockMIPSFPF
 10083  			b.SetControl(cmp)
 10084  			b.Aux = nil
 10085  			return true
 10086  		}
 10087  		// match: (NE (XORconst [1] cmp:(SGT _ _)) yes no)
 10088  		// cond:
 10089  		// result: (EQ cmp yes no)
 10090  		for {
 10091  			v := b.Control
 10092  			if v.Op != OpMIPSXORconst {
 10093  				break
 10094  			}
 10095  			if v.AuxInt != 1 {
 10096  				break
 10097  			}
 10098  			cmp := v.Args[0]
 10099  			if cmp.Op != OpMIPSSGT {
 10100  				break
 10101  			}
 10102  			_ = cmp.Args[1]
 10103  			b.Kind = BlockMIPSEQ
 10104  			b.SetControl(cmp)
 10105  			b.Aux = nil
 10106  			return true
 10107  		}
 10108  		// match: (NE (XORconst [1] cmp:(SGTU _ _)) yes no)
 10109  		// cond:
 10110  		// result: (EQ cmp yes no)
 10111  		for {
 10112  			v := b.Control
 10113  			if v.Op != OpMIPSXORconst {
 10114  				break
 10115  			}
 10116  			if v.AuxInt != 1 {
 10117  				break
 10118  			}
 10119  			cmp := v.Args[0]
 10120  			if cmp.Op != OpMIPSSGTU {
 10121  				break
 10122  			}
 10123  			_ = cmp.Args[1]
 10124  			b.Kind = BlockMIPSEQ
 10125  			b.SetControl(cmp)
 10126  			b.Aux = nil
 10127  			return true
 10128  		}
 10129  		// match: (NE (XORconst [1] cmp:(SGTconst _)) yes no)
 10130  		// cond:
 10131  		// result: (EQ cmp yes no)
 10132  		for {
 10133  			v := b.Control
 10134  			if v.Op != OpMIPSXORconst {
 10135  				break
 10136  			}
 10137  			if v.AuxInt != 1 {
 10138  				break
 10139  			}
 10140  			cmp := v.Args[0]
 10141  			if cmp.Op != OpMIPSSGTconst {
 10142  				break
 10143  			}
 10144  			b.Kind = BlockMIPSEQ
 10145  			b.SetControl(cmp)
 10146  			b.Aux = nil
 10147  			return true
 10148  		}
 10149  		// match: (NE (XORconst [1] cmp:(SGTUconst _)) yes no)
 10150  		// cond:
 10151  		// result: (EQ cmp yes no)
 10152  		for {
 10153  			v := b.Control
 10154  			if v.Op != OpMIPSXORconst {
 10155  				break
 10156  			}
 10157  			if v.AuxInt != 1 {
 10158  				break
 10159  			}
 10160  			cmp := v.Args[0]
 10161  			if cmp.Op != OpMIPSSGTUconst {
 10162  				break
 10163  			}
 10164  			b.Kind = BlockMIPSEQ
 10165  			b.SetControl(cmp)
 10166  			b.Aux = nil
 10167  			return true
 10168  		}
 10169  		// match: (NE (XORconst [1] cmp:(SGTzero _)) yes no)
 10170  		// cond:
 10171  		// result: (EQ cmp yes no)
 10172  		for {
 10173  			v := b.Control
 10174  			if v.Op != OpMIPSXORconst {
 10175  				break
 10176  			}
 10177  			if v.AuxInt != 1 {
 10178  				break
 10179  			}
 10180  			cmp := v.Args[0]
 10181  			if cmp.Op != OpMIPSSGTzero {
 10182  				break
 10183  			}
 10184  			b.Kind = BlockMIPSEQ
 10185  			b.SetControl(cmp)
 10186  			b.Aux = nil
 10187  			return true
 10188  		}
 10189  		// match: (NE (XORconst [1] cmp:(SGTUzero _)) yes no)
 10190  		// cond:
 10191  		// result: (EQ cmp yes no)
 10192  		for {
 10193  			v := b.Control
 10194  			if v.Op != OpMIPSXORconst {
 10195  				break
 10196  			}
 10197  			if v.AuxInt != 1 {
 10198  				break
 10199  			}
 10200  			cmp := v.Args[0]
 10201  			if cmp.Op != OpMIPSSGTUzero {
 10202  				break
 10203  			}
 10204  			b.Kind = BlockMIPSEQ
 10205  			b.SetControl(cmp)
 10206  			b.Aux = nil
 10207  			return true
 10208  		}
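        		// SGTUconst [1] x is 1 exactly when x == 0, so NE on it reduces to
        		// EQ x; SGTUzero keeps NE, and the sign tests below become LTZ and
        		// GTZ, mirroring the EQ rules above.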
 10209  		// match: (NE (SGTUconst [1] x) yes no)
 10210  		// cond:
 10211  		// result: (EQ x yes no)
 10212  		for {
 10213  			v := b.Control
 10214  			if v.Op != OpMIPSSGTUconst {
 10215  				break
 10216  			}
 10217  			if v.AuxInt != 1 {
 10218  				break
 10219  			}
 10220  			x := v.Args[0]
 10221  			b.Kind = BlockMIPSEQ
 10222  			b.SetControl(x)
 10223  			b.Aux = nil
 10224  			return true
 10225  		}
 10226  		// match: (NE (SGTUzero x) yes no)
 10227  		// cond:
 10228  		// result: (NE x yes no)
 10229  		for {
 10230  			v := b.Control
 10231  			if v.Op != OpMIPSSGTUzero {
 10232  				break
 10233  			}
 10234  			x := v.Args[0]
 10235  			b.Kind = BlockMIPSNE
 10236  			b.SetControl(x)
 10237  			b.Aux = nil
 10238  			return true
 10239  		}
 10240  		// match: (NE (SGTconst [0] x) yes no)
 10241  		// cond:
 10242  		// result: (LTZ x yes no)
 10243  		for {
 10244  			v := b.Control
 10245  			if v.Op != OpMIPSSGTconst {
 10246  				break
 10247  			}
 10248  			if v.AuxInt != 0 {
 10249  				break
 10250  			}
 10251  			x := v.Args[0]
 10252  			b.Kind = BlockMIPSLTZ
 10253  			b.SetControl(x)
 10254  			b.Aux = nil
 10255  			return true
 10256  		}
 10257  		// match: (NE (SGTzero x) yes no)
 10258  		// cond:
 10259  		// result: (GTZ x yes no)
 10260  		for {
 10261  			v := b.Control
 10262  			if v.Op != OpMIPSSGTzero {
 10263  				break
 10264  			}
 10265  			x := v.Args[0]
 10266  			b.Kind = BlockMIPSGTZ
 10267  			b.SetControl(x)
 10268  			b.Aux = nil
 10269  			return true
 10270  		}
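        		// Constant controls fold the NE block to First, with the successors
        		// swapped when the constant is zero and the NE condition is
        		// statically false.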
 10271  		// match: (NE (MOVWconst [0]) yes no)
 10272  		// cond:
 10273  		// result: (First nil no yes)
 10274  		for {
 10275  			v := b.Control
 10276  			if v.Op != OpMIPSMOVWconst {
 10277  				break
 10278  			}
 10279  			if v.AuxInt != 0 {
 10280  				break
 10281  			}
 10282  			b.Kind = BlockFirst
 10283  			b.SetControl(nil)
 10284  			b.Aux = nil
 10285  			b.swapSuccessors()
 10286  			return true
 10287  		}
 10288  		// match: (NE (MOVWconst [c]) yes no)
 10289  		// cond: c != 0
 10290  		// result: (First nil yes no)
 10291  		for {
 10292  			v := b.Control
 10293  			if v.Op != OpMIPSMOVWconst {
 10294  				break
 10295  			}
 10296  			c := v.AuxInt
 10297  			if !(c != 0) {
 10298  				break
 10299  			}
 10300  			b.Kind = BlockFirst
 10301  			b.SetControl(nil)
 10302  			b.Aux = nil
 10303  			return true
 10304  		}
 10305  	}
 10306  	return false
 10307  }