github.com/freddyisaac/sicortex-golang@v0.0.0-20231019035217-e03519e66f60/src/cmd/compile/internal/ssa/rewriteMIPS.go

     1  // autogenerated from gen/MIPS.rules: do not edit!
     2  // generated with: cd gen; go run *.go
     3  
     4  package ssa
     5  
     6  import "math"
     7  
     8  var _ = math.MinInt8 // in case not otherwise used
     9  func rewriteValueMIPS(v *Value, config *Config) bool {
    10  	switch v.Op {
    11  	case OpAdd16:
    12  		return rewriteValueMIPS_OpAdd16(v, config)
    13  	case OpAdd32:
    14  		return rewriteValueMIPS_OpAdd32(v, config)
    15  	case OpAdd32F:
    16  		return rewriteValueMIPS_OpAdd32F(v, config)
    17  	case OpAdd32withcarry:
    18  		return rewriteValueMIPS_OpAdd32withcarry(v, config)
    19  	case OpAdd64F:
    20  		return rewriteValueMIPS_OpAdd64F(v, config)
    21  	case OpAdd8:
    22  		return rewriteValueMIPS_OpAdd8(v, config)
    23  	case OpAddPtr:
    24  		return rewriteValueMIPS_OpAddPtr(v, config)
    25  	case OpAddr:
    26  		return rewriteValueMIPS_OpAddr(v, config)
    27  	case OpAnd16:
    28  		return rewriteValueMIPS_OpAnd16(v, config)
    29  	case OpAnd32:
    30  		return rewriteValueMIPS_OpAnd32(v, config)
    31  	case OpAnd8:
    32  		return rewriteValueMIPS_OpAnd8(v, config)
    33  	case OpAndB:
    34  		return rewriteValueMIPS_OpAndB(v, config)
    35  	case OpAtomicAdd32:
    36  		return rewriteValueMIPS_OpAtomicAdd32(v, config)
    37  	case OpAtomicAnd8:
    38  		return rewriteValueMIPS_OpAtomicAnd8(v, config)
    39  	case OpAtomicCompareAndSwap32:
    40  		return rewriteValueMIPS_OpAtomicCompareAndSwap32(v, config)
    41  	case OpAtomicExchange32:
    42  		return rewriteValueMIPS_OpAtomicExchange32(v, config)
    43  	case OpAtomicLoad32:
    44  		return rewriteValueMIPS_OpAtomicLoad32(v, config)
    45  	case OpAtomicLoadPtr:
    46  		return rewriteValueMIPS_OpAtomicLoadPtr(v, config)
    47  	case OpAtomicOr8:
    48  		return rewriteValueMIPS_OpAtomicOr8(v, config)
    49  	case OpAtomicStore32:
    50  		return rewriteValueMIPS_OpAtomicStore32(v, config)
    51  	case OpAtomicStorePtrNoWB:
    52  		return rewriteValueMIPS_OpAtomicStorePtrNoWB(v, config)
    53  	case OpClosureCall:
    54  		return rewriteValueMIPS_OpClosureCall(v, config)
    55  	case OpCom16:
    56  		return rewriteValueMIPS_OpCom16(v, config)
    57  	case OpCom32:
    58  		return rewriteValueMIPS_OpCom32(v, config)
    59  	case OpCom8:
    60  		return rewriteValueMIPS_OpCom8(v, config)
    61  	case OpConst16:
    62  		return rewriteValueMIPS_OpConst16(v, config)
    63  	case OpConst32:
    64  		return rewriteValueMIPS_OpConst32(v, config)
    65  	case OpConst32F:
    66  		return rewriteValueMIPS_OpConst32F(v, config)
    67  	case OpConst64F:
    68  		return rewriteValueMIPS_OpConst64F(v, config)
    69  	case OpConst8:
    70  		return rewriteValueMIPS_OpConst8(v, config)
    71  	case OpConstBool:
    72  		return rewriteValueMIPS_OpConstBool(v, config)
    73  	case OpConstNil:
    74  		return rewriteValueMIPS_OpConstNil(v, config)
    75  	case OpConvert:
    76  		return rewriteValueMIPS_OpConvert(v, config)
    77  	case OpCtz32:
    78  		return rewriteValueMIPS_OpCtz32(v, config)
    79  	case OpCvt32Fto32:
    80  		return rewriteValueMIPS_OpCvt32Fto32(v, config)
    81  	case OpCvt32Fto64F:
    82  		return rewriteValueMIPS_OpCvt32Fto64F(v, config)
    83  	case OpCvt32to32F:
    84  		return rewriteValueMIPS_OpCvt32to32F(v, config)
    85  	case OpCvt32to64F:
    86  		return rewriteValueMIPS_OpCvt32to64F(v, config)
    87  	case OpCvt64Fto32:
    88  		return rewriteValueMIPS_OpCvt64Fto32(v, config)
    89  	case OpCvt64Fto32F:
    90  		return rewriteValueMIPS_OpCvt64Fto32F(v, config)
    91  	case OpDeferCall:
    92  		return rewriteValueMIPS_OpDeferCall(v, config)
    93  	case OpDiv16:
    94  		return rewriteValueMIPS_OpDiv16(v, config)
    95  	case OpDiv16u:
    96  		return rewriteValueMIPS_OpDiv16u(v, config)
    97  	case OpDiv32:
    98  		return rewriteValueMIPS_OpDiv32(v, config)
    99  	case OpDiv32F:
   100  		return rewriteValueMIPS_OpDiv32F(v, config)
   101  	case OpDiv32u:
   102  		return rewriteValueMIPS_OpDiv32u(v, config)
   103  	case OpDiv64F:
   104  		return rewriteValueMIPS_OpDiv64F(v, config)
   105  	case OpDiv8:
   106  		return rewriteValueMIPS_OpDiv8(v, config)
   107  	case OpDiv8u:
   108  		return rewriteValueMIPS_OpDiv8u(v, config)
   109  	case OpEq16:
   110  		return rewriteValueMIPS_OpEq16(v, config)
   111  	case OpEq32:
   112  		return rewriteValueMIPS_OpEq32(v, config)
   113  	case OpEq32F:
   114  		return rewriteValueMIPS_OpEq32F(v, config)
   115  	case OpEq64F:
   116  		return rewriteValueMIPS_OpEq64F(v, config)
   117  	case OpEq8:
   118  		return rewriteValueMIPS_OpEq8(v, config)
   119  	case OpEqB:
   120  		return rewriteValueMIPS_OpEqB(v, config)
   121  	case OpEqPtr:
   122  		return rewriteValueMIPS_OpEqPtr(v, config)
   123  	case OpGeq16:
   124  		return rewriteValueMIPS_OpGeq16(v, config)
   125  	case OpGeq16U:
   126  		return rewriteValueMIPS_OpGeq16U(v, config)
   127  	case OpGeq32:
   128  		return rewriteValueMIPS_OpGeq32(v, config)
   129  	case OpGeq32F:
   130  		return rewriteValueMIPS_OpGeq32F(v, config)
   131  	case OpGeq32U:
   132  		return rewriteValueMIPS_OpGeq32U(v, config)
   133  	case OpGeq64F:
   134  		return rewriteValueMIPS_OpGeq64F(v, config)
   135  	case OpGeq8:
   136  		return rewriteValueMIPS_OpGeq8(v, config)
   137  	case OpGeq8U:
   138  		return rewriteValueMIPS_OpGeq8U(v, config)
   139  	case OpGetClosurePtr:
   140  		return rewriteValueMIPS_OpGetClosurePtr(v, config)
   141  	case OpGoCall:
   142  		return rewriteValueMIPS_OpGoCall(v, config)
   143  	case OpGreater16:
   144  		return rewriteValueMIPS_OpGreater16(v, config)
   145  	case OpGreater16U:
   146  		return rewriteValueMIPS_OpGreater16U(v, config)
   147  	case OpGreater32:
   148  		return rewriteValueMIPS_OpGreater32(v, config)
   149  	case OpGreater32F:
   150  		return rewriteValueMIPS_OpGreater32F(v, config)
   151  	case OpGreater32U:
   152  		return rewriteValueMIPS_OpGreater32U(v, config)
   153  	case OpGreater64F:
   154  		return rewriteValueMIPS_OpGreater64F(v, config)
   155  	case OpGreater8:
   156  		return rewriteValueMIPS_OpGreater8(v, config)
   157  	case OpGreater8U:
   158  		return rewriteValueMIPS_OpGreater8U(v, config)
   159  	case OpHmul16:
   160  		return rewriteValueMIPS_OpHmul16(v, config)
   161  	case OpHmul16u:
   162  		return rewriteValueMIPS_OpHmul16u(v, config)
   163  	case OpHmul32:
   164  		return rewriteValueMIPS_OpHmul32(v, config)
   165  	case OpHmul32u:
   166  		return rewriteValueMIPS_OpHmul32u(v, config)
   167  	case OpHmul8:
   168  		return rewriteValueMIPS_OpHmul8(v, config)
   169  	case OpHmul8u:
   170  		return rewriteValueMIPS_OpHmul8u(v, config)
   171  	case OpInterCall:
   172  		return rewriteValueMIPS_OpInterCall(v, config)
   173  	case OpIsInBounds:
   174  		return rewriteValueMIPS_OpIsInBounds(v, config)
   175  	case OpIsNonNil:
   176  		return rewriteValueMIPS_OpIsNonNil(v, config)
   177  	case OpIsSliceInBounds:
   178  		return rewriteValueMIPS_OpIsSliceInBounds(v, config)
   179  	case OpLeq16:
   180  		return rewriteValueMIPS_OpLeq16(v, config)
   181  	case OpLeq16U:
   182  		return rewriteValueMIPS_OpLeq16U(v, config)
   183  	case OpLeq32:
   184  		return rewriteValueMIPS_OpLeq32(v, config)
   185  	case OpLeq32F:
   186  		return rewriteValueMIPS_OpLeq32F(v, config)
   187  	case OpLeq32U:
   188  		return rewriteValueMIPS_OpLeq32U(v, config)
   189  	case OpLeq64F:
   190  		return rewriteValueMIPS_OpLeq64F(v, config)
   191  	case OpLeq8:
   192  		return rewriteValueMIPS_OpLeq8(v, config)
   193  	case OpLeq8U:
   194  		return rewriteValueMIPS_OpLeq8U(v, config)
   195  	case OpLess16:
   196  		return rewriteValueMIPS_OpLess16(v, config)
   197  	case OpLess16U:
   198  		return rewriteValueMIPS_OpLess16U(v, config)
   199  	case OpLess32:
   200  		return rewriteValueMIPS_OpLess32(v, config)
   201  	case OpLess32F:
   202  		return rewriteValueMIPS_OpLess32F(v, config)
   203  	case OpLess32U:
   204  		return rewriteValueMIPS_OpLess32U(v, config)
   205  	case OpLess64F:
   206  		return rewriteValueMIPS_OpLess64F(v, config)
   207  	case OpLess8:
   208  		return rewriteValueMIPS_OpLess8(v, config)
   209  	case OpLess8U:
   210  		return rewriteValueMIPS_OpLess8U(v, config)
   211  	case OpLoad:
   212  		return rewriteValueMIPS_OpLoad(v, config)
   213  	case OpLsh16x16:
   214  		return rewriteValueMIPS_OpLsh16x16(v, config)
   215  	case OpLsh16x32:
   216  		return rewriteValueMIPS_OpLsh16x32(v, config)
   217  	case OpLsh16x64:
   218  		return rewriteValueMIPS_OpLsh16x64(v, config)
   219  	case OpLsh16x8:
   220  		return rewriteValueMIPS_OpLsh16x8(v, config)
   221  	case OpLsh32x16:
   222  		return rewriteValueMIPS_OpLsh32x16(v, config)
   223  	case OpLsh32x32:
   224  		return rewriteValueMIPS_OpLsh32x32(v, config)
   225  	case OpLsh32x64:
   226  		return rewriteValueMIPS_OpLsh32x64(v, config)
   227  	case OpLsh32x8:
   228  		return rewriteValueMIPS_OpLsh32x8(v, config)
   229  	case OpLsh8x16:
   230  		return rewriteValueMIPS_OpLsh8x16(v, config)
   231  	case OpLsh8x32:
   232  		return rewriteValueMIPS_OpLsh8x32(v, config)
   233  	case OpLsh8x64:
   234  		return rewriteValueMIPS_OpLsh8x64(v, config)
   235  	case OpLsh8x8:
   236  		return rewriteValueMIPS_OpLsh8x8(v, config)
   237  	case OpMIPSADD:
   238  		return rewriteValueMIPS_OpMIPSADD(v, config)
   239  	case OpMIPSADDconst:
   240  		return rewriteValueMIPS_OpMIPSADDconst(v, config)
   241  	case OpMIPSAND:
   242  		return rewriteValueMIPS_OpMIPSAND(v, config)
   243  	case OpMIPSANDconst:
   244  		return rewriteValueMIPS_OpMIPSANDconst(v, config)
   245  	case OpMIPSCMOVZ:
   246  		return rewriteValueMIPS_OpMIPSCMOVZ(v, config)
   247  	case OpMIPSCMOVZzero:
   248  		return rewriteValueMIPS_OpMIPSCMOVZzero(v, config)
   249  	case OpMIPSLoweredAtomicAdd:
   250  		return rewriteValueMIPS_OpMIPSLoweredAtomicAdd(v, config)
   251  	case OpMIPSLoweredAtomicStore:
   252  		return rewriteValueMIPS_OpMIPSLoweredAtomicStore(v, config)
   253  	case OpMIPSMOVBUload:
   254  		return rewriteValueMIPS_OpMIPSMOVBUload(v, config)
   255  	case OpMIPSMOVBUreg:
   256  		return rewriteValueMIPS_OpMIPSMOVBUreg(v, config)
   257  	case OpMIPSMOVBload:
   258  		return rewriteValueMIPS_OpMIPSMOVBload(v, config)
   259  	case OpMIPSMOVBreg:
   260  		return rewriteValueMIPS_OpMIPSMOVBreg(v, config)
   261  	case OpMIPSMOVBstore:
   262  		return rewriteValueMIPS_OpMIPSMOVBstore(v, config)
   263  	case OpMIPSMOVBstorezero:
   264  		return rewriteValueMIPS_OpMIPSMOVBstorezero(v, config)
   265  	case OpMIPSMOVDload:
   266  		return rewriteValueMIPS_OpMIPSMOVDload(v, config)
   267  	case OpMIPSMOVDstore:
   268  		return rewriteValueMIPS_OpMIPSMOVDstore(v, config)
   269  	case OpMIPSMOVFload:
   270  		return rewriteValueMIPS_OpMIPSMOVFload(v, config)
   271  	case OpMIPSMOVFstore:
   272  		return rewriteValueMIPS_OpMIPSMOVFstore(v, config)
   273  	case OpMIPSMOVHUload:
   274  		return rewriteValueMIPS_OpMIPSMOVHUload(v, config)
   275  	case OpMIPSMOVHUreg:
   276  		return rewriteValueMIPS_OpMIPSMOVHUreg(v, config)
   277  	case OpMIPSMOVHload:
   278  		return rewriteValueMIPS_OpMIPSMOVHload(v, config)
   279  	case OpMIPSMOVHreg:
   280  		return rewriteValueMIPS_OpMIPSMOVHreg(v, config)
   281  	case OpMIPSMOVHstore:
   282  		return rewriteValueMIPS_OpMIPSMOVHstore(v, config)
   283  	case OpMIPSMOVHstorezero:
   284  		return rewriteValueMIPS_OpMIPSMOVHstorezero(v, config)
   285  	case OpMIPSMOVWload:
   286  		return rewriteValueMIPS_OpMIPSMOVWload(v, config)
   287  	case OpMIPSMOVWreg:
   288  		return rewriteValueMIPS_OpMIPSMOVWreg(v, config)
   289  	case OpMIPSMOVWstore:
   290  		return rewriteValueMIPS_OpMIPSMOVWstore(v, config)
   291  	case OpMIPSMOVWstorezero:
   292  		return rewriteValueMIPS_OpMIPSMOVWstorezero(v, config)
   293  	case OpMIPSMUL:
   294  		return rewriteValueMIPS_OpMIPSMUL(v, config)
   295  	case OpMIPSNEG:
   296  		return rewriteValueMIPS_OpMIPSNEG(v, config)
   297  	case OpMIPSNOR:
   298  		return rewriteValueMIPS_OpMIPSNOR(v, config)
   299  	case OpMIPSNORconst:
   300  		return rewriteValueMIPS_OpMIPSNORconst(v, config)
   301  	case OpMIPSOR:
   302  		return rewriteValueMIPS_OpMIPSOR(v, config)
   303  	case OpMIPSORconst:
   304  		return rewriteValueMIPS_OpMIPSORconst(v, config)
   305  	case OpMIPSSGT:
   306  		return rewriteValueMIPS_OpMIPSSGT(v, config)
   307  	case OpMIPSSGTU:
   308  		return rewriteValueMIPS_OpMIPSSGTU(v, config)
   309  	case OpMIPSSGTUconst:
   310  		return rewriteValueMIPS_OpMIPSSGTUconst(v, config)
   311  	case OpMIPSSGTUzero:
   312  		return rewriteValueMIPS_OpMIPSSGTUzero(v, config)
   313  	case OpMIPSSGTconst:
   314  		return rewriteValueMIPS_OpMIPSSGTconst(v, config)
   315  	case OpMIPSSGTzero:
   316  		return rewriteValueMIPS_OpMIPSSGTzero(v, config)
   317  	case OpMIPSSLL:
   318  		return rewriteValueMIPS_OpMIPSSLL(v, config)
   319  	case OpMIPSSLLconst:
   320  		return rewriteValueMIPS_OpMIPSSLLconst(v, config)
   321  	case OpMIPSSRA:
   322  		return rewriteValueMIPS_OpMIPSSRA(v, config)
   323  	case OpMIPSSRAconst:
   324  		return rewriteValueMIPS_OpMIPSSRAconst(v, config)
   325  	case OpMIPSSRL:
   326  		return rewriteValueMIPS_OpMIPSSRL(v, config)
   327  	case OpMIPSSRLconst:
   328  		return rewriteValueMIPS_OpMIPSSRLconst(v, config)
   329  	case OpMIPSSUB:
   330  		return rewriteValueMIPS_OpMIPSSUB(v, config)
   331  	case OpMIPSSUBconst:
   332  		return rewriteValueMIPS_OpMIPSSUBconst(v, config)
   333  	case OpMIPSXOR:
   334  		return rewriteValueMIPS_OpMIPSXOR(v, config)
   335  	case OpMIPSXORconst:
   336  		return rewriteValueMIPS_OpMIPSXORconst(v, config)
   337  	case OpMod16:
   338  		return rewriteValueMIPS_OpMod16(v, config)
   339  	case OpMod16u:
   340  		return rewriteValueMIPS_OpMod16u(v, config)
   341  	case OpMod32:
   342  		return rewriteValueMIPS_OpMod32(v, config)
   343  	case OpMod32u:
   344  		return rewriteValueMIPS_OpMod32u(v, config)
   345  	case OpMod8:
   346  		return rewriteValueMIPS_OpMod8(v, config)
   347  	case OpMod8u:
   348  		return rewriteValueMIPS_OpMod8u(v, config)
   349  	case OpMove:
   350  		return rewriteValueMIPS_OpMove(v, config)
   351  	case OpMul16:
   352  		return rewriteValueMIPS_OpMul16(v, config)
   353  	case OpMul32:
   354  		return rewriteValueMIPS_OpMul32(v, config)
   355  	case OpMul32F:
   356  		return rewriteValueMIPS_OpMul32F(v, config)
   357  	case OpMul32uhilo:
   358  		return rewriteValueMIPS_OpMul32uhilo(v, config)
   359  	case OpMul64F:
   360  		return rewriteValueMIPS_OpMul64F(v, config)
   361  	case OpMul8:
   362  		return rewriteValueMIPS_OpMul8(v, config)
   363  	case OpNeg16:
   364  		return rewriteValueMIPS_OpNeg16(v, config)
   365  	case OpNeg32:
   366  		return rewriteValueMIPS_OpNeg32(v, config)
   367  	case OpNeg32F:
   368  		return rewriteValueMIPS_OpNeg32F(v, config)
   369  	case OpNeg64F:
   370  		return rewriteValueMIPS_OpNeg64F(v, config)
   371  	case OpNeg8:
   372  		return rewriteValueMIPS_OpNeg8(v, config)
   373  	case OpNeq16:
   374  		return rewriteValueMIPS_OpNeq16(v, config)
   375  	case OpNeq32:
   376  		return rewriteValueMIPS_OpNeq32(v, config)
   377  	case OpNeq32F:
   378  		return rewriteValueMIPS_OpNeq32F(v, config)
   379  	case OpNeq64F:
   380  		return rewriteValueMIPS_OpNeq64F(v, config)
   381  	case OpNeq8:
   382  		return rewriteValueMIPS_OpNeq8(v, config)
   383  	case OpNeqB:
   384  		return rewriteValueMIPS_OpNeqB(v, config)
   385  	case OpNeqPtr:
   386  		return rewriteValueMIPS_OpNeqPtr(v, config)
   387  	case OpNilCheck:
   388  		return rewriteValueMIPS_OpNilCheck(v, config)
   389  	case OpNot:
   390  		return rewriteValueMIPS_OpNot(v, config)
   391  	case OpOffPtr:
   392  		return rewriteValueMIPS_OpOffPtr(v, config)
   393  	case OpOr16:
   394  		return rewriteValueMIPS_OpOr16(v, config)
   395  	case OpOr32:
   396  		return rewriteValueMIPS_OpOr32(v, config)
   397  	case OpOr8:
   398  		return rewriteValueMIPS_OpOr8(v, config)
   399  	case OpOrB:
   400  		return rewriteValueMIPS_OpOrB(v, config)
   401  	case OpRsh16Ux16:
   402  		return rewriteValueMIPS_OpRsh16Ux16(v, config)
   403  	case OpRsh16Ux32:
   404  		return rewriteValueMIPS_OpRsh16Ux32(v, config)
   405  	case OpRsh16Ux64:
   406  		return rewriteValueMIPS_OpRsh16Ux64(v, config)
   407  	case OpRsh16Ux8:
   408  		return rewriteValueMIPS_OpRsh16Ux8(v, config)
   409  	case OpRsh16x16:
   410  		return rewriteValueMIPS_OpRsh16x16(v, config)
   411  	case OpRsh16x32:
   412  		return rewriteValueMIPS_OpRsh16x32(v, config)
   413  	case OpRsh16x64:
   414  		return rewriteValueMIPS_OpRsh16x64(v, config)
   415  	case OpRsh16x8:
   416  		return rewriteValueMIPS_OpRsh16x8(v, config)
   417  	case OpRsh32Ux16:
   418  		return rewriteValueMIPS_OpRsh32Ux16(v, config)
   419  	case OpRsh32Ux32:
   420  		return rewriteValueMIPS_OpRsh32Ux32(v, config)
   421  	case OpRsh32Ux64:
   422  		return rewriteValueMIPS_OpRsh32Ux64(v, config)
   423  	case OpRsh32Ux8:
   424  		return rewriteValueMIPS_OpRsh32Ux8(v, config)
   425  	case OpRsh32x16:
   426  		return rewriteValueMIPS_OpRsh32x16(v, config)
   427  	case OpRsh32x32:
   428  		return rewriteValueMIPS_OpRsh32x32(v, config)
   429  	case OpRsh32x64:
   430  		return rewriteValueMIPS_OpRsh32x64(v, config)
   431  	case OpRsh32x8:
   432  		return rewriteValueMIPS_OpRsh32x8(v, config)
   433  	case OpRsh8Ux16:
   434  		return rewriteValueMIPS_OpRsh8Ux16(v, config)
   435  	case OpRsh8Ux32:
   436  		return rewriteValueMIPS_OpRsh8Ux32(v, config)
   437  	case OpRsh8Ux64:
   438  		return rewriteValueMIPS_OpRsh8Ux64(v, config)
   439  	case OpRsh8Ux8:
   440  		return rewriteValueMIPS_OpRsh8Ux8(v, config)
   441  	case OpRsh8x16:
   442  		return rewriteValueMIPS_OpRsh8x16(v, config)
   443  	case OpRsh8x32:
   444  		return rewriteValueMIPS_OpRsh8x32(v, config)
   445  	case OpRsh8x64:
   446  		return rewriteValueMIPS_OpRsh8x64(v, config)
   447  	case OpRsh8x8:
   448  		return rewriteValueMIPS_OpRsh8x8(v, config)
   449  	case OpSelect0:
   450  		return rewriteValueMIPS_OpSelect0(v, config)
   451  	case OpSelect1:
   452  		return rewriteValueMIPS_OpSelect1(v, config)
   453  	case OpSignExt16to32:
   454  		return rewriteValueMIPS_OpSignExt16to32(v, config)
   455  	case OpSignExt8to16:
   456  		return rewriteValueMIPS_OpSignExt8to16(v, config)
   457  	case OpSignExt8to32:
   458  		return rewriteValueMIPS_OpSignExt8to32(v, config)
   459  	case OpSignmask:
   460  		return rewriteValueMIPS_OpSignmask(v, config)
   461  	case OpSlicemask:
   462  		return rewriteValueMIPS_OpSlicemask(v, config)
   463  	case OpSqrt:
   464  		return rewriteValueMIPS_OpSqrt(v, config)
   465  	case OpStaticCall:
   466  		return rewriteValueMIPS_OpStaticCall(v, config)
   467  	case OpStore:
   468  		return rewriteValueMIPS_OpStore(v, config)
   469  	case OpSub16:
   470  		return rewriteValueMIPS_OpSub16(v, config)
   471  	case OpSub32:
   472  		return rewriteValueMIPS_OpSub32(v, config)
   473  	case OpSub32F:
   474  		return rewriteValueMIPS_OpSub32F(v, config)
   475  	case OpSub32withcarry:
   476  		return rewriteValueMIPS_OpSub32withcarry(v, config)
   477  	case OpSub64F:
   478  		return rewriteValueMIPS_OpSub64F(v, config)
   479  	case OpSub8:
   480  		return rewriteValueMIPS_OpSub8(v, config)
   481  	case OpSubPtr:
   482  		return rewriteValueMIPS_OpSubPtr(v, config)
   483  	case OpTrunc16to8:
   484  		return rewriteValueMIPS_OpTrunc16to8(v, config)
   485  	case OpTrunc32to16:
   486  		return rewriteValueMIPS_OpTrunc32to16(v, config)
   487  	case OpTrunc32to8:
   488  		return rewriteValueMIPS_OpTrunc32to8(v, config)
   489  	case OpXor16:
   490  		return rewriteValueMIPS_OpXor16(v, config)
   491  	case OpXor32:
   492  		return rewriteValueMIPS_OpXor32(v, config)
   493  	case OpXor8:
   494  		return rewriteValueMIPS_OpXor8(v, config)
   495  	case OpZero:
   496  		return rewriteValueMIPS_OpZero(v, config)
   497  	case OpZeroExt16to32:
   498  		return rewriteValueMIPS_OpZeroExt16to32(v, config)
   499  	case OpZeroExt8to16:
   500  		return rewriteValueMIPS_OpZeroExt8to16(v, config)
   501  	case OpZeroExt8to32:
   502  		return rewriteValueMIPS_OpZeroExt8to32(v, config)
   503  	case OpZeromask:
   504  		return rewriteValueMIPS_OpZeromask(v, config)
   505  	}
   506  	return false
   507  }
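// Note: rewriteValueMIPS is the dispatcher generated from gen/MIPS.rules. It
// switches on the generic SSA opcode and calls the per-op rewrite function,
// which reports whether any rule fired; returning false leaves v unchanged.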
   508  func rewriteValueMIPS_OpAdd16(v *Value, config *Config) bool {
   509  	b := v.Block
   510  	_ = b
   511  	// match: (Add16 x y)
   512  	// cond:
   513  	// result: (ADD x y)
   514  	for {
   515  		x := v.Args[0]
   516  		y := v.Args[1]
   517  		v.reset(OpMIPSADD)
   518  		v.AddArg(x)
   519  		v.AddArg(y)
   520  		return true
   521  	}
   522  }
   523  func rewriteValueMIPS_OpAdd32(v *Value, config *Config) bool {
   524  	b := v.Block
   525  	_ = b
   526  	// match: (Add32 x y)
   527  	// cond:
   528  	// result: (ADD x y)
   529  	for {
   530  		x := v.Args[0]
   531  		y := v.Args[1]
   532  		v.reset(OpMIPSADD)
   533  		v.AddArg(x)
   534  		v.AddArg(y)
   535  		return true
   536  	}
   537  }
   538  func rewriteValueMIPS_OpAdd32F(v *Value, config *Config) bool {
   539  	b := v.Block
   540  	_ = b
   541  	// match: (Add32F x y)
   542  	// cond:
   543  	// result: (ADDF x y)
   544  	for {
   545  		x := v.Args[0]
   546  		y := v.Args[1]
   547  		v.reset(OpMIPSADDF)
   548  		v.AddArg(x)
   549  		v.AddArg(y)
   550  		return true
   551  	}
   552  }
   553  func rewriteValueMIPS_OpAdd32withcarry(v *Value, config *Config) bool {
   554  	b := v.Block
   555  	_ = b
   556  	// match: (Add32withcarry <t> x y c)
   557  	// cond:
   558  	// result: (ADD c (ADD <t> x y))
   559  	for {
   560  		t := v.Type
   561  		x := v.Args[0]
   562  		y := v.Args[1]
   563  		c := v.Args[2]
   564  		v.reset(OpMIPSADD)
   565  		v.AddArg(c)
   566  		v0 := b.NewValue0(v.Line, OpMIPSADD, t)
   567  		v0.AddArg(x)
   568  		v0.AddArg(y)
   569  		v.AddArg(v0)
   570  		return true
   571  	}
   572  }
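// Note: Add32withcarry has no three-input machine form, so the rule above
// chains two ADDs: the carry input c is simply added to (x + y).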
   573  func rewriteValueMIPS_OpAdd64F(v *Value, config *Config) bool {
   574  	b := v.Block
   575  	_ = b
   576  	// match: (Add64F x y)
   577  	// cond:
   578  	// result: (ADDD x y)
   579  	for {
   580  		x := v.Args[0]
   581  		y := v.Args[1]
   582  		v.reset(OpMIPSADDD)
   583  		v.AddArg(x)
   584  		v.AddArg(y)
   585  		return true
   586  	}
   587  }
   588  func rewriteValueMIPS_OpAdd8(v *Value, config *Config) bool {
   589  	b := v.Block
   590  	_ = b
   591  	// match: (Add8 x y)
   592  	// cond:
   593  	// result: (ADD x y)
   594  	for {
   595  		x := v.Args[0]
   596  		y := v.Args[1]
   597  		v.reset(OpMIPSADD)
   598  		v.AddArg(x)
   599  		v.AddArg(y)
   600  		return true
   601  	}
   602  }
   603  func rewriteValueMIPS_OpAddPtr(v *Value, config *Config) bool {
   604  	b := v.Block
   605  	_ = b
   606  	// match: (AddPtr x y)
   607  	// cond:
   608  	// result: (ADD x y)
   609  	for {
   610  		x := v.Args[0]
   611  		y := v.Args[1]
   612  		v.reset(OpMIPSADD)
   613  		v.AddArg(x)
   614  		v.AddArg(y)
   615  		return true
   616  	}
   617  }
   618  func rewriteValueMIPS_OpAddr(v *Value, config *Config) bool {
   619  	b := v.Block
   620  	_ = b
   621  	// match: (Addr {sym} base)
   622  	// cond:
   623  	// result: (MOVWaddr {sym} base)
   624  	for {
   625  		sym := v.Aux
   626  		base := v.Args[0]
   627  		v.reset(OpMIPSMOVWaddr)
   628  		v.Aux = sym
   629  		v.AddArg(base)
   630  		return true
   631  	}
   632  }
   633  func rewriteValueMIPS_OpAnd16(v *Value, config *Config) bool {
   634  	b := v.Block
   635  	_ = b
   636  	// match: (And16 x y)
   637  	// cond:
   638  	// result: (AND x y)
   639  	for {
   640  		x := v.Args[0]
   641  		y := v.Args[1]
   642  		v.reset(OpMIPSAND)
   643  		v.AddArg(x)
   644  		v.AddArg(y)
   645  		return true
   646  	}
   647  }
   648  func rewriteValueMIPS_OpAnd32(v *Value, config *Config) bool {
   649  	b := v.Block
   650  	_ = b
   651  	// match: (And32 x y)
   652  	// cond:
   653  	// result: (AND x y)
   654  	for {
   655  		x := v.Args[0]
   656  		y := v.Args[1]
   657  		v.reset(OpMIPSAND)
   658  		v.AddArg(x)
   659  		v.AddArg(y)
   660  		return true
   661  	}
   662  }
   663  func rewriteValueMIPS_OpAnd8(v *Value, config *Config) bool {
   664  	b := v.Block
   665  	_ = b
   666  	// match: (And8 x y)
   667  	// cond:
   668  	// result: (AND x y)
   669  	for {
   670  		x := v.Args[0]
   671  		y := v.Args[1]
   672  		v.reset(OpMIPSAND)
   673  		v.AddArg(x)
   674  		v.AddArg(y)
   675  		return true
   676  	}
   677  }
   678  func rewriteValueMIPS_OpAndB(v *Value, config *Config) bool {
   679  	b := v.Block
   680  	_ = b
   681  	// match: (AndB x y)
   682  	// cond:
   683  	// result: (AND x y)
   684  	for {
   685  		x := v.Args[0]
   686  		y := v.Args[1]
   687  		v.reset(OpMIPSAND)
   688  		v.AddArg(x)
   689  		v.AddArg(y)
   690  		return true
   691  	}
   692  }
   693  func rewriteValueMIPS_OpAtomicAdd32(v *Value, config *Config) bool {
   694  	b := v.Block
   695  	_ = b
   696  	// match: (AtomicAdd32 ptr val mem)
   697  	// cond:
   698  	// result: (LoweredAtomicAdd ptr val mem)
   699  	for {
   700  		ptr := v.Args[0]
   701  		val := v.Args[1]
   702  		mem := v.Args[2]
   703  		v.reset(OpMIPSLoweredAtomicAdd)
   704  		v.AddArg(ptr)
   705  		v.AddArg(val)
   706  		v.AddArg(mem)
   707  		return true
   708  	}
   709  }
   710  func rewriteValueMIPS_OpAtomicAnd8(v *Value, config *Config) bool {
   711  	b := v.Block
   712  	_ = b
   713  	// match: (AtomicAnd8  ptr val mem)
   714  	// cond: !config.BigEndian
   715  	// result: (LoweredAtomicAnd (AND <config.fe.TypeUInt32().PtrTo()> (MOVWconst [^3]) ptr) 		(OR <config.fe.TypeUInt32()> (SLL <config.fe.TypeUInt32()> (ZeroExt8to32 val) 			(SLLconst <config.fe.TypeUInt32()> [3] 				(ANDconst  <config.fe.TypeUInt32()> [3] ptr))) 		(NORconst [0] <config.fe.TypeUInt32()> (SLL <config.fe.TypeUInt32()> 			(MOVWconst [0xff]) (SLLconst <config.fe.TypeUInt32()> [3] 				(ANDconst <config.fe.TypeUInt32()> [3] ptr))))) mem)
   716  	for {
   717  		ptr := v.Args[0]
   718  		val := v.Args[1]
   719  		mem := v.Args[2]
   720  		if !(!config.BigEndian) {
   721  			break
   722  		}
   723  		v.reset(OpMIPSLoweredAtomicAnd)
   724  		v0 := b.NewValue0(v.Line, OpMIPSAND, config.fe.TypeUInt32().PtrTo())
   725  		v1 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
   726  		v1.AuxInt = ^3
   727  		v0.AddArg(v1)
   728  		v0.AddArg(ptr)
   729  		v.AddArg(v0)
   730  		v2 := b.NewValue0(v.Line, OpMIPSOR, config.fe.TypeUInt32())
   731  		v3 := b.NewValue0(v.Line, OpMIPSSLL, config.fe.TypeUInt32())
   732  		v4 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
   733  		v4.AddArg(val)
   734  		v3.AddArg(v4)
   735  		v5 := b.NewValue0(v.Line, OpMIPSSLLconst, config.fe.TypeUInt32())
   736  		v5.AuxInt = 3
   737  		v6 := b.NewValue0(v.Line, OpMIPSANDconst, config.fe.TypeUInt32())
   738  		v6.AuxInt = 3
   739  		v6.AddArg(ptr)
   740  		v5.AddArg(v6)
   741  		v3.AddArg(v5)
   742  		v2.AddArg(v3)
   743  		v7 := b.NewValue0(v.Line, OpMIPSNORconst, config.fe.TypeUInt32())
   744  		v7.AuxInt = 0
   745  		v8 := b.NewValue0(v.Line, OpMIPSSLL, config.fe.TypeUInt32())
   746  		v9 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
   747  		v9.AuxInt = 0xff
   748  		v8.AddArg(v9)
   749  		v10 := b.NewValue0(v.Line, OpMIPSSLLconst, config.fe.TypeUInt32())
   750  		v10.AuxInt = 3
   751  		v11 := b.NewValue0(v.Line, OpMIPSANDconst, config.fe.TypeUInt32())
   752  		v11.AuxInt = 3
   753  		v11.AddArg(ptr)
   754  		v10.AddArg(v11)
   755  		v8.AddArg(v10)
   756  		v7.AddArg(v8)
   757  		v2.AddArg(v7)
   758  		v.AddArg(v2)
   759  		v.AddArg(mem)
   760  		return true
   761  	}
   762  	// match: (AtomicAnd8  ptr val mem)
   763  	// cond: config.BigEndian
   764  	// result: (LoweredAtomicAnd (AND <config.fe.TypeUInt32().PtrTo()> (MOVWconst [^3]) ptr) 		(OR <config.fe.TypeUInt32()> (SLL <config.fe.TypeUInt32()> (ZeroExt8to32 val) 			(SLLconst <config.fe.TypeUInt32()> [3] 				(ANDconst  <config.fe.TypeUInt32()> [3] 					(XORconst <config.fe.TypeUInt32()> [3] ptr)))) 		(NORconst [0] <config.fe.TypeUInt32()> (SLL <config.fe.TypeUInt32()> 			(MOVWconst [0xff]) (SLLconst <config.fe.TypeUInt32()> [3] 				(ANDconst <config.fe.TypeUInt32()> [3] 					(XORconst <config.fe.TypeUInt32()> [3] ptr)))))) mem)
   765  	for {
   766  		ptr := v.Args[0]
   767  		val := v.Args[1]
   768  		mem := v.Args[2]
   769  		if !(config.BigEndian) {
   770  			break
   771  		}
   772  		v.reset(OpMIPSLoweredAtomicAnd)
   773  		v0 := b.NewValue0(v.Line, OpMIPSAND, config.fe.TypeUInt32().PtrTo())
   774  		v1 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
   775  		v1.AuxInt = ^3
   776  		v0.AddArg(v1)
   777  		v0.AddArg(ptr)
   778  		v.AddArg(v0)
   779  		v2 := b.NewValue0(v.Line, OpMIPSOR, config.fe.TypeUInt32())
   780  		v3 := b.NewValue0(v.Line, OpMIPSSLL, config.fe.TypeUInt32())
   781  		v4 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
   782  		v4.AddArg(val)
   783  		v3.AddArg(v4)
   784  		v5 := b.NewValue0(v.Line, OpMIPSSLLconst, config.fe.TypeUInt32())
   785  		v5.AuxInt = 3
   786  		v6 := b.NewValue0(v.Line, OpMIPSANDconst, config.fe.TypeUInt32())
   787  		v6.AuxInt = 3
   788  		v7 := b.NewValue0(v.Line, OpMIPSXORconst, config.fe.TypeUInt32())
   789  		v7.AuxInt = 3
   790  		v7.AddArg(ptr)
   791  		v6.AddArg(v7)
   792  		v5.AddArg(v6)
   793  		v3.AddArg(v5)
   794  		v2.AddArg(v3)
   795  		v8 := b.NewValue0(v.Line, OpMIPSNORconst, config.fe.TypeUInt32())
   796  		v8.AuxInt = 0
   797  		v9 := b.NewValue0(v.Line, OpMIPSSLL, config.fe.TypeUInt32())
   798  		v10 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
   799  		v10.AuxInt = 0xff
   800  		v9.AddArg(v10)
   801  		v11 := b.NewValue0(v.Line, OpMIPSSLLconst, config.fe.TypeUInt32())
   802  		v11.AuxInt = 3
   803  		v12 := b.NewValue0(v.Line, OpMIPSANDconst, config.fe.TypeUInt32())
   804  		v12.AuxInt = 3
   805  		v13 := b.NewValue0(v.Line, OpMIPSXORconst, config.fe.TypeUInt32())
   806  		v13.AuxInt = 3
   807  		v13.AddArg(ptr)
   808  		v12.AddArg(v13)
   809  		v11.AddArg(v12)
   810  		v9.AddArg(v11)
   811  		v8.AddArg(v9)
   812  		v2.AddArg(v8)
   813  		v.AddArg(v2)
   814  		v.AddArg(mem)
   815  		return true
   816  	}
   817  	return false
   818  }
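// Note on the AtomicAnd8 lowering above: there is no byte-sized atomic AND, so
// the rewrite works on the containing 32-bit word. The address is aligned with
// ptr &^ 3, the byte value is shifted into its lane (shift = (ptr&3)*8, with
// the lane index flipped via XORconst [3] on big-endian), and the other three
// byte lanes are filled with 1s (NORconst [0] of 0xff<<shift) so the word-wide
// LoweredAtomicAnd leaves them unchanged.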
   819  func rewriteValueMIPS_OpAtomicCompareAndSwap32(v *Value, config *Config) bool {
   820  	b := v.Block
   821  	_ = b
   822  	// match: (AtomicCompareAndSwap32 ptr old new_ mem)
   823  	// cond:
   824  	// result: (LoweredAtomicCas ptr old new_ mem)
   825  	for {
   826  		ptr := v.Args[0]
   827  		old := v.Args[1]
   828  		new_ := v.Args[2]
   829  		mem := v.Args[3]
   830  		v.reset(OpMIPSLoweredAtomicCas)
   831  		v.AddArg(ptr)
   832  		v.AddArg(old)
   833  		v.AddArg(new_)
   834  		v.AddArg(mem)
   835  		return true
   836  	}
   837  }
   838  func rewriteValueMIPS_OpAtomicExchange32(v *Value, config *Config) bool {
   839  	b := v.Block
   840  	_ = b
   841  	// match: (AtomicExchange32 ptr val mem)
   842  	// cond:
   843  	// result: (LoweredAtomicExchange ptr val mem)
   844  	for {
   845  		ptr := v.Args[0]
   846  		val := v.Args[1]
   847  		mem := v.Args[2]
   848  		v.reset(OpMIPSLoweredAtomicExchange)
   849  		v.AddArg(ptr)
   850  		v.AddArg(val)
   851  		v.AddArg(mem)
   852  		return true
   853  	}
   854  }
   855  func rewriteValueMIPS_OpAtomicLoad32(v *Value, config *Config) bool {
   856  	b := v.Block
   857  	_ = b
   858  	// match: (AtomicLoad32  ptr mem)
   859  	// cond:
   860  	// result: (LoweredAtomicLoad ptr mem)
   861  	for {
   862  		ptr := v.Args[0]
   863  		mem := v.Args[1]
   864  		v.reset(OpMIPSLoweredAtomicLoad)
   865  		v.AddArg(ptr)
   866  		v.AddArg(mem)
   867  		return true
   868  	}
   869  }
   870  func rewriteValueMIPS_OpAtomicLoadPtr(v *Value, config *Config) bool {
   871  	b := v.Block
   872  	_ = b
   873  	// match: (AtomicLoadPtr ptr mem)
   874  	// cond:
   875  	// result: (LoweredAtomicLoad  ptr mem)
   876  	for {
   877  		ptr := v.Args[0]
   878  		mem := v.Args[1]
   879  		v.reset(OpMIPSLoweredAtomicLoad)
   880  		v.AddArg(ptr)
   881  		v.AddArg(mem)
   882  		return true
   883  	}
   884  }
   885  func rewriteValueMIPS_OpAtomicOr8(v *Value, config *Config) bool {
   886  	b := v.Block
   887  	_ = b
   888  	// match: (AtomicOr8 ptr val mem)
   889  	// cond: !config.BigEndian
   890  	// result: (LoweredAtomicOr (AND <config.fe.TypeUInt32().PtrTo()> (MOVWconst [^3]) ptr) 		(SLL <config.fe.TypeUInt32()> (ZeroExt8to32 val) 			(SLLconst <config.fe.TypeUInt32()> [3] 				(ANDconst <config.fe.TypeUInt32()> [3] ptr))) mem)
   891  	for {
   892  		ptr := v.Args[0]
   893  		val := v.Args[1]
   894  		mem := v.Args[2]
   895  		if !(!config.BigEndian) {
   896  			break
   897  		}
   898  		v.reset(OpMIPSLoweredAtomicOr)
   899  		v0 := b.NewValue0(v.Line, OpMIPSAND, config.fe.TypeUInt32().PtrTo())
   900  		v1 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
   901  		v1.AuxInt = ^3
   902  		v0.AddArg(v1)
   903  		v0.AddArg(ptr)
   904  		v.AddArg(v0)
   905  		v2 := b.NewValue0(v.Line, OpMIPSSLL, config.fe.TypeUInt32())
   906  		v3 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
   907  		v3.AddArg(val)
   908  		v2.AddArg(v3)
   909  		v4 := b.NewValue0(v.Line, OpMIPSSLLconst, config.fe.TypeUInt32())
   910  		v4.AuxInt = 3
   911  		v5 := b.NewValue0(v.Line, OpMIPSANDconst, config.fe.TypeUInt32())
   912  		v5.AuxInt = 3
   913  		v5.AddArg(ptr)
   914  		v4.AddArg(v5)
   915  		v2.AddArg(v4)
   916  		v.AddArg(v2)
   917  		v.AddArg(mem)
   918  		return true
   919  	}
   920  	// match: (AtomicOr8 ptr val mem)
   921  	// cond: config.BigEndian
   922  	// result: (LoweredAtomicOr (AND <config.fe.TypeUInt32().PtrTo()> (MOVWconst [^3]) ptr) 		(SLL <config.fe.TypeUInt32()> (ZeroExt8to32 val) 			(SLLconst <config.fe.TypeUInt32()> [3] 				(ANDconst <config.fe.TypeUInt32()> [3] 					(XORconst <config.fe.TypeUInt32()> [3] ptr)))) mem)
   923  	for {
   924  		ptr := v.Args[0]
   925  		val := v.Args[1]
   926  		mem := v.Args[2]
   927  		if !(config.BigEndian) {
   928  			break
   929  		}
   930  		v.reset(OpMIPSLoweredAtomicOr)
   931  		v0 := b.NewValue0(v.Line, OpMIPSAND, config.fe.TypeUInt32().PtrTo())
   932  		v1 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
   933  		v1.AuxInt = ^3
   934  		v0.AddArg(v1)
   935  		v0.AddArg(ptr)
   936  		v.AddArg(v0)
   937  		v2 := b.NewValue0(v.Line, OpMIPSSLL, config.fe.TypeUInt32())
   938  		v3 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
   939  		v3.AddArg(val)
   940  		v2.AddArg(v3)
   941  		v4 := b.NewValue0(v.Line, OpMIPSSLLconst, config.fe.TypeUInt32())
   942  		v4.AuxInt = 3
   943  		v5 := b.NewValue0(v.Line, OpMIPSANDconst, config.fe.TypeUInt32())
   944  		v5.AuxInt = 3
   945  		v6 := b.NewValue0(v.Line, OpMIPSXORconst, config.fe.TypeUInt32())
   946  		v6.AuxInt = 3
   947  		v6.AddArg(ptr)
   948  		v5.AddArg(v6)
   949  		v4.AddArg(v5)
   950  		v2.AddArg(v4)
   951  		v.AddArg(v2)
   952  		v.AddArg(mem)
   953  		return true
   954  	}
   955  	return false
   956  }
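// Note on the AtomicOr8 lowering above: as with AtomicAnd8, the byte is ORed
// into its lane of the aligned 32-bit word. No inverse mask is needed here,
// because ORing zero bits into the other three byte lanes already preserves
// them.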
   957  func rewriteValueMIPS_OpAtomicStore32(v *Value, config *Config) bool {
   958  	b := v.Block
   959  	_ = b
   960  	// match: (AtomicStore32      ptr val mem)
   961  	// cond:
   962  	// result: (LoweredAtomicStore ptr val mem)
   963  	for {
   964  		ptr := v.Args[0]
   965  		val := v.Args[1]
   966  		mem := v.Args[2]
   967  		v.reset(OpMIPSLoweredAtomicStore)
   968  		v.AddArg(ptr)
   969  		v.AddArg(val)
   970  		v.AddArg(mem)
   971  		return true
   972  	}
   973  }
   974  func rewriteValueMIPS_OpAtomicStorePtrNoWB(v *Value, config *Config) bool {
   975  	b := v.Block
   976  	_ = b
   977  	// match: (AtomicStorePtrNoWB ptr val mem)
   978  	// cond:
   979  	// result: (LoweredAtomicStore  ptr val mem)
   980  	for {
   981  		ptr := v.Args[0]
   982  		val := v.Args[1]
   983  		mem := v.Args[2]
   984  		v.reset(OpMIPSLoweredAtomicStore)
   985  		v.AddArg(ptr)
   986  		v.AddArg(val)
   987  		v.AddArg(mem)
   988  		return true
   989  	}
   990  }
   991  func rewriteValueMIPS_OpClosureCall(v *Value, config *Config) bool {
   992  	b := v.Block
   993  	_ = b
   994  	// match: (ClosureCall [argwid] entry closure mem)
   995  	// cond:
   996  	// result: (CALLclosure [argwid] entry closure mem)
   997  	for {
   998  		argwid := v.AuxInt
   999  		entry := v.Args[0]
  1000  		closure := v.Args[1]
  1001  		mem := v.Args[2]
  1002  		v.reset(OpMIPSCALLclosure)
  1003  		v.AuxInt = argwid
  1004  		v.AddArg(entry)
  1005  		v.AddArg(closure)
  1006  		v.AddArg(mem)
  1007  		return true
  1008  	}
  1009  }
  1010  func rewriteValueMIPS_OpCom16(v *Value, config *Config) bool {
  1011  	b := v.Block
  1012  	_ = b
  1013  	// match: (Com16 x)
  1014  	// cond:
  1015  	// result: (NORconst [0] x)
  1016  	for {
  1017  		x := v.Args[0]
  1018  		v.reset(OpMIPSNORconst)
  1019  		v.AuxInt = 0
  1020  		v.AddArg(x)
  1021  		return true
  1022  	}
  1023  }
  1024  func rewriteValueMIPS_OpCom32(v *Value, config *Config) bool {
  1025  	b := v.Block
  1026  	_ = b
  1027  	// match: (Com32 x)
  1028  	// cond:
  1029  	// result: (NORconst [0] x)
  1030  	for {
  1031  		x := v.Args[0]
  1032  		v.reset(OpMIPSNORconst)
  1033  		v.AuxInt = 0
  1034  		v.AddArg(x)
  1035  		return true
  1036  	}
  1037  }
  1038  func rewriteValueMIPS_OpCom8(v *Value, config *Config) bool {
  1039  	b := v.Block
  1040  	_ = b
  1041  	// match: (Com8 x)
  1042  	// cond:
  1043  	// result: (NORconst [0] x)
  1044  	for {
  1045  		x := v.Args[0]
  1046  		v.reset(OpMIPSNORconst)
  1047  		v.AuxInt = 0
  1048  		v.AddArg(x)
  1049  		return true
  1050  	}
  1051  }
  1052  func rewriteValueMIPS_OpConst16(v *Value, config *Config) bool {
  1053  	b := v.Block
  1054  	_ = b
  1055  	// match: (Const16 [val])
  1056  	// cond:
  1057  	// result: (MOVWconst [val])
  1058  	for {
  1059  		val := v.AuxInt
  1060  		v.reset(OpMIPSMOVWconst)
  1061  		v.AuxInt = val
  1062  		return true
  1063  	}
  1064  }
  1065  func rewriteValueMIPS_OpConst32(v *Value, config *Config) bool {
  1066  	b := v.Block
  1067  	_ = b
  1068  	// match: (Const32 [val])
  1069  	// cond:
  1070  	// result: (MOVWconst [val])
  1071  	for {
  1072  		val := v.AuxInt
  1073  		v.reset(OpMIPSMOVWconst)
  1074  		v.AuxInt = val
  1075  		return true
  1076  	}
  1077  }
  1078  func rewriteValueMIPS_OpConst32F(v *Value, config *Config) bool {
  1079  	b := v.Block
  1080  	_ = b
  1081  	// match: (Const32F [val])
  1082  	// cond:
  1083  	// result: (MOVFconst [val])
  1084  	for {
  1085  		val := v.AuxInt
  1086  		v.reset(OpMIPSMOVFconst)
  1087  		v.AuxInt = val
  1088  		return true
  1089  	}
  1090  }
  1091  func rewriteValueMIPS_OpConst64F(v *Value, config *Config) bool {
  1092  	b := v.Block
  1093  	_ = b
  1094  	// match: (Const64F [val])
  1095  	// cond:
  1096  	// result: (MOVDconst [val])
  1097  	for {
  1098  		val := v.AuxInt
  1099  		v.reset(OpMIPSMOVDconst)
  1100  		v.AuxInt = val
  1101  		return true
  1102  	}
  1103  }
  1104  func rewriteValueMIPS_OpConst8(v *Value, config *Config) bool {
  1105  	b := v.Block
  1106  	_ = b
  1107  	// match: (Const8 [val])
  1108  	// cond:
  1109  	// result: (MOVWconst [val])
  1110  	for {
  1111  		val := v.AuxInt
  1112  		v.reset(OpMIPSMOVWconst)
  1113  		v.AuxInt = val
  1114  		return true
  1115  	}
  1116  }
  1117  func rewriteValueMIPS_OpConstBool(v *Value, config *Config) bool {
  1118  	b := v.Block
  1119  	_ = b
  1120  	// match: (ConstBool [b])
  1121  	// cond:
  1122  	// result: (MOVWconst [b])
  1123  	for {
  1124  		b := v.AuxInt
  1125  		v.reset(OpMIPSMOVWconst)
  1126  		v.AuxInt = b
  1127  		return true
  1128  	}
  1129  }
  1130  func rewriteValueMIPS_OpConstNil(v *Value, config *Config) bool {
  1131  	b := v.Block
  1132  	_ = b
  1133  	// match: (ConstNil)
  1134  	// cond:
  1135  	// result: (MOVWconst [0])
  1136  	for {
  1137  		v.reset(OpMIPSMOVWconst)
  1138  		v.AuxInt = 0
  1139  		return true
  1140  	}
  1141  }
  1142  func rewriteValueMIPS_OpConvert(v *Value, config *Config) bool {
  1143  	b := v.Block
  1144  	_ = b
  1145  	// match: (Convert x mem)
  1146  	// cond:
  1147  	// result: (MOVWconvert x mem)
  1148  	for {
  1149  		x := v.Args[0]
  1150  		mem := v.Args[1]
  1151  		v.reset(OpMIPSMOVWconvert)
  1152  		v.AddArg(x)
  1153  		v.AddArg(mem)
  1154  		return true
  1155  	}
  1156  }
  1157  func rewriteValueMIPS_OpCtz32(v *Value, config *Config) bool {
  1158  	b := v.Block
  1159  	_ = b
  1160  	// match: (Ctz32 <t> x)
  1161  	// cond:
  1162  	// result: (SUB (MOVWconst [32]) (CLZ <t> (SUBconst <t> [1] (AND <t> x (NEG <t> x)))))
  1163  	for {
  1164  		t := v.Type
  1165  		x := v.Args[0]
  1166  		v.reset(OpMIPSSUB)
  1167  		v0 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  1168  		v0.AuxInt = 32
  1169  		v.AddArg(v0)
  1170  		v1 := b.NewValue0(v.Line, OpMIPSCLZ, t)
  1171  		v2 := b.NewValue0(v.Line, OpMIPSSUBconst, t)
  1172  		v2.AuxInt = 1
  1173  		v3 := b.NewValue0(v.Line, OpMIPSAND, t)
  1174  		v3.AddArg(x)
  1175  		v4 := b.NewValue0(v.Line, OpMIPSNEG, t)
  1176  		v4.AddArg(x)
  1177  		v3.AddArg(v4)
  1178  		v2.AddArg(v3)
  1179  		v1.AddArg(v2)
  1180  		v.AddArg(v1)
  1181  		return true
  1182  	}
  1183  }
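// Note on the Ctz32 lowering above: x & (-x) isolates the lowest set bit, so
// (x & -x) - 1 has exactly ctz(x) low bits set and its CLZ is 32 - ctz(x).
// Subtracting that from 32 recovers ctz(x); for x == 0 the chain yields 32.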
  1184  func rewriteValueMIPS_OpCvt32Fto32(v *Value, config *Config) bool {
  1185  	b := v.Block
  1186  	_ = b
  1187  	// match: (Cvt32Fto32 x)
  1188  	// cond:
  1189  	// result: (TRUNCFW x)
  1190  	for {
  1191  		x := v.Args[0]
  1192  		v.reset(OpMIPSTRUNCFW)
  1193  		v.AddArg(x)
  1194  		return true
  1195  	}
  1196  }
  1197  func rewriteValueMIPS_OpCvt32Fto64F(v *Value, config *Config) bool {
  1198  	b := v.Block
  1199  	_ = b
  1200  	// match: (Cvt32Fto64F x)
  1201  	// cond:
  1202  	// result: (MOVFD x)
  1203  	for {
  1204  		x := v.Args[0]
  1205  		v.reset(OpMIPSMOVFD)
  1206  		v.AddArg(x)
  1207  		return true
  1208  	}
  1209  }
  1210  func rewriteValueMIPS_OpCvt32to32F(v *Value, config *Config) bool {
  1211  	b := v.Block
  1212  	_ = b
  1213  	// match: (Cvt32to32F x)
  1214  	// cond:
  1215  	// result: (MOVWF x)
  1216  	for {
  1217  		x := v.Args[0]
  1218  		v.reset(OpMIPSMOVWF)
  1219  		v.AddArg(x)
  1220  		return true
  1221  	}
  1222  }
  1223  func rewriteValueMIPS_OpCvt32to64F(v *Value, config *Config) bool {
  1224  	b := v.Block
  1225  	_ = b
  1226  	// match: (Cvt32to64F x)
  1227  	// cond:
  1228  	// result: (MOVWD x)
  1229  	for {
  1230  		x := v.Args[0]
  1231  		v.reset(OpMIPSMOVWD)
  1232  		v.AddArg(x)
  1233  		return true
  1234  	}
  1235  }
  1236  func rewriteValueMIPS_OpCvt64Fto32(v *Value, config *Config) bool {
  1237  	b := v.Block
  1238  	_ = b
  1239  	// match: (Cvt64Fto32 x)
  1240  	// cond:
  1241  	// result: (TRUNCDW x)
  1242  	for {
  1243  		x := v.Args[0]
  1244  		v.reset(OpMIPSTRUNCDW)
  1245  		v.AddArg(x)
  1246  		return true
  1247  	}
  1248  }
  1249  func rewriteValueMIPS_OpCvt64Fto32F(v *Value, config *Config) bool {
  1250  	b := v.Block
  1251  	_ = b
  1252  	// match: (Cvt64Fto32F x)
  1253  	// cond:
  1254  	// result: (MOVDF x)
  1255  	for {
  1256  		x := v.Args[0]
  1257  		v.reset(OpMIPSMOVDF)
  1258  		v.AddArg(x)
  1259  		return true
  1260  	}
  1261  }
  1262  func rewriteValueMIPS_OpDeferCall(v *Value, config *Config) bool {
  1263  	b := v.Block
  1264  	_ = b
  1265  	// match: (DeferCall [argwid] mem)
  1266  	// cond:
  1267  	// result: (CALLdefer [argwid] mem)
  1268  	for {
  1269  		argwid := v.AuxInt
  1270  		mem := v.Args[0]
  1271  		v.reset(OpMIPSCALLdefer)
  1272  		v.AuxInt = argwid
  1273  		v.AddArg(mem)
  1274  		return true
  1275  	}
  1276  }
  1277  func rewriteValueMIPS_OpDiv16(v *Value, config *Config) bool {
  1278  	b := v.Block
  1279  	_ = b
  1280  	// match: (Div16 x y)
  1281  	// cond:
  1282  	// result: (Select1 (DIV (SignExt16to32 x) (SignExt16to32 y)))
  1283  	for {
  1284  		x := v.Args[0]
  1285  		y := v.Args[1]
  1286  		v.reset(OpSelect1)
  1287  		v0 := b.NewValue0(v.Line, OpMIPSDIV, MakeTuple(config.fe.TypeInt32(), config.fe.TypeInt32()))
  1288  		v1 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
  1289  		v1.AddArg(x)
  1290  		v0.AddArg(v1)
  1291  		v2 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
  1292  		v2.AddArg(y)
  1293  		v0.AddArg(v2)
  1294  		v.AddArg(v0)
  1295  		return true
  1296  	}
  1297  }
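// Note on the Div16 lowering above (and the other Div rules that follow):
// MIPS DIV/DIVU produce a (remainder, quotient) tuple in HI/LO; Select1
// extracts the quotient, while the Mod rewrites use Select0 for the
// remainder. Narrow operands are sign- or zero-extended to 32 bits first.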
  1298  func rewriteValueMIPS_OpDiv16u(v *Value, config *Config) bool {
  1299  	b := v.Block
  1300  	_ = b
  1301  	// match: (Div16u x y)
  1302  	// cond:
  1303  	// result: (Select1 (DIVU (ZeroExt16to32 x) (ZeroExt16to32 y)))
  1304  	for {
  1305  		x := v.Args[0]
  1306  		y := v.Args[1]
  1307  		v.reset(OpSelect1)
  1308  		v0 := b.NewValue0(v.Line, OpMIPSDIVU, MakeTuple(config.fe.TypeUInt32(), config.fe.TypeUInt32()))
  1309  		v1 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
  1310  		v1.AddArg(x)
  1311  		v0.AddArg(v1)
  1312  		v2 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
  1313  		v2.AddArg(y)
  1314  		v0.AddArg(v2)
  1315  		v.AddArg(v0)
  1316  		return true
  1317  	}
  1318  }
  1319  func rewriteValueMIPS_OpDiv32(v *Value, config *Config) bool {
  1320  	b := v.Block
  1321  	_ = b
  1322  	// match: (Div32 x y)
  1323  	// cond:
  1324  	// result: (Select1 (DIV x y))
  1325  	for {
  1326  		x := v.Args[0]
  1327  		y := v.Args[1]
  1328  		v.reset(OpSelect1)
  1329  		v0 := b.NewValue0(v.Line, OpMIPSDIV, MakeTuple(config.fe.TypeInt32(), config.fe.TypeInt32()))
  1330  		v0.AddArg(x)
  1331  		v0.AddArg(y)
  1332  		v.AddArg(v0)
  1333  		return true
  1334  	}
  1335  }
  1336  func rewriteValueMIPS_OpDiv32F(v *Value, config *Config) bool {
  1337  	b := v.Block
  1338  	_ = b
  1339  	// match: (Div32F x y)
  1340  	// cond:
  1341  	// result: (DIVF x y)
  1342  	for {
  1343  		x := v.Args[0]
  1344  		y := v.Args[1]
  1345  		v.reset(OpMIPSDIVF)
  1346  		v.AddArg(x)
  1347  		v.AddArg(y)
  1348  		return true
  1349  	}
  1350  }
  1351  func rewriteValueMIPS_OpDiv32u(v *Value, config *Config) bool {
  1352  	b := v.Block
  1353  	_ = b
  1354  	// match: (Div32u x y)
  1355  	// cond:
  1356  	// result: (Select1 (DIVU x y))
  1357  	for {
  1358  		x := v.Args[0]
  1359  		y := v.Args[1]
  1360  		v.reset(OpSelect1)
  1361  		v0 := b.NewValue0(v.Line, OpMIPSDIVU, MakeTuple(config.fe.TypeUInt32(), config.fe.TypeUInt32()))
  1362  		v0.AddArg(x)
  1363  		v0.AddArg(y)
  1364  		v.AddArg(v0)
  1365  		return true
  1366  	}
  1367  }
  1368  func rewriteValueMIPS_OpDiv64F(v *Value, config *Config) bool {
  1369  	b := v.Block
  1370  	_ = b
  1371  	// match: (Div64F x y)
  1372  	// cond:
  1373  	// result: (DIVD x y)
  1374  	for {
  1375  		x := v.Args[0]
  1376  		y := v.Args[1]
  1377  		v.reset(OpMIPSDIVD)
  1378  		v.AddArg(x)
  1379  		v.AddArg(y)
  1380  		return true
  1381  	}
  1382  }
  1383  func rewriteValueMIPS_OpDiv8(v *Value, config *Config) bool {
  1384  	b := v.Block
  1385  	_ = b
  1386  	// match: (Div8 x y)
  1387  	// cond:
  1388  	// result: (Select1 (DIV (SignExt8to32 x) (SignExt8to32 y)))
  1389  	for {
  1390  		x := v.Args[0]
  1391  		y := v.Args[1]
  1392  		v.reset(OpSelect1)
  1393  		v0 := b.NewValue0(v.Line, OpMIPSDIV, MakeTuple(config.fe.TypeInt32(), config.fe.TypeInt32()))
  1394  		v1 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
  1395  		v1.AddArg(x)
  1396  		v0.AddArg(v1)
  1397  		v2 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
  1398  		v2.AddArg(y)
  1399  		v0.AddArg(v2)
  1400  		v.AddArg(v0)
  1401  		return true
  1402  	}
  1403  }
  1404  func rewriteValueMIPS_OpDiv8u(v *Value, config *Config) bool {
  1405  	b := v.Block
  1406  	_ = b
  1407  	// match: (Div8u x y)
  1408  	// cond:
  1409  	// result: (Select1 (DIVU (ZeroExt8to32 x) (ZeroExt8to32 y)))
  1410  	for {
  1411  		x := v.Args[0]
  1412  		y := v.Args[1]
  1413  		v.reset(OpSelect1)
  1414  		v0 := b.NewValue0(v.Line, OpMIPSDIVU, MakeTuple(config.fe.TypeUInt32(), config.fe.TypeUInt32()))
  1415  		v1 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
  1416  		v1.AddArg(x)
  1417  		v0.AddArg(v1)
  1418  		v2 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
  1419  		v2.AddArg(y)
  1420  		v0.AddArg(v2)
  1421  		v.AddArg(v0)
  1422  		return true
  1423  	}
  1424  }
  1425  func rewriteValueMIPS_OpEq16(v *Value, config *Config) bool {
  1426  	b := v.Block
  1427  	_ = b
  1428  	// match: (Eq16 x y)
  1429  	// cond:
  1430  	// result: (SGTUconst [1] (XOR (ZeroExt16to32 x) (ZeroExt16to32 y)))
  1431  	for {
  1432  		x := v.Args[0]
  1433  		y := v.Args[1]
  1434  		v.reset(OpMIPSSGTUconst)
  1435  		v.AuxInt = 1
  1436  		v0 := b.NewValue0(v.Line, OpMIPSXOR, config.fe.TypeUInt32())
  1437  		v1 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
  1438  		v1.AddArg(x)
  1439  		v0.AddArg(v1)
  1440  		v2 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
  1441  		v2.AddArg(y)
  1442  		v0.AddArg(v2)
  1443  		v.AddArg(v0)
  1444  		return true
  1445  	}
  1446  }
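// Note: this Eq16 rule, and the Eq32/Eq8/EqPtr rules that follow, compute
// equality as SGTUconst [1] (XOR a b), i.e. an unsigned test 1 > (a^b), which
// is true exactly when a^b == 0. Narrow operands are zero-extended first so
// only their own bits take part in the comparison.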
  1447  func rewriteValueMIPS_OpEq32(v *Value, config *Config) bool {
  1448  	b := v.Block
  1449  	_ = b
  1450  	// match: (Eq32 x y)
  1451  	// cond:
  1452  	// result: (SGTUconst [1] (XOR x y))
  1453  	for {
  1454  		x := v.Args[0]
  1455  		y := v.Args[1]
  1456  		v.reset(OpMIPSSGTUconst)
  1457  		v.AuxInt = 1
  1458  		v0 := b.NewValue0(v.Line, OpMIPSXOR, config.fe.TypeUInt32())
  1459  		v0.AddArg(x)
  1460  		v0.AddArg(y)
  1461  		v.AddArg(v0)
  1462  		return true
  1463  	}
  1464  }
  1465  func rewriteValueMIPS_OpEq32F(v *Value, config *Config) bool {
  1466  	b := v.Block
  1467  	_ = b
  1468  	// match: (Eq32F x y)
  1469  	// cond:
  1470  	// result: (FPFlagTrue (CMPEQF x y))
  1471  	for {
  1472  		x := v.Args[0]
  1473  		y := v.Args[1]
  1474  		v.reset(OpMIPSFPFlagTrue)
  1475  		v0 := b.NewValue0(v.Line, OpMIPSCMPEQF, TypeFlags)
  1476  		v0.AddArg(x)
  1477  		v0.AddArg(y)
  1478  		v.AddArg(v0)
  1479  		return true
  1480  	}
  1481  }
  1482  func rewriteValueMIPS_OpEq64F(v *Value, config *Config) bool {
  1483  	b := v.Block
  1484  	_ = b
  1485  	// match: (Eq64F x y)
  1486  	// cond:
  1487  	// result: (FPFlagTrue (CMPEQD x y))
  1488  	for {
  1489  		x := v.Args[0]
  1490  		y := v.Args[1]
  1491  		v.reset(OpMIPSFPFlagTrue)
  1492  		v0 := b.NewValue0(v.Line, OpMIPSCMPEQD, TypeFlags)
  1493  		v0.AddArg(x)
  1494  		v0.AddArg(y)
  1495  		v.AddArg(v0)
  1496  		return true
  1497  	}
  1498  }
  1499  func rewriteValueMIPS_OpEq8(v *Value, config *Config) bool {
  1500  	b := v.Block
  1501  	_ = b
  1502  	// match: (Eq8 x y)
  1503  	// cond:
  1504  	// result: (SGTUconst [1] (XOR (ZeroExt8to32 x) (ZeroExt8to32 y)))
  1505  	for {
  1506  		x := v.Args[0]
  1507  		y := v.Args[1]
  1508  		v.reset(OpMIPSSGTUconst)
  1509  		v.AuxInt = 1
  1510  		v0 := b.NewValue0(v.Line, OpMIPSXOR, config.fe.TypeUInt32())
  1511  		v1 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
  1512  		v1.AddArg(x)
  1513  		v0.AddArg(v1)
  1514  		v2 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
  1515  		v2.AddArg(y)
  1516  		v0.AddArg(v2)
  1517  		v.AddArg(v0)
  1518  		return true
  1519  	}
  1520  }
  1521  func rewriteValueMIPS_OpEqB(v *Value, config *Config) bool {
  1522  	b := v.Block
  1523  	_ = b
  1524  	// match: (EqB x y)
  1525  	// cond:
  1526  	// result: (XORconst [1] (XOR <config.fe.TypeBool()> x y))
  1527  	for {
  1528  		x := v.Args[0]
  1529  		y := v.Args[1]
  1530  		v.reset(OpMIPSXORconst)
  1531  		v.AuxInt = 1
  1532  		v0 := b.NewValue0(v.Line, OpMIPSXOR, config.fe.TypeBool())
  1533  		v0.AddArg(x)
  1534  		v0.AddArg(y)
  1535  		v.AddArg(v0)
  1536  		return true
  1537  	}
  1538  }
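// Note: for booleans (values 0 or 1), x^y is 1 exactly when x and y differ,
// so XORconst [1] of that XOR yields 1 precisely when they are equal.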
  1539  func rewriteValueMIPS_OpEqPtr(v *Value, config *Config) bool {
  1540  	b := v.Block
  1541  	_ = b
  1542  	// match: (EqPtr x y)
  1543  	// cond:
  1544  	// result: (SGTUconst [1] (XOR x y))
  1545  	for {
  1546  		x := v.Args[0]
  1547  		y := v.Args[1]
  1548  		v.reset(OpMIPSSGTUconst)
  1549  		v.AuxInt = 1
  1550  		v0 := b.NewValue0(v.Line, OpMIPSXOR, config.fe.TypeUInt32())
  1551  		v0.AddArg(x)
  1552  		v0.AddArg(y)
  1553  		v.AddArg(v0)
  1554  		return true
  1555  	}
  1556  }
  1557  func rewriteValueMIPS_OpGeq16(v *Value, config *Config) bool {
  1558  	b := v.Block
  1559  	_ = b
  1560  	// match: (Geq16 x y)
  1561  	// cond:
  1562  	// result: (XORconst [1] (SGT (SignExt16to32 y) (SignExt16to32 x)))
  1563  	for {
  1564  		x := v.Args[0]
  1565  		y := v.Args[1]
  1566  		v.reset(OpMIPSXORconst)
  1567  		v.AuxInt = 1
  1568  		v0 := b.NewValue0(v.Line, OpMIPSSGT, config.fe.TypeBool())
  1569  		v1 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
  1570  		v1.AddArg(y)
  1571  		v0.AddArg(v1)
  1572  		v2 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
  1573  		v2.AddArg(x)
  1574  		v0.AddArg(v2)
  1575  		v.AddArg(v0)
  1576  		return true
  1577  	}
  1578  }
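// Note: the signed and unsigned Geq rules here all use the same identity,
// x >= y  <=>  !(y > x): the operands are extended as needed, compared with
// SGT/SGTU in swapped order, and the result is inverted with XORconst [1].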
  1579  func rewriteValueMIPS_OpGeq16U(v *Value, config *Config) bool {
  1580  	b := v.Block
  1581  	_ = b
  1582  	// match: (Geq16U x y)
  1583  	// cond:
  1584  	// result: (XORconst [1] (SGTU (ZeroExt16to32 y) (ZeroExt16to32 x)))
  1585  	for {
  1586  		x := v.Args[0]
  1587  		y := v.Args[1]
  1588  		v.reset(OpMIPSXORconst)
  1589  		v.AuxInt = 1
  1590  		v0 := b.NewValue0(v.Line, OpMIPSSGTU, config.fe.TypeBool())
  1591  		v1 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
  1592  		v1.AddArg(y)
  1593  		v0.AddArg(v1)
  1594  		v2 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
  1595  		v2.AddArg(x)
  1596  		v0.AddArg(v2)
  1597  		v.AddArg(v0)
  1598  		return true
  1599  	}
  1600  }
  1601  func rewriteValueMIPS_OpGeq32(v *Value, config *Config) bool {
  1602  	b := v.Block
  1603  	_ = b
  1604  	// match: (Geq32 x y)
  1605  	// cond:
  1606  	// result: (XORconst [1] (SGT y x))
  1607  	for {
  1608  		x := v.Args[0]
  1609  		y := v.Args[1]
  1610  		v.reset(OpMIPSXORconst)
  1611  		v.AuxInt = 1
  1612  		v0 := b.NewValue0(v.Line, OpMIPSSGT, config.fe.TypeBool())
  1613  		v0.AddArg(y)
  1614  		v0.AddArg(x)
  1615  		v.AddArg(v0)
  1616  		return true
  1617  	}
  1618  }
  1619  func rewriteValueMIPS_OpGeq32F(v *Value, config *Config) bool {
  1620  	b := v.Block
  1621  	_ = b
  1622  	// match: (Geq32F x y)
  1623  	// cond:
  1624  	// result: (FPFlagTrue (CMPGEF x y))
  1625  	for {
  1626  		x := v.Args[0]
  1627  		y := v.Args[1]
  1628  		v.reset(OpMIPSFPFlagTrue)
  1629  		v0 := b.NewValue0(v.Line, OpMIPSCMPGEF, TypeFlags)
  1630  		v0.AddArg(x)
  1631  		v0.AddArg(y)
  1632  		v.AddArg(v0)
  1633  		return true
  1634  	}
  1635  }
  1636  func rewriteValueMIPS_OpGeq32U(v *Value, config *Config) bool {
  1637  	b := v.Block
  1638  	_ = b
  1639  	// match: (Geq32U x y)
  1640  	// cond:
  1641  	// result: (XORconst [1] (SGTU y x))
  1642  	for {
  1643  		x := v.Args[0]
  1644  		y := v.Args[1]
  1645  		v.reset(OpMIPSXORconst)
  1646  		v.AuxInt = 1
  1647  		v0 := b.NewValue0(v.Line, OpMIPSSGTU, config.fe.TypeBool())
  1648  		v0.AddArg(y)
  1649  		v0.AddArg(x)
  1650  		v.AddArg(v0)
  1651  		return true
  1652  	}
  1653  }
  1654  func rewriteValueMIPS_OpGeq64F(v *Value, config *Config) bool {
  1655  	b := v.Block
  1656  	_ = b
  1657  	// match: (Geq64F x y)
  1658  	// cond:
  1659  	// result: (FPFlagTrue (CMPGED x y))
  1660  	for {
  1661  		x := v.Args[0]
  1662  		y := v.Args[1]
  1663  		v.reset(OpMIPSFPFlagTrue)
  1664  		v0 := b.NewValue0(v.Line, OpMIPSCMPGED, TypeFlags)
  1665  		v0.AddArg(x)
  1666  		v0.AddArg(y)
  1667  		v.AddArg(v0)
  1668  		return true
  1669  	}
  1670  }
  1671  func rewriteValueMIPS_OpGeq8(v *Value, config *Config) bool {
  1672  	b := v.Block
  1673  	_ = b
  1674  	// match: (Geq8 x y)
  1675  	// cond:
  1676  	// result: (XORconst [1] (SGT (SignExt8to32 y) (SignExt8to32 x)))
  1677  	for {
  1678  		x := v.Args[0]
  1679  		y := v.Args[1]
  1680  		v.reset(OpMIPSXORconst)
  1681  		v.AuxInt = 1
  1682  		v0 := b.NewValue0(v.Line, OpMIPSSGT, config.fe.TypeBool())
  1683  		v1 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
  1684  		v1.AddArg(y)
  1685  		v0.AddArg(v1)
  1686  		v2 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
  1687  		v2.AddArg(x)
  1688  		v0.AddArg(v2)
  1689  		v.AddArg(v0)
  1690  		return true
  1691  	}
  1692  }
  1693  func rewriteValueMIPS_OpGeq8U(v *Value, config *Config) bool {
  1694  	b := v.Block
  1695  	_ = b
  1696  	// match: (Geq8U x y)
  1697  	// cond:
  1698  	// result: (XORconst [1] (SGTU (ZeroExt8to32 y) (ZeroExt8to32 x)))
  1699  	for {
  1700  		x := v.Args[0]
  1701  		y := v.Args[1]
  1702  		v.reset(OpMIPSXORconst)
  1703  		v.AuxInt = 1
  1704  		v0 := b.NewValue0(v.Line, OpMIPSSGTU, config.fe.TypeBool())
  1705  		v1 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
  1706  		v1.AddArg(y)
  1707  		v0.AddArg(v1)
  1708  		v2 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
  1709  		v2.AddArg(x)
  1710  		v0.AddArg(v2)
  1711  		v.AddArg(v0)
  1712  		return true
  1713  	}
  1714  }
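// A reading of the Geq* rules above: this lowering only has "set on greater
// than" comparators (SGT for signed, SGTU for unsigned), so x >= y is
// produced as the negation of y > x, i.e. XORconst [1] (SGT(U) y x), with
// 8- and 16-bit operands first sign- or zero-extended to 32 bits. The
// floating-point variants instead test the FP condition flag set by a
// CMPGEF/CMPGED compare.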
  1715  func rewriteValueMIPS_OpGetClosurePtr(v *Value, config *Config) bool {
  1716  	b := v.Block
  1717  	_ = b
  1718  	// match: (GetClosurePtr)
  1719  	// cond:
  1720  	// result: (LoweredGetClosurePtr)
  1721  	for {
  1722  		v.reset(OpMIPSLoweredGetClosurePtr)
  1723  		return true
  1724  	}
  1725  }
  1726  func rewriteValueMIPS_OpGoCall(v *Value, config *Config) bool {
  1727  	b := v.Block
  1728  	_ = b
  1729  	// match: (GoCall [argwid] mem)
  1730  	// cond:
  1731  	// result: (CALLgo [argwid] mem)
  1732  	for {
  1733  		argwid := v.AuxInt
  1734  		mem := v.Args[0]
  1735  		v.reset(OpMIPSCALLgo)
  1736  		v.AuxInt = argwid
  1737  		v.AddArg(mem)
  1738  		return true
  1739  	}
  1740  }
  1741  func rewriteValueMIPS_OpGreater16(v *Value, config *Config) bool {
  1742  	b := v.Block
  1743  	_ = b
  1744  	// match: (Greater16 x y)
  1745  	// cond:
  1746  	// result: (SGT (SignExt16to32 x) (SignExt16to32 y))
  1747  	for {
  1748  		x := v.Args[0]
  1749  		y := v.Args[1]
  1750  		v.reset(OpMIPSSGT)
  1751  		v0 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
  1752  		v0.AddArg(x)
  1753  		v.AddArg(v0)
  1754  		v1 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
  1755  		v1.AddArg(y)
  1756  		v.AddArg(v1)
  1757  		return true
  1758  	}
  1759  }
  1760  func rewriteValueMIPS_OpGreater16U(v *Value, config *Config) bool {
  1761  	b := v.Block
  1762  	_ = b
  1763  	// match: (Greater16U x y)
  1764  	// cond:
  1765  	// result: (SGTU (ZeroExt16to32 x) (ZeroExt16to32 y))
  1766  	for {
  1767  		x := v.Args[0]
  1768  		y := v.Args[1]
  1769  		v.reset(OpMIPSSGTU)
  1770  		v0 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
  1771  		v0.AddArg(x)
  1772  		v.AddArg(v0)
  1773  		v1 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
  1774  		v1.AddArg(y)
  1775  		v.AddArg(v1)
  1776  		return true
  1777  	}
  1778  }
  1779  func rewriteValueMIPS_OpGreater32(v *Value, config *Config) bool {
  1780  	b := v.Block
  1781  	_ = b
  1782  	// match: (Greater32 x y)
  1783  	// cond:
  1784  	// result: (SGT x y)
  1785  	for {
  1786  		x := v.Args[0]
  1787  		y := v.Args[1]
  1788  		v.reset(OpMIPSSGT)
  1789  		v.AddArg(x)
  1790  		v.AddArg(y)
  1791  		return true
  1792  	}
  1793  }
  1794  func rewriteValueMIPS_OpGreater32F(v *Value, config *Config) bool {
  1795  	b := v.Block
  1796  	_ = b
  1797  	// match: (Greater32F x y)
  1798  	// cond:
  1799  	// result: (FPFlagTrue (CMPGTF x y))
  1800  	for {
  1801  		x := v.Args[0]
  1802  		y := v.Args[1]
  1803  		v.reset(OpMIPSFPFlagTrue)
  1804  		v0 := b.NewValue0(v.Line, OpMIPSCMPGTF, TypeFlags)
  1805  		v0.AddArg(x)
  1806  		v0.AddArg(y)
  1807  		v.AddArg(v0)
  1808  		return true
  1809  	}
  1810  }
  1811  func rewriteValueMIPS_OpGreater32U(v *Value, config *Config) bool {
  1812  	b := v.Block
  1813  	_ = b
  1814  	// match: (Greater32U x y)
  1815  	// cond:
  1816  	// result: (SGTU x y)
  1817  	for {
  1818  		x := v.Args[0]
  1819  		y := v.Args[1]
  1820  		v.reset(OpMIPSSGTU)
  1821  		v.AddArg(x)
  1822  		v.AddArg(y)
  1823  		return true
  1824  	}
  1825  }
  1826  func rewriteValueMIPS_OpGreater64F(v *Value, config *Config) bool {
  1827  	b := v.Block
  1828  	_ = b
  1829  	// match: (Greater64F x y)
  1830  	// cond:
  1831  	// result: (FPFlagTrue (CMPGTD x y))
  1832  	for {
  1833  		x := v.Args[0]
  1834  		y := v.Args[1]
  1835  		v.reset(OpMIPSFPFlagTrue)
  1836  		v0 := b.NewValue0(v.Line, OpMIPSCMPGTD, TypeFlags)
  1837  		v0.AddArg(x)
  1838  		v0.AddArg(y)
  1839  		v.AddArg(v0)
  1840  		return true
  1841  	}
  1842  }
  1843  func rewriteValueMIPS_OpGreater8(v *Value, config *Config) bool {
  1844  	b := v.Block
  1845  	_ = b
  1846  	// match: (Greater8 x y)
  1847  	// cond:
  1848  	// result: (SGT (SignExt8to32 x) (SignExt8to32 y))
  1849  	for {
  1850  		x := v.Args[0]
  1851  		y := v.Args[1]
  1852  		v.reset(OpMIPSSGT)
  1853  		v0 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
  1854  		v0.AddArg(x)
  1855  		v.AddArg(v0)
  1856  		v1 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
  1857  		v1.AddArg(y)
  1858  		v.AddArg(v1)
  1859  		return true
  1860  	}
  1861  }
  1862  func rewriteValueMIPS_OpGreater8U(v *Value, config *Config) bool {
  1863  	b := v.Block
  1864  	_ = b
  1865  	// match: (Greater8U x y)
  1866  	// cond:
  1867  	// result: (SGTU (ZeroExt8to32 x) (ZeroExt8to32 y))
  1868  	for {
  1869  		x := v.Args[0]
  1870  		y := v.Args[1]
  1871  		v.reset(OpMIPSSGTU)
  1872  		v0 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
  1873  		v0.AddArg(x)
  1874  		v.AddArg(v0)
  1875  		v1 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
  1876  		v1.AddArg(y)
  1877  		v.AddArg(v1)
  1878  		return true
  1879  	}
  1880  }
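// The Greater* rules map directly onto SGT/SGTU (again extending sub-word
// operands to 32 bits first), and Greater32F/Greater64F onto CMPGTF/CMPGTD
// plus an FP-flag test, so strict "greater than" needs no result inversion.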
  1881  func rewriteValueMIPS_OpHmul16(v *Value, config *Config) bool {
  1882  	b := v.Block
  1883  	_ = b
  1884  	// match: (Hmul16 x y)
  1885  	// cond:
  1886  	// result: (SRAconst (MUL <config.fe.TypeInt32()> (SignExt16to32 x) (SignExt16to32 y)) [16])
  1887  	for {
  1888  		x := v.Args[0]
  1889  		y := v.Args[1]
  1890  		v.reset(OpMIPSSRAconst)
  1891  		v.AuxInt = 16
  1892  		v0 := b.NewValue0(v.Line, OpMIPSMUL, config.fe.TypeInt32())
  1893  		v1 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
  1894  		v1.AddArg(x)
  1895  		v0.AddArg(v1)
  1896  		v2 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
  1897  		v2.AddArg(y)
  1898  		v0.AddArg(v2)
  1899  		v.AddArg(v0)
  1900  		return true
  1901  	}
  1902  }
  1903  func rewriteValueMIPS_OpHmul16u(v *Value, config *Config) bool {
  1904  	b := v.Block
  1905  	_ = b
  1906  	// match: (Hmul16u x y)
  1907  	// cond:
  1908  	// result: (SRLconst (MUL <config.fe.TypeUInt32()> (ZeroExt16to32 x) (ZeroExt16to32 y)) [16])
  1909  	for {
  1910  		x := v.Args[0]
  1911  		y := v.Args[1]
  1912  		v.reset(OpMIPSSRLconst)
  1913  		v.AuxInt = 16
  1914  		v0 := b.NewValue0(v.Line, OpMIPSMUL, config.fe.TypeUInt32())
  1915  		v1 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
  1916  		v1.AddArg(x)
  1917  		v0.AddArg(v1)
  1918  		v2 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
  1919  		v2.AddArg(y)
  1920  		v0.AddArg(v2)
  1921  		v.AddArg(v0)
  1922  		return true
  1923  	}
  1924  }
  1925  func rewriteValueMIPS_OpHmul32(v *Value, config *Config) bool {
  1926  	b := v.Block
  1927  	_ = b
  1928  	// match: (Hmul32 x y)
  1929  	// cond:
  1930  	// result: (Select0 (MULT x y))
  1931  	for {
  1932  		x := v.Args[0]
  1933  		y := v.Args[1]
  1934  		v.reset(OpSelect0)
  1935  		v0 := b.NewValue0(v.Line, OpMIPSMULT, MakeTuple(config.fe.TypeInt32(), config.fe.TypeInt32()))
  1936  		v0.AddArg(x)
  1937  		v0.AddArg(y)
  1938  		v.AddArg(v0)
  1939  		return true
  1940  	}
  1941  }
  1942  func rewriteValueMIPS_OpHmul32u(v *Value, config *Config) bool {
  1943  	b := v.Block
  1944  	_ = b
  1945  	// match: (Hmul32u x y)
  1946  	// cond:
  1947  	// result: (Select0 (MULTU x y))
  1948  	for {
  1949  		x := v.Args[0]
  1950  		y := v.Args[1]
  1951  		v.reset(OpSelect0)
  1952  		v0 := b.NewValue0(v.Line, OpMIPSMULTU, MakeTuple(config.fe.TypeUInt32(), config.fe.TypeUInt32()))
  1953  		v0.AddArg(x)
  1954  		v0.AddArg(y)
  1955  		v.AddArg(v0)
  1956  		return true
  1957  	}
  1958  }
  1959  func rewriteValueMIPS_OpHmul8(v *Value, config *Config) bool {
  1960  	b := v.Block
  1961  	_ = b
  1962  	// match: (Hmul8 x y)
  1963  	// cond:
  1964  	// result: (SRAconst  (MUL <config.fe.TypeInt32()> (SignExt8to32 x) (SignExt8to32 y)) [8])
  1965  	for {
  1966  		x := v.Args[0]
  1967  		y := v.Args[1]
  1968  		v.reset(OpMIPSSRAconst)
  1969  		v.AuxInt = 8
  1970  		v0 := b.NewValue0(v.Line, OpMIPSMUL, config.fe.TypeInt32())
  1971  		v1 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
  1972  		v1.AddArg(x)
  1973  		v0.AddArg(v1)
  1974  		v2 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
  1975  		v2.AddArg(y)
  1976  		v0.AddArg(v2)
  1977  		v.AddArg(v0)
  1978  		return true
  1979  	}
  1980  }
  1981  func rewriteValueMIPS_OpHmul8u(v *Value, config *Config) bool {
  1982  	b := v.Block
  1983  	_ = b
  1984  	// match: (Hmul8u x y)
  1985  	// cond:
  1986  	// result: (SRLconst (MUL <config.fe.TypeUInt32()> (ZeroExt8to32 x) (ZeroExt8to32 y)) [8])
  1987  	for {
  1988  		x := v.Args[0]
  1989  		y := v.Args[1]
  1990  		v.reset(OpMIPSSRLconst)
  1991  		v.AuxInt = 8
  1992  		v0 := b.NewValue0(v.Line, OpMIPSMUL, config.fe.TypeUInt32())
  1993  		v1 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
  1994  		v1.AddArg(x)
  1995  		v0.AddArg(v1)
  1996  		v2 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
  1997  		v2.AddArg(y)
  1998  		v0.AddArg(v2)
  1999  		v.AddArg(v0)
  2000  		return true
  2001  	}
  2002  }
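// High-multiply lowering: for 8- and 16-bit operands the full product fits
// in 32 bits, so Hmul8/Hmul16 (and their unsigned forms) extend both inputs,
// multiply with MUL, and shift the product right by the operand width.
// Hmul32/Hmul32u instead take Select0 of the MULT/MULTU result tuple, i.e.
// the high word of the 64-bit product.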
  2003  func rewriteValueMIPS_OpInterCall(v *Value, config *Config) bool {
  2004  	b := v.Block
  2005  	_ = b
  2006  	// match: (InterCall [argwid] entry mem)
  2007  	// cond:
  2008  	// result: (CALLinter [argwid] entry mem)
  2009  	for {
  2010  		argwid := v.AuxInt
  2011  		entry := v.Args[0]
  2012  		mem := v.Args[1]
  2013  		v.reset(OpMIPSCALLinter)
  2014  		v.AuxInt = argwid
  2015  		v.AddArg(entry)
  2016  		v.AddArg(mem)
  2017  		return true
  2018  	}
  2019  }
  2020  func rewriteValueMIPS_OpIsInBounds(v *Value, config *Config) bool {
  2021  	b := v.Block
  2022  	_ = b
  2023  	// match: (IsInBounds idx len)
  2024  	// cond:
  2025  	// result: (SGTU len idx)
  2026  	for {
  2027  		idx := v.Args[0]
  2028  		len := v.Args[1]
  2029  		v.reset(OpMIPSSGTU)
  2030  		v.AddArg(len)
  2031  		v.AddArg(idx)
  2032  		return true
  2033  	}
  2034  }
  2035  func rewriteValueMIPS_OpIsNonNil(v *Value, config *Config) bool {
  2036  	b := v.Block
  2037  	_ = b
  2038  	// match: (IsNonNil ptr)
  2039  	// cond:
  2040  	// result: (SGTU ptr (MOVWconst [0]))
  2041  	for {
  2042  		ptr := v.Args[0]
  2043  		v.reset(OpMIPSSGTU)
  2044  		v.AddArg(ptr)
  2045  		v0 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  2046  		v0.AuxInt = 0
  2047  		v.AddArg(v0)
  2048  		return true
  2049  	}
  2050  }
  2051  func rewriteValueMIPS_OpIsSliceInBounds(v *Value, config *Config) bool {
  2052  	b := v.Block
  2053  	_ = b
  2054  	// match: (IsSliceInBounds idx len)
  2055  	// cond:
  2056  	// result: (XORconst [1] (SGTU idx len))
  2057  	for {
  2058  		idx := v.Args[0]
  2059  		len := v.Args[1]
  2060  		v.reset(OpMIPSXORconst)
  2061  		v.AuxInt = 1
  2062  		v0 := b.NewValue0(v.Line, OpMIPSSGTU, config.fe.TypeBool())
  2063  		v0.AddArg(idx)
  2064  		v0.AddArg(len)
  2065  		v.AddArg(v0)
  2066  		return true
  2067  	}
  2068  }
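// Pointer and bounds checks are expressed as unsigned comparisons:
// IsNonNil becomes ptr >u 0, IsInBounds becomes len >u idx, and
// IsSliceInBounds becomes the negation of idx >u len (that is, idx <= len).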
  2069  func rewriteValueMIPS_OpLeq16(v *Value, config *Config) bool {
  2070  	b := v.Block
  2071  	_ = b
  2072  	// match: (Leq16 x y)
  2073  	// cond:
  2074  	// result: (XORconst [1] (SGT (SignExt16to32 x) (SignExt16to32 y)))
  2075  	for {
  2076  		x := v.Args[0]
  2077  		y := v.Args[1]
  2078  		v.reset(OpMIPSXORconst)
  2079  		v.AuxInt = 1
  2080  		v0 := b.NewValue0(v.Line, OpMIPSSGT, config.fe.TypeBool())
  2081  		v1 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
  2082  		v1.AddArg(x)
  2083  		v0.AddArg(v1)
  2084  		v2 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
  2085  		v2.AddArg(y)
  2086  		v0.AddArg(v2)
  2087  		v.AddArg(v0)
  2088  		return true
  2089  	}
  2090  }
  2091  func rewriteValueMIPS_OpLeq16U(v *Value, config *Config) bool {
  2092  	b := v.Block
  2093  	_ = b
  2094  	// match: (Leq16U x y)
  2095  	// cond:
  2096  	// result: (XORconst [1] (SGTU (ZeroExt16to32 x) (ZeroExt16to32 y)))
  2097  	for {
  2098  		x := v.Args[0]
  2099  		y := v.Args[1]
  2100  		v.reset(OpMIPSXORconst)
  2101  		v.AuxInt = 1
  2102  		v0 := b.NewValue0(v.Line, OpMIPSSGTU, config.fe.TypeBool())
  2103  		v1 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
  2104  		v1.AddArg(x)
  2105  		v0.AddArg(v1)
  2106  		v2 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
  2107  		v2.AddArg(y)
  2108  		v0.AddArg(v2)
  2109  		v.AddArg(v0)
  2110  		return true
  2111  	}
  2112  }
  2113  func rewriteValueMIPS_OpLeq32(v *Value, config *Config) bool {
  2114  	b := v.Block
  2115  	_ = b
  2116  	// match: (Leq32 x y)
  2117  	// cond:
  2118  	// result: (XORconst [1] (SGT x y))
  2119  	for {
  2120  		x := v.Args[0]
  2121  		y := v.Args[1]
  2122  		v.reset(OpMIPSXORconst)
  2123  		v.AuxInt = 1
  2124  		v0 := b.NewValue0(v.Line, OpMIPSSGT, config.fe.TypeBool())
  2125  		v0.AddArg(x)
  2126  		v0.AddArg(y)
  2127  		v.AddArg(v0)
  2128  		return true
  2129  	}
  2130  }
  2131  func rewriteValueMIPS_OpLeq32F(v *Value, config *Config) bool {
  2132  	b := v.Block
  2133  	_ = b
  2134  	// match: (Leq32F x y)
  2135  	// cond:
  2136  	// result: (FPFlagTrue (CMPGEF y x))
  2137  	for {
  2138  		x := v.Args[0]
  2139  		y := v.Args[1]
  2140  		v.reset(OpMIPSFPFlagTrue)
  2141  		v0 := b.NewValue0(v.Line, OpMIPSCMPGEF, TypeFlags)
  2142  		v0.AddArg(y)
  2143  		v0.AddArg(x)
  2144  		v.AddArg(v0)
  2145  		return true
  2146  	}
  2147  }
  2148  func rewriteValueMIPS_OpLeq32U(v *Value, config *Config) bool {
  2149  	b := v.Block
  2150  	_ = b
  2151  	// match: (Leq32U x y)
  2152  	// cond:
  2153  	// result: (XORconst [1] (SGTU x y))
  2154  	for {
  2155  		x := v.Args[0]
  2156  		y := v.Args[1]
  2157  		v.reset(OpMIPSXORconst)
  2158  		v.AuxInt = 1
  2159  		v0 := b.NewValue0(v.Line, OpMIPSSGTU, config.fe.TypeBool())
  2160  		v0.AddArg(x)
  2161  		v0.AddArg(y)
  2162  		v.AddArg(v0)
  2163  		return true
  2164  	}
  2165  }
  2166  func rewriteValueMIPS_OpLeq64F(v *Value, config *Config) bool {
  2167  	b := v.Block
  2168  	_ = b
  2169  	// match: (Leq64F x y)
  2170  	// cond:
  2171  	// result: (FPFlagTrue (CMPGED y x))
  2172  	for {
  2173  		x := v.Args[0]
  2174  		y := v.Args[1]
  2175  		v.reset(OpMIPSFPFlagTrue)
  2176  		v0 := b.NewValue0(v.Line, OpMIPSCMPGED, TypeFlags)
  2177  		v0.AddArg(y)
  2178  		v0.AddArg(x)
  2179  		v.AddArg(v0)
  2180  		return true
  2181  	}
  2182  }
  2183  func rewriteValueMIPS_OpLeq8(v *Value, config *Config) bool {
  2184  	b := v.Block
  2185  	_ = b
  2186  	// match: (Leq8 x y)
  2187  	// cond:
  2188  	// result: (XORconst [1] (SGT (SignExt8to32 x) (SignExt8to32 y)))
  2189  	for {
  2190  		x := v.Args[0]
  2191  		y := v.Args[1]
  2192  		v.reset(OpMIPSXORconst)
  2193  		v.AuxInt = 1
  2194  		v0 := b.NewValue0(v.Line, OpMIPSSGT, config.fe.TypeBool())
  2195  		v1 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
  2196  		v1.AddArg(x)
  2197  		v0.AddArg(v1)
  2198  		v2 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
  2199  		v2.AddArg(y)
  2200  		v0.AddArg(v2)
  2201  		v.AddArg(v0)
  2202  		return true
  2203  	}
  2204  }
  2205  func rewriteValueMIPS_OpLeq8U(v *Value, config *Config) bool {
  2206  	b := v.Block
  2207  	_ = b
  2208  	// match: (Leq8U x y)
  2209  	// cond:
  2210  	// result: (XORconst [1] (SGTU (ZeroExt8to32 x) (ZeroExt8to32 y)))
  2211  	for {
  2212  		x := v.Args[0]
  2213  		y := v.Args[1]
  2214  		v.reset(OpMIPSXORconst)
  2215  		v.AuxInt = 1
  2216  		v0 := b.NewValue0(v.Line, OpMIPSSGTU, config.fe.TypeBool())
  2217  		v1 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
  2218  		v1.AddArg(x)
  2219  		v0.AddArg(v1)
  2220  		v2 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
  2221  		v2.AddArg(y)
  2222  		v0.AddArg(v2)
  2223  		v.AddArg(v0)
  2224  		return true
  2225  	}
  2226  }
  2227  func rewriteValueMIPS_OpLess16(v *Value, config *Config) bool {
  2228  	b := v.Block
  2229  	_ = b
  2230  	// match: (Less16 x y)
  2231  	// cond:
  2232  	// result: (SGT (SignExt16to32 y) (SignExt16to32 x))
  2233  	for {
  2234  		x := v.Args[0]
  2235  		y := v.Args[1]
  2236  		v.reset(OpMIPSSGT)
  2237  		v0 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
  2238  		v0.AddArg(y)
  2239  		v.AddArg(v0)
  2240  		v1 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
  2241  		v1.AddArg(x)
  2242  		v.AddArg(v1)
  2243  		return true
  2244  	}
  2245  }
  2246  func rewriteValueMIPS_OpLess16U(v *Value, config *Config) bool {
  2247  	b := v.Block
  2248  	_ = b
  2249  	// match: (Less16U x y)
  2250  	// cond:
  2251  	// result: (SGTU (ZeroExt16to32 y) (ZeroExt16to32 x))
  2252  	for {
  2253  		x := v.Args[0]
  2254  		y := v.Args[1]
  2255  		v.reset(OpMIPSSGTU)
  2256  		v0 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
  2257  		v0.AddArg(y)
  2258  		v.AddArg(v0)
  2259  		v1 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
  2260  		v1.AddArg(x)
  2261  		v.AddArg(v1)
  2262  		return true
  2263  	}
  2264  }
  2265  func rewriteValueMIPS_OpLess32(v *Value, config *Config) bool {
  2266  	b := v.Block
  2267  	_ = b
  2268  	// match: (Less32 x y)
  2269  	// cond:
  2270  	// result: (SGT y x)
  2271  	for {
  2272  		x := v.Args[0]
  2273  		y := v.Args[1]
  2274  		v.reset(OpMIPSSGT)
  2275  		v.AddArg(y)
  2276  		v.AddArg(x)
  2277  		return true
  2278  	}
  2279  }
  2280  func rewriteValueMIPS_OpLess32F(v *Value, config *Config) bool {
  2281  	b := v.Block
  2282  	_ = b
  2283  	// match: (Less32F x y)
  2284  	// cond:
  2285  	// result: (FPFlagTrue (CMPGTF y x))
  2286  	for {
  2287  		x := v.Args[0]
  2288  		y := v.Args[1]
  2289  		v.reset(OpMIPSFPFlagTrue)
  2290  		v0 := b.NewValue0(v.Line, OpMIPSCMPGTF, TypeFlags)
  2291  		v0.AddArg(y)
  2292  		v0.AddArg(x)
  2293  		v.AddArg(v0)
  2294  		return true
  2295  	}
  2296  }
  2297  func rewriteValueMIPS_OpLess32U(v *Value, config *Config) bool {
  2298  	b := v.Block
  2299  	_ = b
  2300  	// match: (Less32U x y)
  2301  	// cond:
  2302  	// result: (SGTU y x)
  2303  	for {
  2304  		x := v.Args[0]
  2305  		y := v.Args[1]
  2306  		v.reset(OpMIPSSGTU)
  2307  		v.AddArg(y)
  2308  		v.AddArg(x)
  2309  		return true
  2310  	}
  2311  }
  2312  func rewriteValueMIPS_OpLess64F(v *Value, config *Config) bool {
  2313  	b := v.Block
  2314  	_ = b
  2315  	// match: (Less64F x y)
  2316  	// cond:
  2317  	// result: (FPFlagTrue (CMPGTD y x))
  2318  	for {
  2319  		x := v.Args[0]
  2320  		y := v.Args[1]
  2321  		v.reset(OpMIPSFPFlagTrue)
  2322  		v0 := b.NewValue0(v.Line, OpMIPSCMPGTD, TypeFlags)
  2323  		v0.AddArg(y)
  2324  		v0.AddArg(x)
  2325  		v.AddArg(v0)
  2326  		return true
  2327  	}
  2328  }
  2329  func rewriteValueMIPS_OpLess8(v *Value, config *Config) bool {
  2330  	b := v.Block
  2331  	_ = b
  2332  	// match: (Less8 x y)
  2333  	// cond:
  2334  	// result: (SGT (SignExt8to32 y) (SignExt8to32 x))
  2335  	for {
  2336  		x := v.Args[0]
  2337  		y := v.Args[1]
  2338  		v.reset(OpMIPSSGT)
  2339  		v0 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
  2340  		v0.AddArg(y)
  2341  		v.AddArg(v0)
  2342  		v1 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
  2343  		v1.AddArg(x)
  2344  		v.AddArg(v1)
  2345  		return true
  2346  	}
  2347  }
  2348  func rewriteValueMIPS_OpLess8U(v *Value, config *Config) bool {
  2349  	b := v.Block
  2350  	_ = b
  2351  	// match: (Less8U x y)
  2352  	// cond:
  2353  	// result: (SGTU (ZeroExt8to32 y) (ZeroExt8to32 x))
  2354  	for {
  2355  		x := v.Args[0]
  2356  		y := v.Args[1]
  2357  		v.reset(OpMIPSSGTU)
  2358  		v0 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
  2359  		v0.AddArg(y)
  2360  		v.AddArg(v0)
  2361  		v1 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
  2362  		v1.AddArg(x)
  2363  		v.AddArg(v1)
  2364  		return true
  2365  	}
  2366  }
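// Less* and Leq* complete the comparison set: Less x y is rewritten as
// y > x (operands swapped into SGT/SGTU or CMPGTF/CMPGTD), while Leq x y is
// the negation of x > y via XORconst [1] in the integer cases and a swapped
// CMPGEF/CMPGED in the floating-point ones.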
  2367  func rewriteValueMIPS_OpLoad(v *Value, config *Config) bool {
  2368  	b := v.Block
  2369  	_ = b
  2370  	// match: (Load <t> ptr mem)
  2371  	// cond: t.IsBoolean()
  2372  	// result: (MOVBUload ptr mem)
  2373  	for {
  2374  		t := v.Type
  2375  		ptr := v.Args[0]
  2376  		mem := v.Args[1]
  2377  		if !(t.IsBoolean()) {
  2378  			break
  2379  		}
  2380  		v.reset(OpMIPSMOVBUload)
  2381  		v.AddArg(ptr)
  2382  		v.AddArg(mem)
  2383  		return true
  2384  	}
  2385  	// match: (Load <t> ptr mem)
  2386  	// cond: (is8BitInt(t) && isSigned(t))
  2387  	// result: (MOVBload ptr mem)
  2388  	for {
  2389  		t := v.Type
  2390  		ptr := v.Args[0]
  2391  		mem := v.Args[1]
  2392  		if !(is8BitInt(t) && isSigned(t)) {
  2393  			break
  2394  		}
  2395  		v.reset(OpMIPSMOVBload)
  2396  		v.AddArg(ptr)
  2397  		v.AddArg(mem)
  2398  		return true
  2399  	}
  2400  	// match: (Load <t> ptr mem)
  2401  	// cond: (is8BitInt(t) && !isSigned(t))
  2402  	// result: (MOVBUload ptr mem)
  2403  	for {
  2404  		t := v.Type
  2405  		ptr := v.Args[0]
  2406  		mem := v.Args[1]
  2407  		if !(is8BitInt(t) && !isSigned(t)) {
  2408  			break
  2409  		}
  2410  		v.reset(OpMIPSMOVBUload)
  2411  		v.AddArg(ptr)
  2412  		v.AddArg(mem)
  2413  		return true
  2414  	}
  2415  	// match: (Load <t> ptr mem)
  2416  	// cond: (is16BitInt(t) && isSigned(t))
  2417  	// result: (MOVHload ptr mem)
  2418  	for {
  2419  		t := v.Type
  2420  		ptr := v.Args[0]
  2421  		mem := v.Args[1]
  2422  		if !(is16BitInt(t) && isSigned(t)) {
  2423  			break
  2424  		}
  2425  		v.reset(OpMIPSMOVHload)
  2426  		v.AddArg(ptr)
  2427  		v.AddArg(mem)
  2428  		return true
  2429  	}
  2430  	// match: (Load <t> ptr mem)
  2431  	// cond: (is16BitInt(t) && !isSigned(t))
  2432  	// result: (MOVHUload ptr mem)
  2433  	for {
  2434  		t := v.Type
  2435  		ptr := v.Args[0]
  2436  		mem := v.Args[1]
  2437  		if !(is16BitInt(t) && !isSigned(t)) {
  2438  			break
  2439  		}
  2440  		v.reset(OpMIPSMOVHUload)
  2441  		v.AddArg(ptr)
  2442  		v.AddArg(mem)
  2443  		return true
  2444  	}
  2445  	// match: (Load <t> ptr mem)
  2446  	// cond: (is32BitInt(t) || isPtr(t))
  2447  	// result: (MOVWload ptr mem)
  2448  	for {
  2449  		t := v.Type
  2450  		ptr := v.Args[0]
  2451  		mem := v.Args[1]
  2452  		if !(is32BitInt(t) || isPtr(t)) {
  2453  			break
  2454  		}
  2455  		v.reset(OpMIPSMOVWload)
  2456  		v.AddArg(ptr)
  2457  		v.AddArg(mem)
  2458  		return true
  2459  	}
  2460  	// match: (Load <t> ptr mem)
  2461  	// cond: is32BitFloat(t)
  2462  	// result: (MOVFload ptr mem)
  2463  	for {
  2464  		t := v.Type
  2465  		ptr := v.Args[0]
  2466  		mem := v.Args[1]
  2467  		if !(is32BitFloat(t)) {
  2468  			break
  2469  		}
  2470  		v.reset(OpMIPSMOVFload)
  2471  		v.AddArg(ptr)
  2472  		v.AddArg(mem)
  2473  		return true
  2474  	}
  2475  	// match: (Load <t> ptr mem)
  2476  	// cond: is64BitFloat(t)
  2477  	// result: (MOVDload ptr mem)
  2478  	for {
  2479  		t := v.Type
  2480  		ptr := v.Args[0]
  2481  		mem := v.Args[1]
  2482  		if !(is64BitFloat(t)) {
  2483  			break
  2484  		}
  2485  		v.reset(OpMIPSMOVDload)
  2486  		v.AddArg(ptr)
  2487  		v.AddArg(mem)
  2488  		return true
  2489  	}
  2490  	return false
  2491  }
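// Load is dispatched purely on the type being loaded: booleans and unsigned
// 8/16-bit integers use the zero-extending MOVBUload/MOVHUload, signed
// 8/16-bit integers the sign-extending MOVBload/MOVHload, 32-bit integers
// and pointers MOVWload, and 32/64-bit floats MOVFload/MOVDload.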
  2492  func rewriteValueMIPS_OpLsh16x16(v *Value, config *Config) bool {
  2493  	b := v.Block
  2494  	_ = b
  2495  	// match: (Lsh16x16 <t> x y)
  2496  	// cond:
  2497  	// result: (CMOVZ (SLL <t> x (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
  2498  	for {
  2499  		t := v.Type
  2500  		x := v.Args[0]
  2501  		y := v.Args[1]
  2502  		v.reset(OpMIPSCMOVZ)
  2503  		v0 := b.NewValue0(v.Line, OpMIPSSLL, t)
  2504  		v0.AddArg(x)
  2505  		v1 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
  2506  		v1.AddArg(y)
  2507  		v0.AddArg(v1)
  2508  		v.AddArg(v0)
  2509  		v2 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  2510  		v2.AuxInt = 0
  2511  		v.AddArg(v2)
  2512  		v3 := b.NewValue0(v.Line, OpMIPSSGTUconst, config.fe.TypeBool())
  2513  		v3.AuxInt = 32
  2514  		v4 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
  2515  		v4.AddArg(y)
  2516  		v3.AddArg(v4)
  2517  		v.AddArg(v3)
  2518  		return true
  2519  	}
  2520  }
  2521  func rewriteValueMIPS_OpLsh16x32(v *Value, config *Config) bool {
  2522  	b := v.Block
  2523  	_ = b
  2524  	// match: (Lsh16x32 <t> x y)
  2525  	// cond:
  2526  	// result: (CMOVZ (SLL <t> x y) (MOVWconst [0]) (SGTUconst [32] y))
  2527  	for {
  2528  		t := v.Type
  2529  		x := v.Args[0]
  2530  		y := v.Args[1]
  2531  		v.reset(OpMIPSCMOVZ)
  2532  		v0 := b.NewValue0(v.Line, OpMIPSSLL, t)
  2533  		v0.AddArg(x)
  2534  		v0.AddArg(y)
  2535  		v.AddArg(v0)
  2536  		v1 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  2537  		v1.AuxInt = 0
  2538  		v.AddArg(v1)
  2539  		v2 := b.NewValue0(v.Line, OpMIPSSGTUconst, config.fe.TypeBool())
  2540  		v2.AuxInt = 32
  2541  		v2.AddArg(y)
  2542  		v.AddArg(v2)
  2543  		return true
  2544  	}
  2545  }
  2546  func rewriteValueMIPS_OpLsh16x64(v *Value, config *Config) bool {
  2547  	b := v.Block
  2548  	_ = b
  2549  	// match: (Lsh16x64 x (Const64 [c]))
  2550  	// cond: uint32(c) < 16
  2551  	// result: (SLLconst x [c])
  2552  	for {
  2553  		x := v.Args[0]
  2554  		v_1 := v.Args[1]
  2555  		if v_1.Op != OpConst64 {
  2556  			break
  2557  		}
  2558  		c := v_1.AuxInt
  2559  		if !(uint32(c) < 16) {
  2560  			break
  2561  		}
  2562  		v.reset(OpMIPSSLLconst)
  2563  		v.AuxInt = c
  2564  		v.AddArg(x)
  2565  		return true
  2566  	}
  2567  	// match: (Lsh16x64 _ (Const64 [c]))
  2568  	// cond: uint32(c) >= 16
  2569  	// result: (MOVWconst [0])
  2570  	for {
  2571  		v_1 := v.Args[1]
  2572  		if v_1.Op != OpConst64 {
  2573  			break
  2574  		}
  2575  		c := v_1.AuxInt
  2576  		if !(uint32(c) >= 16) {
  2577  			break
  2578  		}
  2579  		v.reset(OpMIPSMOVWconst)
  2580  		v.AuxInt = 0
  2581  		return true
  2582  	}
  2583  	return false
  2584  }
  2585  func rewriteValueMIPS_OpLsh16x8(v *Value, config *Config) bool {
  2586  	b := v.Block
  2587  	_ = b
  2588  	// match: (Lsh16x8 <t> x y)
  2589  	// cond:
  2590  	// result: (CMOVZ (SLL <t> x (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
  2591  	for {
  2592  		t := v.Type
  2593  		x := v.Args[0]
  2594  		y := v.Args[1]
  2595  		v.reset(OpMIPSCMOVZ)
  2596  		v0 := b.NewValue0(v.Line, OpMIPSSLL, t)
  2597  		v0.AddArg(x)
  2598  		v1 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
  2599  		v1.AddArg(y)
  2600  		v0.AddArg(v1)
  2601  		v.AddArg(v0)
  2602  		v2 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  2603  		v2.AuxInt = 0
  2604  		v.AddArg(v2)
  2605  		v3 := b.NewValue0(v.Line, OpMIPSSGTUconst, config.fe.TypeBool())
  2606  		v3.AuxInt = 32
  2607  		v4 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
  2608  		v4.AddArg(y)
  2609  		v3.AddArg(v4)
  2610  		v.AddArg(v3)
  2611  		return true
  2612  	}
  2613  }
  2614  func rewriteValueMIPS_OpLsh32x16(v *Value, config *Config) bool {
  2615  	b := v.Block
  2616  	_ = b
  2617  	// match: (Lsh32x16 <t> x y)
  2618  	// cond:
  2619  	// result: (CMOVZ (SLL <t> x (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
  2620  	for {
  2621  		t := v.Type
  2622  		x := v.Args[0]
  2623  		y := v.Args[1]
  2624  		v.reset(OpMIPSCMOVZ)
  2625  		v0 := b.NewValue0(v.Line, OpMIPSSLL, t)
  2626  		v0.AddArg(x)
  2627  		v1 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
  2628  		v1.AddArg(y)
  2629  		v0.AddArg(v1)
  2630  		v.AddArg(v0)
  2631  		v2 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  2632  		v2.AuxInt = 0
  2633  		v.AddArg(v2)
  2634  		v3 := b.NewValue0(v.Line, OpMIPSSGTUconst, config.fe.TypeBool())
  2635  		v3.AuxInt = 32
  2636  		v4 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
  2637  		v4.AddArg(y)
  2638  		v3.AddArg(v4)
  2639  		v.AddArg(v3)
  2640  		return true
  2641  	}
  2642  }
  2643  func rewriteValueMIPS_OpLsh32x32(v *Value, config *Config) bool {
  2644  	b := v.Block
  2645  	_ = b
  2646  	// match: (Lsh32x32 <t> x y)
  2647  	// cond:
  2648  	// result: (CMOVZ (SLL <t> x y) (MOVWconst [0]) (SGTUconst [32] y))
  2649  	for {
  2650  		t := v.Type
  2651  		x := v.Args[0]
  2652  		y := v.Args[1]
  2653  		v.reset(OpMIPSCMOVZ)
  2654  		v0 := b.NewValue0(v.Line, OpMIPSSLL, t)
  2655  		v0.AddArg(x)
  2656  		v0.AddArg(y)
  2657  		v.AddArg(v0)
  2658  		v1 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  2659  		v1.AuxInt = 0
  2660  		v.AddArg(v1)
  2661  		v2 := b.NewValue0(v.Line, OpMIPSSGTUconst, config.fe.TypeBool())
  2662  		v2.AuxInt = 32
  2663  		v2.AddArg(y)
  2664  		v.AddArg(v2)
  2665  		return true
  2666  	}
  2667  }
  2668  func rewriteValueMIPS_OpLsh32x64(v *Value, config *Config) bool {
  2669  	b := v.Block
  2670  	_ = b
  2671  	// match: (Lsh32x64 x (Const64 [c]))
  2672  	// cond: uint32(c) < 32
  2673  	// result: (SLLconst x [c])
  2674  	for {
  2675  		x := v.Args[0]
  2676  		v_1 := v.Args[1]
  2677  		if v_1.Op != OpConst64 {
  2678  			break
  2679  		}
  2680  		c := v_1.AuxInt
  2681  		if !(uint32(c) < 32) {
  2682  			break
  2683  		}
  2684  		v.reset(OpMIPSSLLconst)
  2685  		v.AuxInt = c
  2686  		v.AddArg(x)
  2687  		return true
  2688  	}
  2689  	// match: (Lsh32x64 _ (Const64 [c]))
  2690  	// cond: uint32(c) >= 32
  2691  	// result: (MOVWconst [0])
  2692  	for {
  2693  		v_1 := v.Args[1]
  2694  		if v_1.Op != OpConst64 {
  2695  			break
  2696  		}
  2697  		c := v_1.AuxInt
  2698  		if !(uint32(c) >= 32) {
  2699  			break
  2700  		}
  2701  		v.reset(OpMIPSMOVWconst)
  2702  		v.AuxInt = 0
  2703  		return true
  2704  	}
  2705  	return false
  2706  }
  2707  func rewriteValueMIPS_OpLsh32x8(v *Value, config *Config) bool {
  2708  	b := v.Block
  2709  	_ = b
  2710  	// match: (Lsh32x8 <t> x y)
  2711  	// cond:
  2712  	// result: (CMOVZ (SLL <t> x (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
  2713  	for {
  2714  		t := v.Type
  2715  		x := v.Args[0]
  2716  		y := v.Args[1]
  2717  		v.reset(OpMIPSCMOVZ)
  2718  		v0 := b.NewValue0(v.Line, OpMIPSSLL, t)
  2719  		v0.AddArg(x)
  2720  		v1 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
  2721  		v1.AddArg(y)
  2722  		v0.AddArg(v1)
  2723  		v.AddArg(v0)
  2724  		v2 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  2725  		v2.AuxInt = 0
  2726  		v.AddArg(v2)
  2727  		v3 := b.NewValue0(v.Line, OpMIPSSGTUconst, config.fe.TypeBool())
  2728  		v3.AuxInt = 32
  2729  		v4 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
  2730  		v4.AddArg(y)
  2731  		v3.AddArg(v4)
  2732  		v.AddArg(v3)
  2733  		return true
  2734  	}
  2735  }
  2736  func rewriteValueMIPS_OpLsh8x16(v *Value, config *Config) bool {
  2737  	b := v.Block
  2738  	_ = b
  2739  	// match: (Lsh8x16 <t> x y)
  2740  	// cond:
  2741  	// result: (CMOVZ (SLL <t> x (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
  2742  	for {
  2743  		t := v.Type
  2744  		x := v.Args[0]
  2745  		y := v.Args[1]
  2746  		v.reset(OpMIPSCMOVZ)
  2747  		v0 := b.NewValue0(v.Line, OpMIPSSLL, t)
  2748  		v0.AddArg(x)
  2749  		v1 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
  2750  		v1.AddArg(y)
  2751  		v0.AddArg(v1)
  2752  		v.AddArg(v0)
  2753  		v2 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  2754  		v2.AuxInt = 0
  2755  		v.AddArg(v2)
  2756  		v3 := b.NewValue0(v.Line, OpMIPSSGTUconst, config.fe.TypeBool())
  2757  		v3.AuxInt = 32
  2758  		v4 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
  2759  		v4.AddArg(y)
  2760  		v3.AddArg(v4)
  2761  		v.AddArg(v3)
  2762  		return true
  2763  	}
  2764  }
  2765  func rewriteValueMIPS_OpLsh8x32(v *Value, config *Config) bool {
  2766  	b := v.Block
  2767  	_ = b
  2768  	// match: (Lsh8x32 <t> x y)
  2769  	// cond:
  2770  	// result: (CMOVZ (SLL <t> x y) (MOVWconst [0]) (SGTUconst [32] y))
  2771  	for {
  2772  		t := v.Type
  2773  		x := v.Args[0]
  2774  		y := v.Args[1]
  2775  		v.reset(OpMIPSCMOVZ)
  2776  		v0 := b.NewValue0(v.Line, OpMIPSSLL, t)
  2777  		v0.AddArg(x)
  2778  		v0.AddArg(y)
  2779  		v.AddArg(v0)
  2780  		v1 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  2781  		v1.AuxInt = 0
  2782  		v.AddArg(v1)
  2783  		v2 := b.NewValue0(v.Line, OpMIPSSGTUconst, config.fe.TypeBool())
  2784  		v2.AuxInt = 32
  2785  		v2.AddArg(y)
  2786  		v.AddArg(v2)
  2787  		return true
  2788  	}
  2789  }
  2790  func rewriteValueMIPS_OpLsh8x64(v *Value, config *Config) bool {
  2791  	b := v.Block
  2792  	_ = b
  2793  	// match: (Lsh8x64 x (Const64 [c]))
  2794  	// cond: uint32(c) < 8
  2795  	// result: (SLLconst x [c])
  2796  	for {
  2797  		x := v.Args[0]
  2798  		v_1 := v.Args[1]
  2799  		if v_1.Op != OpConst64 {
  2800  			break
  2801  		}
  2802  		c := v_1.AuxInt
  2803  		if !(uint32(c) < 8) {
  2804  			break
  2805  		}
  2806  		v.reset(OpMIPSSLLconst)
  2807  		v.AuxInt = c
  2808  		v.AddArg(x)
  2809  		return true
  2810  	}
  2811  	// match: (Lsh8x64 _ (Const64 [c]))
  2812  	// cond: uint32(c) >= 8
  2813  	// result: (MOVWconst [0])
  2814  	for {
  2815  		v_1 := v.Args[1]
  2816  		if v_1.Op != OpConst64 {
  2817  			break
  2818  		}
  2819  		c := v_1.AuxInt
  2820  		if !(uint32(c) >= 8) {
  2821  			break
  2822  		}
  2823  		v.reset(OpMIPSMOVWconst)
  2824  		v.AuxInt = 0
  2825  		return true
  2826  	}
  2827  	return false
  2828  }
  2829  func rewriteValueMIPS_OpLsh8x8(v *Value, config *Config) bool {
  2830  	b := v.Block
  2831  	_ = b
  2832  	// match: (Lsh8x8 <t> x y)
  2833  	// cond:
  2834  	// result: (CMOVZ (SLL <t> x (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
  2835  	for {
  2836  		t := v.Type
  2837  		x := v.Args[0]
  2838  		y := v.Args[1]
  2839  		v.reset(OpMIPSCMOVZ)
  2840  		v0 := b.NewValue0(v.Line, OpMIPSSLL, t)
  2841  		v0.AddArg(x)
  2842  		v1 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
  2843  		v1.AddArg(y)
  2844  		v0.AddArg(v1)
  2845  		v.AddArg(v0)
  2846  		v2 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  2847  		v2.AuxInt = 0
  2848  		v.AddArg(v2)
  2849  		v3 := b.NewValue0(v.Line, OpMIPSSGTUconst, config.fe.TypeBool())
  2850  		v3.AuxInt = 32
  2851  		v4 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
  2852  		v4.AddArg(y)
  2853  		v3.AddArg(v4)
  2854  		v.AddArg(v3)
  2855  		return true
  2856  	}
  2857  }
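// Variable shift counts cannot be fed straight to the hardware shift, which
// on MIPS uses only the low 5 bits of the count, while Go defines a left
// shift by 32 or more as 0. The Lsh rules therefore wrap the SLL in
// CMOVZ (SLL ...) (MOVWconst [0]) (SGTUconst [32] count), selecting the zero
// whenever the (zero-extended) count is >= 32. Shifts by a 64-bit constant
// are resolved at rewrite time into either SLLconst or a zero constant.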
  2858  func rewriteValueMIPS_OpMIPSADD(v *Value, config *Config) bool {
  2859  	b := v.Block
  2860  	_ = b
  2861  	// match: (ADD (MOVWconst [c]) x)
  2862  	// cond:
  2863  	// result: (ADDconst [c] x)
  2864  	for {
  2865  		v_0 := v.Args[0]
  2866  		if v_0.Op != OpMIPSMOVWconst {
  2867  			break
  2868  		}
  2869  		c := v_0.AuxInt
  2870  		x := v.Args[1]
  2871  		v.reset(OpMIPSADDconst)
  2872  		v.AuxInt = c
  2873  		v.AddArg(x)
  2874  		return true
  2875  	}
  2876  	// match: (ADD x (MOVWconst [c]))
  2877  	// cond:
  2878  	// result: (ADDconst [c] x)
  2879  	for {
  2880  		x := v.Args[0]
  2881  		v_1 := v.Args[1]
  2882  		if v_1.Op != OpMIPSMOVWconst {
  2883  			break
  2884  		}
  2885  		c := v_1.AuxInt
  2886  		v.reset(OpMIPSADDconst)
  2887  		v.AuxInt = c
  2888  		v.AddArg(x)
  2889  		return true
  2890  	}
  2891  	// match: (ADD x (NEG y))
  2892  	// cond:
  2893  	// result: (SUB x y)
  2894  	for {
  2895  		x := v.Args[0]
  2896  		v_1 := v.Args[1]
  2897  		if v_1.Op != OpMIPSNEG {
  2898  			break
  2899  		}
  2900  		y := v_1.Args[0]
  2901  		v.reset(OpMIPSSUB)
  2902  		v.AddArg(x)
  2903  		v.AddArg(y)
  2904  		return true
  2905  	}
  2906  	// match: (ADD (NEG y) x)
  2907  	// cond:
  2908  	// result: (SUB x y)
  2909  	for {
  2910  		v_0 := v.Args[0]
  2911  		if v_0.Op != OpMIPSNEG {
  2912  			break
  2913  		}
  2914  		y := v_0.Args[0]
  2915  		x := v.Args[1]
  2916  		v.reset(OpMIPSSUB)
  2917  		v.AddArg(x)
  2918  		v.AddArg(y)
  2919  		return true
  2920  	}
  2921  	return false
  2922  }
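// ADD is canonicalized so that later rules see the constant form: an ADD
// with a MOVWconst operand on either side becomes ADDconst, and adding a
// NEGated value on either side becomes a SUB.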
  2923  func rewriteValueMIPS_OpMIPSADDconst(v *Value, config *Config) bool {
  2924  	b := v.Block
  2925  	_ = b
  2926  	// match: (ADDconst [off1] (MOVWaddr [off2] {sym} ptr))
  2927  	// cond:
  2928  	// result: (MOVWaddr [off1+off2] {sym} ptr)
  2929  	for {
  2930  		off1 := v.AuxInt
  2931  		v_0 := v.Args[0]
  2932  		if v_0.Op != OpMIPSMOVWaddr {
  2933  			break
  2934  		}
  2935  		off2 := v_0.AuxInt
  2936  		sym := v_0.Aux
  2937  		ptr := v_0.Args[0]
  2938  		v.reset(OpMIPSMOVWaddr)
  2939  		v.AuxInt = off1 + off2
  2940  		v.Aux = sym
  2941  		v.AddArg(ptr)
  2942  		return true
  2943  	}
  2944  	// match: (ADDconst [0]  x)
  2945  	// cond:
  2946  	// result: x
  2947  	for {
  2948  		if v.AuxInt != 0 {
  2949  			break
  2950  		}
  2951  		x := v.Args[0]
  2952  		v.reset(OpCopy)
  2953  		v.Type = x.Type
  2954  		v.AddArg(x)
  2955  		return true
  2956  	}
  2957  	// match: (ADDconst [c] (MOVWconst [d]))
  2958  	// cond:
  2959  	// result: (MOVWconst [int64(int32(c+d))])
  2960  	for {
  2961  		c := v.AuxInt
  2962  		v_0 := v.Args[0]
  2963  		if v_0.Op != OpMIPSMOVWconst {
  2964  			break
  2965  		}
  2966  		d := v_0.AuxInt
  2967  		v.reset(OpMIPSMOVWconst)
  2968  		v.AuxInt = int64(int32(c + d))
  2969  		return true
  2970  	}
  2971  	// match: (ADDconst [c] (ADDconst [d] x))
  2972  	// cond:
  2973  	// result: (ADDconst [int64(int32(c+d))] x)
  2974  	for {
  2975  		c := v.AuxInt
  2976  		v_0 := v.Args[0]
  2977  		if v_0.Op != OpMIPSADDconst {
  2978  			break
  2979  		}
  2980  		d := v_0.AuxInt
  2981  		x := v_0.Args[0]
  2982  		v.reset(OpMIPSADDconst)
  2983  		v.AuxInt = int64(int32(c + d))
  2984  		v.AddArg(x)
  2985  		return true
  2986  	}
  2987  	// match: (ADDconst [c] (SUBconst [d] x))
  2988  	// cond:
  2989  	// result: (ADDconst [int64(int32(c-d))] x)
  2990  	for {
  2991  		c := v.AuxInt
  2992  		v_0 := v.Args[0]
  2993  		if v_0.Op != OpMIPSSUBconst {
  2994  			break
  2995  		}
  2996  		d := v_0.AuxInt
  2997  		x := v_0.Args[0]
  2998  		v.reset(OpMIPSADDconst)
  2999  		v.AuxInt = int64(int32(c - d))
  3000  		v.AddArg(x)
  3001  		return true
  3002  	}
  3003  	return false
  3004  }
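// ADDconst then folds further: it is absorbed into the offset of a
// MOVWaddr, an addition of 0 disappears, and chains of ADDconst/SUBconst or
// a constant operand are combined, with int64(int32(...)) keeping the
// folded constant truncated to 32 bits.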
  3005  func rewriteValueMIPS_OpMIPSAND(v *Value, config *Config) bool {
  3006  	b := v.Block
  3007  	_ = b
  3008  	// match: (AND (MOVWconst [c]) x)
  3009  	// cond:
  3010  	// result: (ANDconst [c] x)
  3011  	for {
  3012  		v_0 := v.Args[0]
  3013  		if v_0.Op != OpMIPSMOVWconst {
  3014  			break
  3015  		}
  3016  		c := v_0.AuxInt
  3017  		x := v.Args[1]
  3018  		v.reset(OpMIPSANDconst)
  3019  		v.AuxInt = c
  3020  		v.AddArg(x)
  3021  		return true
  3022  	}
  3023  	// match: (AND x (MOVWconst [c]))
  3024  	// cond:
  3025  	// result: (ANDconst [c] x)
  3026  	for {
  3027  		x := v.Args[0]
  3028  		v_1 := v.Args[1]
  3029  		if v_1.Op != OpMIPSMOVWconst {
  3030  			break
  3031  		}
  3032  		c := v_1.AuxInt
  3033  		v.reset(OpMIPSANDconst)
  3034  		v.AuxInt = c
  3035  		v.AddArg(x)
  3036  		return true
  3037  	}
  3038  	// match: (AND x x)
  3039  	// cond:
  3040  	// result: x
  3041  	for {
  3042  		x := v.Args[0]
  3043  		if x != v.Args[1] {
  3044  			break
  3045  		}
  3046  		v.reset(OpCopy)
  3047  		v.Type = x.Type
  3048  		v.AddArg(x)
  3049  		return true
  3050  	}
  3051  	// match: (AND (SGTUconst [1] x) (SGTUconst [1] y))
  3052  	// cond:
  3053  	// result: (SGTUconst [1] (OR <x.Type> x y))
  3054  	for {
  3055  		v_0 := v.Args[0]
  3056  		if v_0.Op != OpMIPSSGTUconst {
  3057  			break
  3058  		}
  3059  		if v_0.AuxInt != 1 {
  3060  			break
  3061  		}
  3062  		x := v_0.Args[0]
  3063  		v_1 := v.Args[1]
  3064  		if v_1.Op != OpMIPSSGTUconst {
  3065  			break
  3066  		}
  3067  		if v_1.AuxInt != 1 {
  3068  			break
  3069  		}
  3070  		y := v_1.Args[0]
  3071  		v.reset(OpMIPSSGTUconst)
  3072  		v.AuxInt = 1
  3073  		v0 := b.NewValue0(v.Line, OpMIPSOR, x.Type)
  3074  		v0.AddArg(x)
  3075  		v0.AddArg(y)
  3076  		v.AddArg(v0)
  3077  		return true
  3078  	}
  3079  	return false
  3080  }
  3081  func rewriteValueMIPS_OpMIPSANDconst(v *Value, config *Config) bool {
  3082  	b := v.Block
  3083  	_ = b
  3084  	// match: (ANDconst [0]  _)
  3085  	// cond:
  3086  	// result: (MOVWconst [0])
  3087  	for {
  3088  		if v.AuxInt != 0 {
  3089  			break
  3090  		}
  3091  		v.reset(OpMIPSMOVWconst)
  3092  		v.AuxInt = 0
  3093  		return true
  3094  	}
  3095  	// match: (ANDconst [-1] x)
  3096  	// cond:
  3097  	// result: x
  3098  	for {
  3099  		if v.AuxInt != -1 {
  3100  			break
  3101  		}
  3102  		x := v.Args[0]
  3103  		v.reset(OpCopy)
  3104  		v.Type = x.Type
  3105  		v.AddArg(x)
  3106  		return true
  3107  	}
  3108  	// match: (ANDconst [c] (MOVWconst [d]))
  3109  	// cond:
  3110  	// result: (MOVWconst [c&d])
  3111  	for {
  3112  		c := v.AuxInt
  3113  		v_0 := v.Args[0]
  3114  		if v_0.Op != OpMIPSMOVWconst {
  3115  			break
  3116  		}
  3117  		d := v_0.AuxInt
  3118  		v.reset(OpMIPSMOVWconst)
  3119  		v.AuxInt = c & d
  3120  		return true
  3121  	}
  3122  	// match: (ANDconst [c] (ANDconst [d] x))
  3123  	// cond:
  3124  	// result: (ANDconst [c&d] x)
  3125  	for {
  3126  		c := v.AuxInt
  3127  		v_0 := v.Args[0]
  3128  		if v_0.Op != OpMIPSANDconst {
  3129  			break
  3130  		}
  3131  		d := v_0.AuxInt
  3132  		x := v_0.Args[0]
  3133  		v.reset(OpMIPSANDconst)
  3134  		v.AuxInt = c & d
  3135  		v.AddArg(x)
  3136  		return true
  3137  	}
  3138  	return false
  3139  }
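// AND follows the same scheme: a constant operand turns it into ANDconst,
// AND x x is just x, and ANDing two SGTUconst [1] results (x == 0 and
// y == 0) collapses into a single SGTUconst [1] of x|y, since both are zero
// exactly when their OR is zero. ANDconst itself folds constants: mask 0
// gives 0, mask -1 is a no-op, and nested masks combine with &.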
  3140  func rewriteValueMIPS_OpMIPSCMOVZ(v *Value, config *Config) bool {
  3141  	b := v.Block
  3142  	_ = b
  3143  	// match: (CMOVZ _ b (MOVWconst [0]))
  3144  	// cond:
  3145  	// result: b
  3146  	for {
  3147  		b := v.Args[1]
  3148  		v_2 := v.Args[2]
  3149  		if v_2.Op != OpMIPSMOVWconst {
  3150  			break
  3151  		}
  3152  		if v_2.AuxInt != 0 {
  3153  			break
  3154  		}
  3155  		v.reset(OpCopy)
  3156  		v.Type = b.Type
  3157  		v.AddArg(b)
  3158  		return true
  3159  	}
  3160  	// match: (CMOVZ a _ (MOVWconst [c]))
  3161  	// cond: c!=0
  3162  	// result: a
  3163  	for {
  3164  		a := v.Args[0]
  3165  		v_2 := v.Args[2]
  3166  		if v_2.Op != OpMIPSMOVWconst {
  3167  			break
  3168  		}
  3169  		c := v_2.AuxInt
  3170  		if !(c != 0) {
  3171  			break
  3172  		}
  3173  		v.reset(OpCopy)
  3174  		v.Type = a.Type
  3175  		v.AddArg(a)
  3176  		return true
  3177  	}
  3178  	// match: (CMOVZ a (MOVWconst [0]) c)
  3179  	// cond:
  3180  	// result: (CMOVZzero a c)
  3181  	for {
  3182  		a := v.Args[0]
  3183  		v_1 := v.Args[1]
  3184  		if v_1.Op != OpMIPSMOVWconst {
  3185  			break
  3186  		}
  3187  		if v_1.AuxInt != 0 {
  3188  			break
  3189  		}
  3190  		c := v.Args[2]
  3191  		v.reset(OpMIPSCMOVZzero)
  3192  		v.AddArg(a)
  3193  		v.AddArg(c)
  3194  		return true
  3195  	}
  3196  	return false
  3197  }
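// CMOVZ a b c selects b when c is zero and a otherwise, so a constant
// selector lets the result be chosen outright, and a constant-zero second
// operand is specialized to the two-operand CMOVZzero form, which the next
// function simplifies the same way.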
  3198  func rewriteValueMIPS_OpMIPSCMOVZzero(v *Value, config *Config) bool {
  3199  	b := v.Block
  3200  	_ = b
  3201  	// match: (CMOVZzero _ (MOVWconst [0]))
  3202  	// cond:
  3203  	// result: (MOVWconst [0])
  3204  	for {
  3205  		v_1 := v.Args[1]
  3206  		if v_1.Op != OpMIPSMOVWconst {
  3207  			break
  3208  		}
  3209  		if v_1.AuxInt != 0 {
  3210  			break
  3211  		}
  3212  		v.reset(OpMIPSMOVWconst)
  3213  		v.AuxInt = 0
  3214  		return true
  3215  	}
  3216  	// match: (CMOVZzero a (MOVWconst [c]))
  3217  	// cond: c!=0
  3218  	// result: a
  3219  	for {
  3220  		a := v.Args[0]
  3221  		v_1 := v.Args[1]
  3222  		if v_1.Op != OpMIPSMOVWconst {
  3223  			break
  3224  		}
  3225  		c := v_1.AuxInt
  3226  		if !(c != 0) {
  3227  			break
  3228  		}
  3229  		v.reset(OpCopy)
  3230  		v.Type = a.Type
  3231  		v.AddArg(a)
  3232  		return true
  3233  	}
  3234  	return false
  3235  }
  3236  func rewriteValueMIPS_OpMIPSLoweredAtomicAdd(v *Value, config *Config) bool {
  3237  	b := v.Block
  3238  	_ = b
  3239  	// match: (LoweredAtomicAdd ptr (MOVWconst [c]) mem)
  3240  	// cond: is16Bit(c)
  3241  	// result: (LoweredAtomicAddconst [c] ptr mem)
  3242  	for {
  3243  		ptr := v.Args[0]
  3244  		v_1 := v.Args[1]
  3245  		if v_1.Op != OpMIPSMOVWconst {
  3246  			break
  3247  		}
  3248  		c := v_1.AuxInt
  3249  		mem := v.Args[2]
  3250  		if !(is16Bit(c)) {
  3251  			break
  3252  		}
  3253  		v.reset(OpMIPSLoweredAtomicAddconst)
  3254  		v.AuxInt = c
  3255  		v.AddArg(ptr)
  3256  		v.AddArg(mem)
  3257  		return true
  3258  	}
  3259  	return false
  3260  }
  3261  func rewriteValueMIPS_OpMIPSLoweredAtomicStore(v *Value, config *Config) bool {
  3262  	b := v.Block
  3263  	_ = b
  3264  	// match: (LoweredAtomicStore ptr (MOVWconst [0]) mem)
  3265  	// cond:
  3266  	// result: (LoweredAtomicStorezero ptr mem)
  3267  	for {
  3268  		ptr := v.Args[0]
  3269  		v_1 := v.Args[1]
  3270  		if v_1.Op != OpMIPSMOVWconst {
  3271  			break
  3272  		}
  3273  		if v_1.AuxInt != 0 {
  3274  			break
  3275  		}
  3276  		mem := v.Args[2]
  3277  		v.reset(OpMIPSLoweredAtomicStorezero)
  3278  		v.AddArg(ptr)
  3279  		v.AddArg(mem)
  3280  		return true
  3281  	}
  3282  	return false
  3283  }
  3284  func rewriteValueMIPS_OpMIPSMOVBUload(v *Value, config *Config) bool {
  3285  	b := v.Block
  3286  	_ = b
  3287  	// match: (MOVBUload [off1] {sym} x:(ADDconst [off2] ptr) mem)
  3288  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  3289  	// result: (MOVBUload [off1+off2] {sym} ptr mem)
  3290  	for {
  3291  		off1 := v.AuxInt
  3292  		sym := v.Aux
  3293  		x := v.Args[0]
  3294  		if x.Op != OpMIPSADDconst {
  3295  			break
  3296  		}
  3297  		off2 := x.AuxInt
  3298  		ptr := x.Args[0]
  3299  		mem := v.Args[1]
  3300  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  3301  			break
  3302  		}
  3303  		v.reset(OpMIPSMOVBUload)
  3304  		v.AuxInt = off1 + off2
  3305  		v.Aux = sym
  3306  		v.AddArg(ptr)
  3307  		v.AddArg(mem)
  3308  		return true
  3309  	}
  3310  	// match: (MOVBUload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  3311  	// cond: canMergeSym(sym1,sym2)
  3312  	// result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  3313  	for {
  3314  		off1 := v.AuxInt
  3315  		sym1 := v.Aux
  3316  		v_0 := v.Args[0]
  3317  		if v_0.Op != OpMIPSMOVWaddr {
  3318  			break
  3319  		}
  3320  		off2 := v_0.AuxInt
  3321  		sym2 := v_0.Aux
  3322  		ptr := v_0.Args[0]
  3323  		mem := v.Args[1]
  3324  		if !(canMergeSym(sym1, sym2)) {
  3325  			break
  3326  		}
  3327  		v.reset(OpMIPSMOVBUload)
  3328  		v.AuxInt = off1 + off2
  3329  		v.Aux = mergeSym(sym1, sym2)
  3330  		v.AddArg(ptr)
  3331  		v.AddArg(mem)
  3332  		return true
  3333  	}
  3334  	// match: (MOVBUload [off] {sym} ptr (MOVBstore [off2] {sym2} ptr2 x _))
  3335  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  3336  	// result: (MOVBUreg x)
  3337  	for {
  3338  		off := v.AuxInt
  3339  		sym := v.Aux
  3340  		ptr := v.Args[0]
  3341  		v_1 := v.Args[1]
  3342  		if v_1.Op != OpMIPSMOVBstore {
  3343  			break
  3344  		}
  3345  		off2 := v_1.AuxInt
  3346  		sym2 := v_1.Aux
  3347  		ptr2 := v_1.Args[0]
  3348  		x := v_1.Args[1]
  3349  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  3350  			break
  3351  		}
  3352  		v.reset(OpMIPSMOVBUreg)
  3353  		v.AddArg(x)
  3354  		return true
  3355  	}
  3356  	return false
  3357  }
  3358  func rewriteValueMIPS_OpMIPSMOVBUreg(v *Value, config *Config) bool {
  3359  	b := v.Block
  3360  	_ = b
  3361  	// match: (MOVBUreg x:(MOVBUload _ _))
  3362  	// cond:
  3363  	// result: (MOVWreg x)
  3364  	for {
  3365  		x := v.Args[0]
  3366  		if x.Op != OpMIPSMOVBUload {
  3367  			break
  3368  		}
  3369  		v.reset(OpMIPSMOVWreg)
  3370  		v.AddArg(x)
  3371  		return true
  3372  	}
  3373  	// match: (MOVBUreg x:(MOVBUreg _))
  3374  	// cond:
  3375  	// result: (MOVWreg x)
  3376  	for {
  3377  		x := v.Args[0]
  3378  		if x.Op != OpMIPSMOVBUreg {
  3379  			break
  3380  		}
  3381  		v.reset(OpMIPSMOVWreg)
  3382  		v.AddArg(x)
  3383  		return true
  3384  	}
  3385  	// match: (MOVBUreg <t> x:(MOVBload [off] {sym} ptr mem))
  3386  	// cond: x.Uses == 1 && clobber(x)
  3387  	// result: @x.Block (MOVBUload <t> [off] {sym} ptr mem)
  3388  	for {
  3389  		t := v.Type
  3390  		x := v.Args[0]
  3391  		if x.Op != OpMIPSMOVBload {
  3392  			break
  3393  		}
  3394  		off := x.AuxInt
  3395  		sym := x.Aux
  3396  		ptr := x.Args[0]
  3397  		mem := x.Args[1]
  3398  		if !(x.Uses == 1 && clobber(x)) {
  3399  			break
  3400  		}
  3401  		b = x.Block
  3402  		v0 := b.NewValue0(v.Line, OpMIPSMOVBUload, t)
  3403  		v.reset(OpCopy)
  3404  		v.AddArg(v0)
  3405  		v0.AuxInt = off
  3406  		v0.Aux = sym
  3407  		v0.AddArg(ptr)
  3408  		v0.AddArg(mem)
  3409  		return true
  3410  	}
  3411  	// match: (MOVBUreg (ANDconst [c] x))
  3412  	// cond:
  3413  	// result: (ANDconst [c&0xff] x)
  3414  	for {
  3415  		v_0 := v.Args[0]
  3416  		if v_0.Op != OpMIPSANDconst {
  3417  			break
  3418  		}
  3419  		c := v_0.AuxInt
  3420  		x := v_0.Args[0]
  3421  		v.reset(OpMIPSANDconst)
  3422  		v.AuxInt = c & 0xff
  3423  		v.AddArg(x)
  3424  		return true
  3425  	}
  3426  	// match: (MOVBUreg (MOVWconst [c]))
  3427  	// cond:
  3428  	// result: (MOVWconst [int64(uint8(c))])
  3429  	for {
  3430  		v_0 := v.Args[0]
  3431  		if v_0.Op != OpMIPSMOVWconst {
  3432  			break
  3433  		}
  3434  		c := v_0.AuxInt
  3435  		v.reset(OpMIPSMOVWconst)
  3436  		v.AuxInt = int64(uint8(c))
  3437  		return true
  3438  	}
  3439  	return false
  3440  }
  3441  func rewriteValueMIPS_OpMIPSMOVBload(v *Value, config *Config) bool {
  3442  	b := v.Block
  3443  	_ = b
  3444  	// match: (MOVBload  [off1] {sym} x:(ADDconst [off2] ptr) mem)
  3445  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  3446  	// result: (MOVBload  [off1+off2] {sym} ptr mem)
  3447  	for {
  3448  		off1 := v.AuxInt
  3449  		sym := v.Aux
  3450  		x := v.Args[0]
  3451  		if x.Op != OpMIPSADDconst {
  3452  			break
  3453  		}
  3454  		off2 := x.AuxInt
  3455  		ptr := x.Args[0]
  3456  		mem := v.Args[1]
  3457  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  3458  			break
  3459  		}
  3460  		v.reset(OpMIPSMOVBload)
  3461  		v.AuxInt = off1 + off2
  3462  		v.Aux = sym
  3463  		v.AddArg(ptr)
  3464  		v.AddArg(mem)
  3465  		return true
  3466  	}
  3467  	// match: (MOVBload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  3468  	// cond: canMergeSym(sym1,sym2)
  3469  	// result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  3470  	for {
  3471  		off1 := v.AuxInt
  3472  		sym1 := v.Aux
  3473  		v_0 := v.Args[0]
  3474  		if v_0.Op != OpMIPSMOVWaddr {
  3475  			break
  3476  		}
  3477  		off2 := v_0.AuxInt
  3478  		sym2 := v_0.Aux
  3479  		ptr := v_0.Args[0]
  3480  		mem := v.Args[1]
  3481  		if !(canMergeSym(sym1, sym2)) {
  3482  			break
  3483  		}
  3484  		v.reset(OpMIPSMOVBload)
  3485  		v.AuxInt = off1 + off2
  3486  		v.Aux = mergeSym(sym1, sym2)
  3487  		v.AddArg(ptr)
  3488  		v.AddArg(mem)
  3489  		return true
  3490  	}
  3491  	// match: (MOVBload [off] {sym} ptr (MOVBstore [off2] {sym2} ptr2 x _))
  3492  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  3493  	// result: (MOVBreg x)
  3494  	for {
  3495  		off := v.AuxInt
  3496  		sym := v.Aux
  3497  		ptr := v.Args[0]
  3498  		v_1 := v.Args[1]
  3499  		if v_1.Op != OpMIPSMOVBstore {
  3500  			break
  3501  		}
  3502  		off2 := v_1.AuxInt
  3503  		sym2 := v_1.Aux
  3504  		ptr2 := v_1.Args[0]
  3505  		x := v_1.Args[1]
  3506  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  3507  			break
  3508  		}
  3509  		v.reset(OpMIPSMOVBreg)
  3510  		v.AddArg(x)
  3511  		return true
  3512  	}
  3513  	return false
  3514  }
  3515  func rewriteValueMIPS_OpMIPSMOVBreg(v *Value, config *Config) bool {
  3516  	b := v.Block
  3517  	_ = b
  3518  	// match: (MOVBreg x:(MOVBload _ _))
  3519  	// cond:
  3520  	// result: (MOVWreg x)
  3521  	for {
  3522  		x := v.Args[0]
  3523  		if x.Op != OpMIPSMOVBload {
  3524  			break
  3525  		}
  3526  		v.reset(OpMIPSMOVWreg)
  3527  		v.AddArg(x)
  3528  		return true
  3529  	}
  3530  	// match: (MOVBreg x:(MOVBreg _))
  3531  	// cond:
  3532  	// result: (MOVWreg x)
  3533  	for {
  3534  		x := v.Args[0]
  3535  		if x.Op != OpMIPSMOVBreg {
  3536  			break
  3537  		}
  3538  		v.reset(OpMIPSMOVWreg)
  3539  		v.AddArg(x)
  3540  		return true
  3541  	}
  3542  	// match: (MOVBreg <t> x:(MOVBUload [off] {sym} ptr mem))
  3543  	// cond: x.Uses == 1 && clobber(x)
  3544  	// result: @x.Block (MOVBload <t> [off] {sym} ptr mem)
  3545  	for {
  3546  		t := v.Type
  3547  		x := v.Args[0]
  3548  		if x.Op != OpMIPSMOVBUload {
  3549  			break
  3550  		}
  3551  		off := x.AuxInt
  3552  		sym := x.Aux
  3553  		ptr := x.Args[0]
  3554  		mem := x.Args[1]
  3555  		if !(x.Uses == 1 && clobber(x)) {
  3556  			break
  3557  		}
  3558  		b = x.Block
  3559  		v0 := b.NewValue0(v.Line, OpMIPSMOVBload, t)
  3560  		v.reset(OpCopy)
  3561  		v.AddArg(v0)
  3562  		v0.AuxInt = off
  3563  		v0.Aux = sym
  3564  		v0.AddArg(ptr)
  3565  		v0.AddArg(mem)
  3566  		return true
  3567  	}
  3568  	// match: (MOVBreg (ANDconst [c] x))
  3569  	// cond: c & 0x80 == 0
  3570  	// result: (ANDconst [c&0x7f] x)
  3571  	for {
  3572  		v_0 := v.Args[0]
  3573  		if v_0.Op != OpMIPSANDconst {
  3574  			break
  3575  		}
  3576  		c := v_0.AuxInt
  3577  		x := v_0.Args[0]
  3578  		if !(c&0x80 == 0) {
  3579  			break
  3580  		}
  3581  		v.reset(OpMIPSANDconst)
  3582  		v.AuxInt = c & 0x7f
  3583  		v.AddArg(x)
  3584  		return true
  3585  	}
  3586  	// match: (MOVBreg  (MOVWconst [c]))
  3587  	// cond:
  3588  	// result: (MOVWconst [int64(int8(c))])
  3589  	for {
  3590  		v_0 := v.Args[0]
  3591  		if v_0.Op != OpMIPSMOVWconst {
  3592  			break
  3593  		}
  3594  		c := v_0.AuxInt
  3595  		v.reset(OpMIPSMOVWconst)
  3596  		v.AuxInt = int64(int8(c))
  3597  		return true
  3598  	}
  3599  	return false
  3600  }
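// The MOVB load/extension rules: loads fold an ADDconst or MOVWaddr into
// their offset and symbol when the combined offset still fits in 16 bits
// (or the address computation has a single use); a load from an address
// just stored to is forwarded as MOVB(U)reg of the stored value; a
// MOVB(U)reg of a value that is already extended to the right width
// collapses to MOVWreg, a single-use load of the opposite signedness is
// re-issued as the matching MOVB(U)load in the load's block, and
// MOVB(U)reg of ANDconst or MOVWconst is folded into the mask or constant.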
  3601  func rewriteValueMIPS_OpMIPSMOVBstore(v *Value, config *Config) bool {
  3602  	b := v.Block
  3603  	_ = b
  3604  	// match: (MOVBstore [off1] {sym} x:(ADDconst [off2] ptr) val mem)
  3605  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  3606  	// result: (MOVBstore [off1+off2] {sym} ptr val mem)
  3607  	for {
  3608  		off1 := v.AuxInt
  3609  		sym := v.Aux
  3610  		x := v.Args[0]
  3611  		if x.Op != OpMIPSADDconst {
  3612  			break
  3613  		}
  3614  		off2 := x.AuxInt
  3615  		ptr := x.Args[0]
  3616  		val := v.Args[1]
  3617  		mem := v.Args[2]
  3618  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  3619  			break
  3620  		}
  3621  		v.reset(OpMIPSMOVBstore)
  3622  		v.AuxInt = off1 + off2
  3623  		v.Aux = sym
  3624  		v.AddArg(ptr)
  3625  		v.AddArg(val)
  3626  		v.AddArg(mem)
  3627  		return true
  3628  	}
  3629  	// match: (MOVBstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem)
  3630  	// cond: canMergeSym(sym1,sym2)
  3631  	// result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
  3632  	for {
  3633  		off1 := v.AuxInt
  3634  		sym1 := v.Aux
  3635  		v_0 := v.Args[0]
  3636  		if v_0.Op != OpMIPSMOVWaddr {
  3637  			break
  3638  		}
  3639  		off2 := v_0.AuxInt
  3640  		sym2 := v_0.Aux
  3641  		ptr := v_0.Args[0]
  3642  		val := v.Args[1]
  3643  		mem := v.Args[2]
  3644  		if !(canMergeSym(sym1, sym2)) {
  3645  			break
  3646  		}
  3647  		v.reset(OpMIPSMOVBstore)
  3648  		v.AuxInt = off1 + off2
  3649  		v.Aux = mergeSym(sym1, sym2)
  3650  		v.AddArg(ptr)
  3651  		v.AddArg(val)
  3652  		v.AddArg(mem)
  3653  		return true
  3654  	}
  3655  	// match: (MOVBstore [off] {sym} ptr (MOVWconst [0]) mem)
  3656  	// cond:
  3657  	// result: (MOVBstorezero [off] {sym} ptr mem)
  3658  	for {
  3659  		off := v.AuxInt
  3660  		sym := v.Aux
  3661  		ptr := v.Args[0]
  3662  		v_1 := v.Args[1]
  3663  		if v_1.Op != OpMIPSMOVWconst {
  3664  			break
  3665  		}
  3666  		if v_1.AuxInt != 0 {
  3667  			break
  3668  		}
  3669  		mem := v.Args[2]
  3670  		v.reset(OpMIPSMOVBstorezero)
  3671  		v.AuxInt = off
  3672  		v.Aux = sym
  3673  		v.AddArg(ptr)
  3674  		v.AddArg(mem)
  3675  		return true
  3676  	}
  3677  	// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
  3678  	// cond:
  3679  	// result: (MOVBstore [off] {sym} ptr x mem)
  3680  	for {
  3681  		off := v.AuxInt
  3682  		sym := v.Aux
  3683  		ptr := v.Args[0]
  3684  		v_1 := v.Args[1]
  3685  		if v_1.Op != OpMIPSMOVBreg {
  3686  			break
  3687  		}
  3688  		x := v_1.Args[0]
  3689  		mem := v.Args[2]
  3690  		v.reset(OpMIPSMOVBstore)
  3691  		v.AuxInt = off
  3692  		v.Aux = sym
  3693  		v.AddArg(ptr)
  3694  		v.AddArg(x)
  3695  		v.AddArg(mem)
  3696  		return true
  3697  	}
  3698  	// match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
  3699  	// cond:
  3700  	// result: (MOVBstore [off] {sym} ptr x mem)
  3701  	for {
  3702  		off := v.AuxInt
  3703  		sym := v.Aux
  3704  		ptr := v.Args[0]
  3705  		v_1 := v.Args[1]
  3706  		if v_1.Op != OpMIPSMOVBUreg {
  3707  			break
  3708  		}
  3709  		x := v_1.Args[0]
  3710  		mem := v.Args[2]
  3711  		v.reset(OpMIPSMOVBstore)
  3712  		v.AuxInt = off
  3713  		v.Aux = sym
  3714  		v.AddArg(ptr)
  3715  		v.AddArg(x)
  3716  		v.AddArg(mem)
  3717  		return true
  3718  	}
  3719  	// match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
  3720  	// cond:
  3721  	// result: (MOVBstore [off] {sym} ptr x mem)
  3722  	for {
  3723  		off := v.AuxInt
  3724  		sym := v.Aux
  3725  		ptr := v.Args[0]
  3726  		v_1 := v.Args[1]
  3727  		if v_1.Op != OpMIPSMOVHreg {
  3728  			break
  3729  		}
  3730  		x := v_1.Args[0]
  3731  		mem := v.Args[2]
  3732  		v.reset(OpMIPSMOVBstore)
  3733  		v.AuxInt = off
  3734  		v.Aux = sym
  3735  		v.AddArg(ptr)
  3736  		v.AddArg(x)
  3737  		v.AddArg(mem)
  3738  		return true
  3739  	}
  3740  	// match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
  3741  	// cond:
  3742  	// result: (MOVBstore [off] {sym} ptr x mem)
  3743  	for {
  3744  		off := v.AuxInt
  3745  		sym := v.Aux
  3746  		ptr := v.Args[0]
  3747  		v_1 := v.Args[1]
  3748  		if v_1.Op != OpMIPSMOVHUreg {
  3749  			break
  3750  		}
  3751  		x := v_1.Args[0]
  3752  		mem := v.Args[2]
  3753  		v.reset(OpMIPSMOVBstore)
  3754  		v.AuxInt = off
  3755  		v.Aux = sym
  3756  		v.AddArg(ptr)
  3757  		v.AddArg(x)
  3758  		v.AddArg(mem)
  3759  		return true
  3760  	}
  3761  	// match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
  3762  	// cond:
  3763  	// result: (MOVBstore [off] {sym} ptr x mem)
  3764  	for {
  3765  		off := v.AuxInt
  3766  		sym := v.Aux
  3767  		ptr := v.Args[0]
  3768  		v_1 := v.Args[1]
  3769  		if v_1.Op != OpMIPSMOVWreg {
  3770  			break
  3771  		}
  3772  		x := v_1.Args[0]
  3773  		mem := v.Args[2]
  3774  		v.reset(OpMIPSMOVBstore)
  3775  		v.AuxInt = off
  3776  		v.Aux = sym
  3777  		v.AddArg(ptr)
  3778  		v.AddArg(x)
  3779  		v.AddArg(mem)
  3780  		return true
  3781  	}
  3782  	return false
  3783  }
  3784  func rewriteValueMIPS_OpMIPSMOVBstorezero(v *Value, config *Config) bool {
  3785  	b := v.Block
  3786  	_ = b
  3787  	// match: (MOVBstorezero [off1] {sym} x:(ADDconst [off2] ptr) mem)
  3788  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  3789  	// result: (MOVBstorezero [off1+off2] {sym} ptr mem)
  3790  	for {
  3791  		off1 := v.AuxInt
  3792  		sym := v.Aux
  3793  		x := v.Args[0]
  3794  		if x.Op != OpMIPSADDconst {
  3795  			break
  3796  		}
  3797  		off2 := x.AuxInt
  3798  		ptr := x.Args[0]
  3799  		mem := v.Args[1]
  3800  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  3801  			break
  3802  		}
  3803  		v.reset(OpMIPSMOVBstorezero)
  3804  		v.AuxInt = off1 + off2
  3805  		v.Aux = sym
  3806  		v.AddArg(ptr)
  3807  		v.AddArg(mem)
  3808  		return true
  3809  	}
  3810  	// match: (MOVBstorezero [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  3811  	// cond: canMergeSym(sym1,sym2)
  3812  	// result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  3813  	for {
  3814  		off1 := v.AuxInt
  3815  		sym1 := v.Aux
  3816  		v_0 := v.Args[0]
  3817  		if v_0.Op != OpMIPSMOVWaddr {
  3818  			break
  3819  		}
  3820  		off2 := v_0.AuxInt
  3821  		sym2 := v_0.Aux
  3822  		ptr := v_0.Args[0]
  3823  		mem := v.Args[1]
  3824  		if !(canMergeSym(sym1, sym2)) {
  3825  			break
  3826  		}
  3827  		v.reset(OpMIPSMOVBstorezero)
  3828  		v.AuxInt = off1 + off2
  3829  		v.Aux = mergeSym(sym1, sym2)
  3830  		v.AddArg(ptr)
  3831  		v.AddArg(mem)
  3832  		return true
  3833  	}
  3834  	return false
  3835  }
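// The remaining load and store rewrites below repeat the same two addressing
// folds seen above: a 16-bit-representable ADDconst offset (or one whose
// ADDconst has a single use) is absorbed into AuxInt, and a MOVWaddr base
// with a mergeable symbol is absorbed into Aux. The load variants also
// forward a value just stored to the same symbol, offset and pointer directly
// to the load, skipping the memory round trip; the sub-word loads (MOVHUload,
// MOVHload) re-extend the forwarded value so the upper register bits stay
// well defined. The narrower stores repeat the MOVBstore pattern of turning
// zero stores into *storezero and dropping extensions of the stored value.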
  3836  func rewriteValueMIPS_OpMIPSMOVDload(v *Value, config *Config) bool {
  3837  	b := v.Block
  3838  	_ = b
  3839  	// match: (MOVDload  [off1] {sym} x:(ADDconst [off2] ptr) mem)
  3840  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  3841  	// result: (MOVDload  [off1+off2] {sym} ptr mem)
  3842  	for {
  3843  		off1 := v.AuxInt
  3844  		sym := v.Aux
  3845  		x := v.Args[0]
  3846  		if x.Op != OpMIPSADDconst {
  3847  			break
  3848  		}
  3849  		off2 := x.AuxInt
  3850  		ptr := x.Args[0]
  3851  		mem := v.Args[1]
  3852  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  3853  			break
  3854  		}
  3855  		v.reset(OpMIPSMOVDload)
  3856  		v.AuxInt = off1 + off2
  3857  		v.Aux = sym
  3858  		v.AddArg(ptr)
  3859  		v.AddArg(mem)
  3860  		return true
  3861  	}
  3862  	// match: (MOVDload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  3863  	// cond: canMergeSym(sym1,sym2)
  3864  	// result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  3865  	for {
  3866  		off1 := v.AuxInt
  3867  		sym1 := v.Aux
  3868  		v_0 := v.Args[0]
  3869  		if v_0.Op != OpMIPSMOVWaddr {
  3870  			break
  3871  		}
  3872  		off2 := v_0.AuxInt
  3873  		sym2 := v_0.Aux
  3874  		ptr := v_0.Args[0]
  3875  		mem := v.Args[1]
  3876  		if !(canMergeSym(sym1, sym2)) {
  3877  			break
  3878  		}
  3879  		v.reset(OpMIPSMOVDload)
  3880  		v.AuxInt = off1 + off2
  3881  		v.Aux = mergeSym(sym1, sym2)
  3882  		v.AddArg(ptr)
  3883  		v.AddArg(mem)
  3884  		return true
  3885  	}
  3886  	// match: (MOVDload [off] {sym} ptr (MOVDstore [off2] {sym2} ptr2 x _))
  3887  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  3888  	// result: x
  3889  	for {
  3890  		off := v.AuxInt
  3891  		sym := v.Aux
  3892  		ptr := v.Args[0]
  3893  		v_1 := v.Args[1]
  3894  		if v_1.Op != OpMIPSMOVDstore {
  3895  			break
  3896  		}
  3897  		off2 := v_1.AuxInt
  3898  		sym2 := v_1.Aux
  3899  		ptr2 := v_1.Args[0]
  3900  		x := v_1.Args[1]
  3901  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  3902  			break
  3903  		}
  3904  		v.reset(OpCopy)
  3905  		v.Type = x.Type
  3906  		v.AddArg(x)
  3907  		return true
  3908  	}
  3909  	return false
  3910  }
  3911  func rewriteValueMIPS_OpMIPSMOVDstore(v *Value, config *Config) bool {
  3912  	b := v.Block
  3913  	_ = b
  3914  	// match: (MOVDstore [off1] {sym} x:(ADDconst [off2] ptr) val mem)
  3915  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  3916  	// result: (MOVDstore [off1+off2] {sym} ptr val mem)
  3917  	for {
  3918  		off1 := v.AuxInt
  3919  		sym := v.Aux
  3920  		x := v.Args[0]
  3921  		if x.Op != OpMIPSADDconst {
  3922  			break
  3923  		}
  3924  		off2 := x.AuxInt
  3925  		ptr := x.Args[0]
  3926  		val := v.Args[1]
  3927  		mem := v.Args[2]
  3928  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  3929  			break
  3930  		}
  3931  		v.reset(OpMIPSMOVDstore)
  3932  		v.AuxInt = off1 + off2
  3933  		v.Aux = sym
  3934  		v.AddArg(ptr)
  3935  		v.AddArg(val)
  3936  		v.AddArg(mem)
  3937  		return true
  3938  	}
  3939  	// match: (MOVDstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem)
  3940  	// cond: canMergeSym(sym1,sym2)
  3941  	// result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
  3942  	for {
  3943  		off1 := v.AuxInt
  3944  		sym1 := v.Aux
  3945  		v_0 := v.Args[0]
  3946  		if v_0.Op != OpMIPSMOVWaddr {
  3947  			break
  3948  		}
  3949  		off2 := v_0.AuxInt
  3950  		sym2 := v_0.Aux
  3951  		ptr := v_0.Args[0]
  3952  		val := v.Args[1]
  3953  		mem := v.Args[2]
  3954  		if !(canMergeSym(sym1, sym2)) {
  3955  			break
  3956  		}
  3957  		v.reset(OpMIPSMOVDstore)
  3958  		v.AuxInt = off1 + off2
  3959  		v.Aux = mergeSym(sym1, sym2)
  3960  		v.AddArg(ptr)
  3961  		v.AddArg(val)
  3962  		v.AddArg(mem)
  3963  		return true
  3964  	}
  3965  	return false
  3966  }
  3967  func rewriteValueMIPS_OpMIPSMOVFload(v *Value, config *Config) bool {
  3968  	b := v.Block
  3969  	_ = b
  3970  	// match: (MOVFload  [off1] {sym} x:(ADDconst [off2] ptr) mem)
  3971  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  3972  	// result: (MOVFload  [off1+off2] {sym} ptr mem)
  3973  	for {
  3974  		off1 := v.AuxInt
  3975  		sym := v.Aux
  3976  		x := v.Args[0]
  3977  		if x.Op != OpMIPSADDconst {
  3978  			break
  3979  		}
  3980  		off2 := x.AuxInt
  3981  		ptr := x.Args[0]
  3982  		mem := v.Args[1]
  3983  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  3984  			break
  3985  		}
  3986  		v.reset(OpMIPSMOVFload)
  3987  		v.AuxInt = off1 + off2
  3988  		v.Aux = sym
  3989  		v.AddArg(ptr)
  3990  		v.AddArg(mem)
  3991  		return true
  3992  	}
  3993  	// match: (MOVFload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  3994  	// cond: canMergeSym(sym1,sym2)
  3995  	// result: (MOVFload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  3996  	for {
  3997  		off1 := v.AuxInt
  3998  		sym1 := v.Aux
  3999  		v_0 := v.Args[0]
  4000  		if v_0.Op != OpMIPSMOVWaddr {
  4001  			break
  4002  		}
  4003  		off2 := v_0.AuxInt
  4004  		sym2 := v_0.Aux
  4005  		ptr := v_0.Args[0]
  4006  		mem := v.Args[1]
  4007  		if !(canMergeSym(sym1, sym2)) {
  4008  			break
  4009  		}
  4010  		v.reset(OpMIPSMOVFload)
  4011  		v.AuxInt = off1 + off2
  4012  		v.Aux = mergeSym(sym1, sym2)
  4013  		v.AddArg(ptr)
  4014  		v.AddArg(mem)
  4015  		return true
  4016  	}
  4017  	// match: (MOVFload [off] {sym} ptr (MOVFstore [off2] {sym2} ptr2 x _))
  4018  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  4019  	// result: x
  4020  	for {
  4021  		off := v.AuxInt
  4022  		sym := v.Aux
  4023  		ptr := v.Args[0]
  4024  		v_1 := v.Args[1]
  4025  		if v_1.Op != OpMIPSMOVFstore {
  4026  			break
  4027  		}
  4028  		off2 := v_1.AuxInt
  4029  		sym2 := v_1.Aux
  4030  		ptr2 := v_1.Args[0]
  4031  		x := v_1.Args[1]
  4032  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  4033  			break
  4034  		}
  4035  		v.reset(OpCopy)
  4036  		v.Type = x.Type
  4037  		v.AddArg(x)
  4038  		return true
  4039  	}
  4040  	return false
  4041  }
  4042  func rewriteValueMIPS_OpMIPSMOVFstore(v *Value, config *Config) bool {
  4043  	b := v.Block
  4044  	_ = b
  4045  	// match: (MOVFstore [off1] {sym} x:(ADDconst [off2] ptr) val mem)
  4046  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4047  	// result: (MOVFstore [off1+off2] {sym} ptr val mem)
  4048  	for {
  4049  		off1 := v.AuxInt
  4050  		sym := v.Aux
  4051  		x := v.Args[0]
  4052  		if x.Op != OpMIPSADDconst {
  4053  			break
  4054  		}
  4055  		off2 := x.AuxInt
  4056  		ptr := x.Args[0]
  4057  		val := v.Args[1]
  4058  		mem := v.Args[2]
  4059  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4060  			break
  4061  		}
  4062  		v.reset(OpMIPSMOVFstore)
  4063  		v.AuxInt = off1 + off2
  4064  		v.Aux = sym
  4065  		v.AddArg(ptr)
  4066  		v.AddArg(val)
  4067  		v.AddArg(mem)
  4068  		return true
  4069  	}
  4070  	// match: (MOVFstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem)
  4071  	// cond: canMergeSym(sym1,sym2)
  4072  	// result: (MOVFstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
  4073  	for {
  4074  		off1 := v.AuxInt
  4075  		sym1 := v.Aux
  4076  		v_0 := v.Args[0]
  4077  		if v_0.Op != OpMIPSMOVWaddr {
  4078  			break
  4079  		}
  4080  		off2 := v_0.AuxInt
  4081  		sym2 := v_0.Aux
  4082  		ptr := v_0.Args[0]
  4083  		val := v.Args[1]
  4084  		mem := v.Args[2]
  4085  		if !(canMergeSym(sym1, sym2)) {
  4086  			break
  4087  		}
  4088  		v.reset(OpMIPSMOVFstore)
  4089  		v.AuxInt = off1 + off2
  4090  		v.Aux = mergeSym(sym1, sym2)
  4091  		v.AddArg(ptr)
  4092  		v.AddArg(val)
  4093  		v.AddArg(mem)
  4094  		return true
  4095  	}
  4096  	return false
  4097  }
  4098  func rewriteValueMIPS_OpMIPSMOVHUload(v *Value, config *Config) bool {
  4099  	b := v.Block
  4100  	_ = b
  4101  	// match: (MOVHUload [off1] {sym} x:(ADDconst [off2] ptr) mem)
  4102  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4103  	// result: (MOVHUload [off1+off2] {sym} ptr mem)
  4104  	for {
  4105  		off1 := v.AuxInt
  4106  		sym := v.Aux
  4107  		x := v.Args[0]
  4108  		if x.Op != OpMIPSADDconst {
  4109  			break
  4110  		}
  4111  		off2 := x.AuxInt
  4112  		ptr := x.Args[0]
  4113  		mem := v.Args[1]
  4114  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4115  			break
  4116  		}
  4117  		v.reset(OpMIPSMOVHUload)
  4118  		v.AuxInt = off1 + off2
  4119  		v.Aux = sym
  4120  		v.AddArg(ptr)
  4121  		v.AddArg(mem)
  4122  		return true
  4123  	}
  4124  	// match: (MOVHUload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  4125  	// cond: canMergeSym(sym1,sym2)
  4126  	// result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4127  	for {
  4128  		off1 := v.AuxInt
  4129  		sym1 := v.Aux
  4130  		v_0 := v.Args[0]
  4131  		if v_0.Op != OpMIPSMOVWaddr {
  4132  			break
  4133  		}
  4134  		off2 := v_0.AuxInt
  4135  		sym2 := v_0.Aux
  4136  		ptr := v_0.Args[0]
  4137  		mem := v.Args[1]
  4138  		if !(canMergeSym(sym1, sym2)) {
  4139  			break
  4140  		}
  4141  		v.reset(OpMIPSMOVHUload)
  4142  		v.AuxInt = off1 + off2
  4143  		v.Aux = mergeSym(sym1, sym2)
  4144  		v.AddArg(ptr)
  4145  		v.AddArg(mem)
  4146  		return true
  4147  	}
  4148  	// match: (MOVHUload [off] {sym} ptr (MOVHstore [off2] {sym2} ptr2 x _))
  4149  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  4150  	// result: (MOVHUreg x)
  4151  	for {
  4152  		off := v.AuxInt
  4153  		sym := v.Aux
  4154  		ptr := v.Args[0]
  4155  		v_1 := v.Args[1]
  4156  		if v_1.Op != OpMIPSMOVHstore {
  4157  			break
  4158  		}
  4159  		off2 := v_1.AuxInt
  4160  		sym2 := v_1.Aux
  4161  		ptr2 := v_1.Args[0]
  4162  		x := v_1.Args[1]
  4163  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  4164  			break
  4165  		}
  4166  		v.reset(OpMIPSMOVHUreg)
  4167  		v.AddArg(x)
  4168  		return true
  4169  	}
  4170  	return false
  4171  }
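// MOVHUreg is the 16-bit zero-extension analogue of the MOVBreg rules above:
// arguments that are already zero-extended (unsigned byte/halfword loads or
// earlier zero extensions) only need a word move, a single-use signed
// halfword load is converted into MOVHUload, an ANDconst mask is simply
// narrowed to its low 16 bits, and constants fold through uint16.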
  4172  func rewriteValueMIPS_OpMIPSMOVHUreg(v *Value, config *Config) bool {
  4173  	b := v.Block
  4174  	_ = b
  4175  	// match: (MOVHUreg x:(MOVBUload _ _))
  4176  	// cond:
  4177  	// result: (MOVWreg x)
  4178  	for {
  4179  		x := v.Args[0]
  4180  		if x.Op != OpMIPSMOVBUload {
  4181  			break
  4182  		}
  4183  		v.reset(OpMIPSMOVWreg)
  4184  		v.AddArg(x)
  4185  		return true
  4186  	}
  4187  	// match: (MOVHUreg x:(MOVHUload _ _))
  4188  	// cond:
  4189  	// result: (MOVWreg x)
  4190  	for {
  4191  		x := v.Args[0]
  4192  		if x.Op != OpMIPSMOVHUload {
  4193  			break
  4194  		}
  4195  		v.reset(OpMIPSMOVWreg)
  4196  		v.AddArg(x)
  4197  		return true
  4198  	}
  4199  	// match: (MOVHUreg x:(MOVBUreg _))
  4200  	// cond:
  4201  	// result: (MOVWreg x)
  4202  	for {
  4203  		x := v.Args[0]
  4204  		if x.Op != OpMIPSMOVBUreg {
  4205  			break
  4206  		}
  4207  		v.reset(OpMIPSMOVWreg)
  4208  		v.AddArg(x)
  4209  		return true
  4210  	}
  4211  	// match: (MOVHUreg x:(MOVHUreg _))
  4212  	// cond:
  4213  	// result: (MOVWreg x)
  4214  	for {
  4215  		x := v.Args[0]
  4216  		if x.Op != OpMIPSMOVHUreg {
  4217  			break
  4218  		}
  4219  		v.reset(OpMIPSMOVWreg)
  4220  		v.AddArg(x)
  4221  		return true
  4222  	}
  4223  	// match: (MOVHUreg <t> x:(MOVHload [off] {sym} ptr mem))
  4224  	// cond: x.Uses == 1 && clobber(x)
  4225  	// result: @x.Block (MOVHUload <t> [off] {sym} ptr mem)
  4226  	for {
  4227  		t := v.Type
  4228  		x := v.Args[0]
  4229  		if x.Op != OpMIPSMOVHload {
  4230  			break
  4231  		}
  4232  		off := x.AuxInt
  4233  		sym := x.Aux
  4234  		ptr := x.Args[0]
  4235  		mem := x.Args[1]
  4236  		if !(x.Uses == 1 && clobber(x)) {
  4237  			break
  4238  		}
  4239  		b = x.Block
  4240  		v0 := b.NewValue0(v.Line, OpMIPSMOVHUload, t)
  4241  		v.reset(OpCopy)
  4242  		v.AddArg(v0)
  4243  		v0.AuxInt = off
  4244  		v0.Aux = sym
  4245  		v0.AddArg(ptr)
  4246  		v0.AddArg(mem)
  4247  		return true
  4248  	}
  4249  	// match: (MOVHUreg (ANDconst [c] x))
  4250  	// cond:
  4251  	// result: (ANDconst [c&0xffff] x)
  4252  	for {
  4253  		v_0 := v.Args[0]
  4254  		if v_0.Op != OpMIPSANDconst {
  4255  			break
  4256  		}
  4257  		c := v_0.AuxInt
  4258  		x := v_0.Args[0]
  4259  		v.reset(OpMIPSANDconst)
  4260  		v.AuxInt = c & 0xffff
  4261  		v.AddArg(x)
  4262  		return true
  4263  	}
  4264  	// match: (MOVHUreg (MOVWconst [c]))
  4265  	// cond:
  4266  	// result: (MOVWconst [int64(uint16(c))])
  4267  	for {
  4268  		v_0 := v.Args[0]
  4269  		if v_0.Op != OpMIPSMOVWconst {
  4270  			break
  4271  		}
  4272  		c := v_0.AuxInt
  4273  		v.reset(OpMIPSMOVWconst)
  4274  		v.AuxInt = int64(uint16(c))
  4275  		return true
  4276  	}
  4277  	return false
  4278  }
  4279  func rewriteValueMIPS_OpMIPSMOVHload(v *Value, config *Config) bool {
  4280  	b := v.Block
  4281  	_ = b
  4282  	// match: (MOVHload  [off1] {sym} x:(ADDconst [off2] ptr) mem)
  4283  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4284  	// result: (MOVHload  [off1+off2] {sym} ptr mem)
  4285  	for {
  4286  		off1 := v.AuxInt
  4287  		sym := v.Aux
  4288  		x := v.Args[0]
  4289  		if x.Op != OpMIPSADDconst {
  4290  			break
  4291  		}
  4292  		off2 := x.AuxInt
  4293  		ptr := x.Args[0]
  4294  		mem := v.Args[1]
  4295  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4296  			break
  4297  		}
  4298  		v.reset(OpMIPSMOVHload)
  4299  		v.AuxInt = off1 + off2
  4300  		v.Aux = sym
  4301  		v.AddArg(ptr)
  4302  		v.AddArg(mem)
  4303  		return true
  4304  	}
  4305  	// match: (MOVHload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  4306  	// cond: canMergeSym(sym1,sym2)
  4307  	// result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4308  	for {
  4309  		off1 := v.AuxInt
  4310  		sym1 := v.Aux
  4311  		v_0 := v.Args[0]
  4312  		if v_0.Op != OpMIPSMOVWaddr {
  4313  			break
  4314  		}
  4315  		off2 := v_0.AuxInt
  4316  		sym2 := v_0.Aux
  4317  		ptr := v_0.Args[0]
  4318  		mem := v.Args[1]
  4319  		if !(canMergeSym(sym1, sym2)) {
  4320  			break
  4321  		}
  4322  		v.reset(OpMIPSMOVHload)
  4323  		v.AuxInt = off1 + off2
  4324  		v.Aux = mergeSym(sym1, sym2)
  4325  		v.AddArg(ptr)
  4326  		v.AddArg(mem)
  4327  		return true
  4328  	}
  4329  	// match: (MOVHload [off] {sym} ptr (MOVHstore [off2] {sym2} ptr2 x _))
  4330  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  4331  	// result: (MOVHreg x)
  4332  	for {
  4333  		off := v.AuxInt
  4334  		sym := v.Aux
  4335  		ptr := v.Args[0]
  4336  		v_1 := v.Args[1]
  4337  		if v_1.Op != OpMIPSMOVHstore {
  4338  			break
  4339  		}
  4340  		off2 := v_1.AuxInt
  4341  		sym2 := v_1.Aux
  4342  		ptr2 := v_1.Args[0]
  4343  		x := v_1.Args[1]
  4344  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  4345  			break
  4346  		}
  4347  		v.reset(OpMIPSMOVHreg)
  4348  		v.AddArg(x)
  4349  		return true
  4350  	}
  4351  	return false
  4352  }
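// MOVHreg mirrors MOVBreg at halfword width: sources already known to fit in
// a sign-extended 16-bit value are reduced to a word move, a single-use
// MOVHUload is turned into a signed MOVHload, an ANDconst whose mask clears
// bit 15 needs no extension, and constants fold through int16.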
  4353  func rewriteValueMIPS_OpMIPSMOVHreg(v *Value, config *Config) bool {
  4354  	b := v.Block
  4355  	_ = b
  4356  	// match: (MOVHreg x:(MOVBload _ _))
  4357  	// cond:
  4358  	// result: (MOVWreg x)
  4359  	for {
  4360  		x := v.Args[0]
  4361  		if x.Op != OpMIPSMOVBload {
  4362  			break
  4363  		}
  4364  		v.reset(OpMIPSMOVWreg)
  4365  		v.AddArg(x)
  4366  		return true
  4367  	}
  4368  	// match: (MOVHreg x:(MOVBUload _ _))
  4369  	// cond:
  4370  	// result: (MOVWreg x)
  4371  	for {
  4372  		x := v.Args[0]
  4373  		if x.Op != OpMIPSMOVBUload {
  4374  			break
  4375  		}
  4376  		v.reset(OpMIPSMOVWreg)
  4377  		v.AddArg(x)
  4378  		return true
  4379  	}
  4380  	// match: (MOVHreg x:(MOVHload _ _))
  4381  	// cond:
  4382  	// result: (MOVWreg x)
  4383  	for {
  4384  		x := v.Args[0]
  4385  		if x.Op != OpMIPSMOVHload {
  4386  			break
  4387  		}
  4388  		v.reset(OpMIPSMOVWreg)
  4389  		v.AddArg(x)
  4390  		return true
  4391  	}
  4392  	// match: (MOVHreg x:(MOVBreg _))
  4393  	// cond:
  4394  	// result: (MOVWreg x)
  4395  	for {
  4396  		x := v.Args[0]
  4397  		if x.Op != OpMIPSMOVBreg {
  4398  			break
  4399  		}
  4400  		v.reset(OpMIPSMOVWreg)
  4401  		v.AddArg(x)
  4402  		return true
  4403  	}
  4404  	// match: (MOVHreg x:(MOVBUreg _))
  4405  	// cond:
  4406  	// result: (MOVWreg x)
  4407  	for {
  4408  		x := v.Args[0]
  4409  		if x.Op != OpMIPSMOVBUreg {
  4410  			break
  4411  		}
  4412  		v.reset(OpMIPSMOVWreg)
  4413  		v.AddArg(x)
  4414  		return true
  4415  	}
  4416  	// match: (MOVHreg x:(MOVHreg _))
  4417  	// cond:
  4418  	// result: (MOVWreg x)
  4419  	for {
  4420  		x := v.Args[0]
  4421  		if x.Op != OpMIPSMOVHreg {
  4422  			break
  4423  		}
  4424  		v.reset(OpMIPSMOVWreg)
  4425  		v.AddArg(x)
  4426  		return true
  4427  	}
  4428  	// match: (MOVHreg <t> x:(MOVHUload [off] {sym} ptr mem))
  4429  	// cond: x.Uses == 1 && clobber(x)
  4430  	// result: @x.Block (MOVHload <t> [off] {sym} ptr mem)
  4431  	for {
  4432  		t := v.Type
  4433  		x := v.Args[0]
  4434  		if x.Op != OpMIPSMOVHUload {
  4435  			break
  4436  		}
  4437  		off := x.AuxInt
  4438  		sym := x.Aux
  4439  		ptr := x.Args[0]
  4440  		mem := x.Args[1]
  4441  		if !(x.Uses == 1 && clobber(x)) {
  4442  			break
  4443  		}
  4444  		b = x.Block
  4445  		v0 := b.NewValue0(v.Line, OpMIPSMOVHload, t)
  4446  		v.reset(OpCopy)
  4447  		v.AddArg(v0)
  4448  		v0.AuxInt = off
  4449  		v0.Aux = sym
  4450  		v0.AddArg(ptr)
  4451  		v0.AddArg(mem)
  4452  		return true
  4453  	}
  4454  	// match: (MOVHreg (ANDconst [c] x))
  4455  	// cond: c & 0x8000 == 0
  4456  	// result: (ANDconst [c&0x7fff] x)
  4457  	for {
  4458  		v_0 := v.Args[0]
  4459  		if v_0.Op != OpMIPSANDconst {
  4460  			break
  4461  		}
  4462  		c := v_0.AuxInt
  4463  		x := v_0.Args[0]
  4464  		if !(c&0x8000 == 0) {
  4465  			break
  4466  		}
  4467  		v.reset(OpMIPSANDconst)
  4468  		v.AuxInt = c & 0x7fff
  4469  		v.AddArg(x)
  4470  		return true
  4471  	}
  4472  	// match: (MOVHreg  (MOVWconst [c]))
  4473  	// cond:
  4474  	// result: (MOVWconst [int64(int16(c))])
  4475  	for {
  4476  		v_0 := v.Args[0]
  4477  		if v_0.Op != OpMIPSMOVWconst {
  4478  			break
  4479  		}
  4480  		c := v_0.AuxInt
  4481  		v.reset(OpMIPSMOVWconst)
  4482  		v.AuxInt = int64(int16(c))
  4483  		return true
  4484  	}
  4485  	return false
  4486  }
  4487  func rewriteValueMIPS_OpMIPSMOVHstore(v *Value, config *Config) bool {
  4488  	b := v.Block
  4489  	_ = b
  4490  	// match: (MOVHstore [off1] {sym} x:(ADDconst [off2] ptr) val mem)
  4491  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4492  	// result: (MOVHstore [off1+off2] {sym} ptr val mem)
  4493  	for {
  4494  		off1 := v.AuxInt
  4495  		sym := v.Aux
  4496  		x := v.Args[0]
  4497  		if x.Op != OpMIPSADDconst {
  4498  			break
  4499  		}
  4500  		off2 := x.AuxInt
  4501  		ptr := x.Args[0]
  4502  		val := v.Args[1]
  4503  		mem := v.Args[2]
  4504  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4505  			break
  4506  		}
  4507  		v.reset(OpMIPSMOVHstore)
  4508  		v.AuxInt = off1 + off2
  4509  		v.Aux = sym
  4510  		v.AddArg(ptr)
  4511  		v.AddArg(val)
  4512  		v.AddArg(mem)
  4513  		return true
  4514  	}
  4515  	// match: (MOVHstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem)
  4516  	// cond: canMergeSym(sym1,sym2)
  4517  	// result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
  4518  	for {
  4519  		off1 := v.AuxInt
  4520  		sym1 := v.Aux
  4521  		v_0 := v.Args[0]
  4522  		if v_0.Op != OpMIPSMOVWaddr {
  4523  			break
  4524  		}
  4525  		off2 := v_0.AuxInt
  4526  		sym2 := v_0.Aux
  4527  		ptr := v_0.Args[0]
  4528  		val := v.Args[1]
  4529  		mem := v.Args[2]
  4530  		if !(canMergeSym(sym1, sym2)) {
  4531  			break
  4532  		}
  4533  		v.reset(OpMIPSMOVHstore)
  4534  		v.AuxInt = off1 + off2
  4535  		v.Aux = mergeSym(sym1, sym2)
  4536  		v.AddArg(ptr)
  4537  		v.AddArg(val)
  4538  		v.AddArg(mem)
  4539  		return true
  4540  	}
  4541  	// match: (MOVHstore [off] {sym} ptr (MOVWconst [0]) mem)
  4542  	// cond:
  4543  	// result: (MOVHstorezero [off] {sym} ptr mem)
  4544  	for {
  4545  		off := v.AuxInt
  4546  		sym := v.Aux
  4547  		ptr := v.Args[0]
  4548  		v_1 := v.Args[1]
  4549  		if v_1.Op != OpMIPSMOVWconst {
  4550  			break
  4551  		}
  4552  		if v_1.AuxInt != 0 {
  4553  			break
  4554  		}
  4555  		mem := v.Args[2]
  4556  		v.reset(OpMIPSMOVHstorezero)
  4557  		v.AuxInt = off
  4558  		v.Aux = sym
  4559  		v.AddArg(ptr)
  4560  		v.AddArg(mem)
  4561  		return true
  4562  	}
  4563  	// match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
  4564  	// cond:
  4565  	// result: (MOVHstore [off] {sym} ptr x mem)
  4566  	for {
  4567  		off := v.AuxInt
  4568  		sym := v.Aux
  4569  		ptr := v.Args[0]
  4570  		v_1 := v.Args[1]
  4571  		if v_1.Op != OpMIPSMOVHreg {
  4572  			break
  4573  		}
  4574  		x := v_1.Args[0]
  4575  		mem := v.Args[2]
  4576  		v.reset(OpMIPSMOVHstore)
  4577  		v.AuxInt = off
  4578  		v.Aux = sym
  4579  		v.AddArg(ptr)
  4580  		v.AddArg(x)
  4581  		v.AddArg(mem)
  4582  		return true
  4583  	}
  4584  	// match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
  4585  	// cond:
  4586  	// result: (MOVHstore [off] {sym} ptr x mem)
  4587  	for {
  4588  		off := v.AuxInt
  4589  		sym := v.Aux
  4590  		ptr := v.Args[0]
  4591  		v_1 := v.Args[1]
  4592  		if v_1.Op != OpMIPSMOVHUreg {
  4593  			break
  4594  		}
  4595  		x := v_1.Args[0]
  4596  		mem := v.Args[2]
  4597  		v.reset(OpMIPSMOVHstore)
  4598  		v.AuxInt = off
  4599  		v.Aux = sym
  4600  		v.AddArg(ptr)
  4601  		v.AddArg(x)
  4602  		v.AddArg(mem)
  4603  		return true
  4604  	}
  4605  	// match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
  4606  	// cond:
  4607  	// result: (MOVHstore [off] {sym} ptr x mem)
  4608  	for {
  4609  		off := v.AuxInt
  4610  		sym := v.Aux
  4611  		ptr := v.Args[0]
  4612  		v_1 := v.Args[1]
  4613  		if v_1.Op != OpMIPSMOVWreg {
  4614  			break
  4615  		}
  4616  		x := v_1.Args[0]
  4617  		mem := v.Args[2]
  4618  		v.reset(OpMIPSMOVHstore)
  4619  		v.AuxInt = off
  4620  		v.Aux = sym
  4621  		v.AddArg(ptr)
  4622  		v.AddArg(x)
  4623  		v.AddArg(mem)
  4624  		return true
  4625  	}
  4626  	return false
  4627  }
  4628  func rewriteValueMIPS_OpMIPSMOVHstorezero(v *Value, config *Config) bool {
  4629  	b := v.Block
  4630  	_ = b
  4631  	// match: (MOVHstorezero [off1] {sym} x:(ADDconst [off2] ptr) mem)
  4632  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4633  	// result: (MOVHstorezero [off1+off2] {sym} ptr mem)
  4634  	for {
  4635  		off1 := v.AuxInt
  4636  		sym := v.Aux
  4637  		x := v.Args[0]
  4638  		if x.Op != OpMIPSADDconst {
  4639  			break
  4640  		}
  4641  		off2 := x.AuxInt
  4642  		ptr := x.Args[0]
  4643  		mem := v.Args[1]
  4644  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4645  			break
  4646  		}
  4647  		v.reset(OpMIPSMOVHstorezero)
  4648  		v.AuxInt = off1 + off2
  4649  		v.Aux = sym
  4650  		v.AddArg(ptr)
  4651  		v.AddArg(mem)
  4652  		return true
  4653  	}
  4654  	// match: (MOVHstorezero [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  4655  	// cond: canMergeSym(sym1,sym2)
  4656  	// result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4657  	for {
  4658  		off1 := v.AuxInt
  4659  		sym1 := v.Aux
  4660  		v_0 := v.Args[0]
  4661  		if v_0.Op != OpMIPSMOVWaddr {
  4662  			break
  4663  		}
  4664  		off2 := v_0.AuxInt
  4665  		sym2 := v_0.Aux
  4666  		ptr := v_0.Args[0]
  4667  		mem := v.Args[1]
  4668  		if !(canMergeSym(sym1, sym2)) {
  4669  			break
  4670  		}
  4671  		v.reset(OpMIPSMOVHstorezero)
  4672  		v.AuxInt = off1 + off2
  4673  		v.Aux = mergeSym(sym1, sym2)
  4674  		v.AddArg(ptr)
  4675  		v.AddArg(mem)
  4676  		return true
  4677  	}
  4678  	return false
  4679  }
  4680  func rewriteValueMIPS_OpMIPSMOVWload(v *Value, config *Config) bool {
  4681  	b := v.Block
  4682  	_ = b
  4683  	// match: (MOVWload  [off1] {sym} x:(ADDconst [off2] ptr) mem)
  4684  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4685  	// result: (MOVWload  [off1+off2] {sym} ptr mem)
  4686  	for {
  4687  		off1 := v.AuxInt
  4688  		sym := v.Aux
  4689  		x := v.Args[0]
  4690  		if x.Op != OpMIPSADDconst {
  4691  			break
  4692  		}
  4693  		off2 := x.AuxInt
  4694  		ptr := x.Args[0]
  4695  		mem := v.Args[1]
  4696  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4697  			break
  4698  		}
  4699  		v.reset(OpMIPSMOVWload)
  4700  		v.AuxInt = off1 + off2
  4701  		v.Aux = sym
  4702  		v.AddArg(ptr)
  4703  		v.AddArg(mem)
  4704  		return true
  4705  	}
  4706  	// match: (MOVWload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  4707  	// cond: canMergeSym(sym1,sym2)
  4708  	// result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4709  	for {
  4710  		off1 := v.AuxInt
  4711  		sym1 := v.Aux
  4712  		v_0 := v.Args[0]
  4713  		if v_0.Op != OpMIPSMOVWaddr {
  4714  			break
  4715  		}
  4716  		off2 := v_0.AuxInt
  4717  		sym2 := v_0.Aux
  4718  		ptr := v_0.Args[0]
  4719  		mem := v.Args[1]
  4720  		if !(canMergeSym(sym1, sym2)) {
  4721  			break
  4722  		}
  4723  		v.reset(OpMIPSMOVWload)
  4724  		v.AuxInt = off1 + off2
  4725  		v.Aux = mergeSym(sym1, sym2)
  4726  		v.AddArg(ptr)
  4727  		v.AddArg(mem)
  4728  		return true
  4729  	}
  4730  	// match: (MOVWload [off] {sym} ptr (MOVWstore [off2] {sym2} ptr2 x _))
  4731  	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  4732  	// result: x
  4733  	for {
  4734  		off := v.AuxInt
  4735  		sym := v.Aux
  4736  		ptr := v.Args[0]
  4737  		v_1 := v.Args[1]
  4738  		if v_1.Op != OpMIPSMOVWstore {
  4739  			break
  4740  		}
  4741  		off2 := v_1.AuxInt
  4742  		sym2 := v_1.Aux
  4743  		ptr2 := v_1.Args[0]
  4744  		x := v_1.Args[1]
  4745  		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  4746  			break
  4747  		}
  4748  		v.reset(OpCopy)
  4749  		v.Type = x.Type
  4750  		v.AddArg(x)
  4751  		return true
  4752  	}
  4753  	return false
  4754  }
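// On 32-bit MIPS a MOVWreg is just a register-width move. When its argument
// has no other uses it is replaced by MOVWnop, a pseudo-op the backend can
// coalesce away, and a constant argument passes through unchanged.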
  4755  func rewriteValueMIPS_OpMIPSMOVWreg(v *Value, config *Config) bool {
  4756  	b := v.Block
  4757  	_ = b
  4758  	// match: (MOVWreg x)
  4759  	// cond: x.Uses == 1
  4760  	// result: (MOVWnop x)
  4761  	for {
  4762  		x := v.Args[0]
  4763  		if !(x.Uses == 1) {
  4764  			break
  4765  		}
  4766  		v.reset(OpMIPSMOVWnop)
  4767  		v.AddArg(x)
  4768  		return true
  4769  	}
  4770  	// match: (MOVWreg  (MOVWconst [c]))
  4771  	// cond:
  4772  	// result: (MOVWconst [c])
  4773  	for {
  4774  		v_0 := v.Args[0]
  4775  		if v_0.Op != OpMIPSMOVWconst {
  4776  			break
  4777  		}
  4778  		c := v_0.AuxInt
  4779  		v.reset(OpMIPSMOVWconst)
  4780  		v.AuxInt = c
  4781  		return true
  4782  	}
  4783  	return false
  4784  }
  4785  func rewriteValueMIPS_OpMIPSMOVWstore(v *Value, config *Config) bool {
  4786  	b := v.Block
  4787  	_ = b
  4788  	// match: (MOVWstore [off1] {sym} x:(ADDconst [off2] ptr) val mem)
  4789  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4790  	// result: (MOVWstore [off1+off2] {sym} ptr val mem)
  4791  	for {
  4792  		off1 := v.AuxInt
  4793  		sym := v.Aux
  4794  		x := v.Args[0]
  4795  		if x.Op != OpMIPSADDconst {
  4796  			break
  4797  		}
  4798  		off2 := x.AuxInt
  4799  		ptr := x.Args[0]
  4800  		val := v.Args[1]
  4801  		mem := v.Args[2]
  4802  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4803  			break
  4804  		}
  4805  		v.reset(OpMIPSMOVWstore)
  4806  		v.AuxInt = off1 + off2
  4807  		v.Aux = sym
  4808  		v.AddArg(ptr)
  4809  		v.AddArg(val)
  4810  		v.AddArg(mem)
  4811  		return true
  4812  	}
  4813  	// match: (MOVWstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem)
  4814  	// cond: canMergeSym(sym1,sym2)
  4815  	// result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
  4816  	for {
  4817  		off1 := v.AuxInt
  4818  		sym1 := v.Aux
  4819  		v_0 := v.Args[0]
  4820  		if v_0.Op != OpMIPSMOVWaddr {
  4821  			break
  4822  		}
  4823  		off2 := v_0.AuxInt
  4824  		sym2 := v_0.Aux
  4825  		ptr := v_0.Args[0]
  4826  		val := v.Args[1]
  4827  		mem := v.Args[2]
  4828  		if !(canMergeSym(sym1, sym2)) {
  4829  			break
  4830  		}
  4831  		v.reset(OpMIPSMOVWstore)
  4832  		v.AuxInt = off1 + off2
  4833  		v.Aux = mergeSym(sym1, sym2)
  4834  		v.AddArg(ptr)
  4835  		v.AddArg(val)
  4836  		v.AddArg(mem)
  4837  		return true
  4838  	}
  4839  	// match: (MOVWstore [off] {sym} ptr (MOVWconst [0]) mem)
  4840  	// cond:
  4841  	// result: (MOVWstorezero [off] {sym} ptr mem)
  4842  	for {
  4843  		off := v.AuxInt
  4844  		sym := v.Aux
  4845  		ptr := v.Args[0]
  4846  		v_1 := v.Args[1]
  4847  		if v_1.Op != OpMIPSMOVWconst {
  4848  			break
  4849  		}
  4850  		if v_1.AuxInt != 0 {
  4851  			break
  4852  		}
  4853  		mem := v.Args[2]
  4854  		v.reset(OpMIPSMOVWstorezero)
  4855  		v.AuxInt = off
  4856  		v.Aux = sym
  4857  		v.AddArg(ptr)
  4858  		v.AddArg(mem)
  4859  		return true
  4860  	}
  4861  	// match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
  4862  	// cond:
  4863  	// result: (MOVWstore [off] {sym} ptr x mem)
  4864  	for {
  4865  		off := v.AuxInt
  4866  		sym := v.Aux
  4867  		ptr := v.Args[0]
  4868  		v_1 := v.Args[1]
  4869  		if v_1.Op != OpMIPSMOVWreg {
  4870  			break
  4871  		}
  4872  		x := v_1.Args[0]
  4873  		mem := v.Args[2]
  4874  		v.reset(OpMIPSMOVWstore)
  4875  		v.AuxInt = off
  4876  		v.Aux = sym
  4877  		v.AddArg(ptr)
  4878  		v.AddArg(x)
  4879  		v.AddArg(mem)
  4880  		return true
  4881  	}
  4882  	return false
  4883  }
  4884  func rewriteValueMIPS_OpMIPSMOVWstorezero(v *Value, config *Config) bool {
  4885  	b := v.Block
  4886  	_ = b
  4887  	// match: (MOVWstorezero [off1] {sym} x:(ADDconst [off2] ptr) mem)
  4888  	// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4889  	// result: (MOVWstorezero [off1+off2] {sym} ptr mem)
  4890  	for {
  4891  		off1 := v.AuxInt
  4892  		sym := v.Aux
  4893  		x := v.Args[0]
  4894  		if x.Op != OpMIPSADDconst {
  4895  			break
  4896  		}
  4897  		off2 := x.AuxInt
  4898  		ptr := x.Args[0]
  4899  		mem := v.Args[1]
  4900  		if !(is16Bit(off1+off2) || x.Uses == 1) {
  4901  			break
  4902  		}
  4903  		v.reset(OpMIPSMOVWstorezero)
  4904  		v.AuxInt = off1 + off2
  4905  		v.Aux = sym
  4906  		v.AddArg(ptr)
  4907  		v.AddArg(mem)
  4908  		return true
  4909  	}
  4910  	// match: (MOVWstorezero [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  4911  	// cond: canMergeSym(sym1,sym2)
  4912  	// result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4913  	for {
  4914  		off1 := v.AuxInt
  4915  		sym1 := v.Aux
  4916  		v_0 := v.Args[0]
  4917  		if v_0.Op != OpMIPSMOVWaddr {
  4918  			break
  4919  		}
  4920  		off2 := v_0.AuxInt
  4921  		sym2 := v_0.Aux
  4922  		ptr := v_0.Args[0]
  4923  		mem := v.Args[1]
  4924  		if !(canMergeSym(sym1, sym2)) {
  4925  			break
  4926  		}
  4927  		v.reset(OpMIPSMOVWstorezero)
  4928  		v.AuxInt = off1 + off2
  4929  		v.Aux = mergeSym(sym1, sym2)
  4930  		v.AddArg(ptr)
  4931  		v.AddArg(mem)
  4932  		return true
  4933  	}
  4934  	return false
  4935  }
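// The MUL rules perform constant strength reduction: multiplication by 0, 1
// or -1 becomes a constant, a copy or a NEG respectively; multiplication by a
// power of two becomes a left shift, e.g. (MUL (MOVWconst [8]) x) rewrites to
// (SLLconst [3] x); and a product of two constants is folded at compile time
// with 32-bit wraparound.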
  4936  func rewriteValueMIPS_OpMIPSMUL(v *Value, config *Config) bool {
  4937  	b := v.Block
  4938  	_ = b
  4939  	// match: (MUL (MOVWconst [0]) _ )
  4940  	// cond:
  4941  	// result: (MOVWconst [0])
  4942  	for {
  4943  		v_0 := v.Args[0]
  4944  		if v_0.Op != OpMIPSMOVWconst {
  4945  			break
  4946  		}
  4947  		if v_0.AuxInt != 0 {
  4948  			break
  4949  		}
  4950  		v.reset(OpMIPSMOVWconst)
  4951  		v.AuxInt = 0
  4952  		return true
  4953  	}
  4954  	// match: (MUL (MOVWconst [1]) x )
  4955  	// cond:
  4956  	// result: x
  4957  	for {
  4958  		v_0 := v.Args[0]
  4959  		if v_0.Op != OpMIPSMOVWconst {
  4960  			break
  4961  		}
  4962  		if v_0.AuxInt != 1 {
  4963  			break
  4964  		}
  4965  		x := v.Args[1]
  4966  		v.reset(OpCopy)
  4967  		v.Type = x.Type
  4968  		v.AddArg(x)
  4969  		return true
  4970  	}
  4971  	// match: (MUL (MOVWconst [-1]) x )
  4972  	// cond:
  4973  	// result: (NEG x)
  4974  	for {
  4975  		v_0 := v.Args[0]
  4976  		if v_0.Op != OpMIPSMOVWconst {
  4977  			break
  4978  		}
  4979  		if v_0.AuxInt != -1 {
  4980  			break
  4981  		}
  4982  		x := v.Args[1]
  4983  		v.reset(OpMIPSNEG)
  4984  		v.AddArg(x)
  4985  		return true
  4986  	}
  4987  	// match: (MUL (MOVWconst [c]) x )
  4988  	// cond: isPowerOfTwo(int64(uint32(c)))
  4989  	// result: (SLLconst [log2(int64(uint32(c)))] x)
  4990  	for {
  4991  		v_0 := v.Args[0]
  4992  		if v_0.Op != OpMIPSMOVWconst {
  4993  			break
  4994  		}
  4995  		c := v_0.AuxInt
  4996  		x := v.Args[1]
  4997  		if !(isPowerOfTwo(int64(uint32(c)))) {
  4998  			break
  4999  		}
  5000  		v.reset(OpMIPSSLLconst)
  5001  		v.AuxInt = log2(int64(uint32(c)))
  5002  		v.AddArg(x)
  5003  		return true
  5004  	}
  5005  	// match: (MUL (MOVWconst [c]) (MOVWconst [d]))
  5006  	// cond:
  5007  	// result: (MOVWconst [int64(int32(c)*int32(d))])
  5008  	for {
  5009  		v_0 := v.Args[0]
  5010  		if v_0.Op != OpMIPSMOVWconst {
  5011  			break
  5012  		}
  5013  		c := v_0.AuxInt
  5014  		v_1 := v.Args[1]
  5015  		if v_1.Op != OpMIPSMOVWconst {
  5016  			break
  5017  		}
  5018  		d := v_1.AuxInt
  5019  		v.reset(OpMIPSMOVWconst)
  5020  		v.AuxInt = int64(int32(c) * int32(d))
  5021  		return true
  5022  	}
  5023  	return false
  5024  }
  5025  func rewriteValueMIPS_OpMIPSNEG(v *Value, config *Config) bool {
  5026  	b := v.Block
  5027  	_ = b
  5028  	// match: (NEG (MOVWconst [c]))
  5029  	// cond:
  5030  	// result: (MOVWconst [int64(int32(-c))])
  5031  	for {
  5032  		v_0 := v.Args[0]
  5033  		if v_0.Op != OpMIPSMOVWconst {
  5034  			break
  5035  		}
  5036  		c := v_0.AuxInt
  5037  		v.reset(OpMIPSMOVWconst)
  5038  		v.AuxInt = int64(int32(-c))
  5039  		return true
  5040  	}
  5041  	return false
  5042  }
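// NOR with a constant operand on either side is canonicalized to NORconst,
// and NORconst of a constant folds to ^(c|d) below.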
  5043  func rewriteValueMIPS_OpMIPSNOR(v *Value, config *Config) bool {
  5044  	b := v.Block
  5045  	_ = b
  5046  	// match: (NOR (MOVWconst [c]) x)
  5047  	// cond:
  5048  	// result: (NORconst [c] x)
  5049  	for {
  5050  		v_0 := v.Args[0]
  5051  		if v_0.Op != OpMIPSMOVWconst {
  5052  			break
  5053  		}
  5054  		c := v_0.AuxInt
  5055  		x := v.Args[1]
  5056  		v.reset(OpMIPSNORconst)
  5057  		v.AuxInt = c
  5058  		v.AddArg(x)
  5059  		return true
  5060  	}
  5061  	// match: (NOR x (MOVWconst [c]))
  5062  	// cond:
  5063  	// result: (NORconst [c] x)
  5064  	for {
  5065  		x := v.Args[0]
  5066  		v_1 := v.Args[1]
  5067  		if v_1.Op != OpMIPSMOVWconst {
  5068  			break
  5069  		}
  5070  		c := v_1.AuxInt
  5071  		v.reset(OpMIPSNORconst)
  5072  		v.AuxInt = c
  5073  		v.AddArg(x)
  5074  		return true
  5075  	}
  5076  	return false
  5077  }
  5078  func rewriteValueMIPS_OpMIPSNORconst(v *Value, config *Config) bool {
  5079  	b := v.Block
  5080  	_ = b
  5081  	// match: (NORconst [c] (MOVWconst [d]))
  5082  	// cond:
  5083  	// result: (MOVWconst [^(c|d)])
  5084  	for {
  5085  		c := v.AuxInt
  5086  		v_0 := v.Args[0]
  5087  		if v_0.Op != OpMIPSMOVWconst {
  5088  			break
  5089  		}
  5090  		d := v_0.AuxInt
  5091  		v.reset(OpMIPSMOVWconst)
  5092  		v.AuxInt = ^(c | d)
  5093  		return true
  5094  	}
  5095  	return false
  5096  }
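// OR folds a constant operand (in either position) into ORconst, simplifies
// x|x to x, and merges two SGTUzero flags: since each SGTUzero result is
// 0 or 1, (x != 0) || (y != 0) is the same as (x|y) != 0.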
  5097  func rewriteValueMIPS_OpMIPSOR(v *Value, config *Config) bool {
  5098  	b := v.Block
  5099  	_ = b
  5100  	// match: (OR  (MOVWconst [c]) x)
  5101  	// cond:
  5102  	// result: (ORconst  [c] x)
  5103  	for {
  5104  		v_0 := v.Args[0]
  5105  		if v_0.Op != OpMIPSMOVWconst {
  5106  			break
  5107  		}
  5108  		c := v_0.AuxInt
  5109  		x := v.Args[1]
  5110  		v.reset(OpMIPSORconst)
  5111  		v.AuxInt = c
  5112  		v.AddArg(x)
  5113  		return true
  5114  	}
  5115  	// match: (OR  x (MOVWconst [c]))
  5116  	// cond:
  5117  	// result: (ORconst  [c] x)
  5118  	for {
  5119  		x := v.Args[0]
  5120  		v_1 := v.Args[1]
  5121  		if v_1.Op != OpMIPSMOVWconst {
  5122  			break
  5123  		}
  5124  		c := v_1.AuxInt
  5125  		v.reset(OpMIPSORconst)
  5126  		v.AuxInt = c
  5127  		v.AddArg(x)
  5128  		return true
  5129  	}
  5130  	// match: (OR  x x)
  5131  	// cond:
  5132  	// result: x
  5133  	for {
  5134  		x := v.Args[0]
  5135  		if x != v.Args[1] {
  5136  			break
  5137  		}
  5138  		v.reset(OpCopy)
  5139  		v.Type = x.Type
  5140  		v.AddArg(x)
  5141  		return true
  5142  	}
  5143  	// match: (OR (SGTUzero x) (SGTUzero y))
  5144  	// cond:
  5145  	// result: (SGTUzero (OR <x.Type> x y))
  5146  	for {
  5147  		v_0 := v.Args[0]
  5148  		if v_0.Op != OpMIPSSGTUzero {
  5149  			break
  5150  		}
  5151  		x := v_0.Args[0]
  5152  		v_1 := v.Args[1]
  5153  		if v_1.Op != OpMIPSSGTUzero {
  5154  			break
  5155  		}
  5156  		y := v_1.Args[0]
  5157  		v.reset(OpMIPSSGTUzero)
  5158  		v0 := b.NewValue0(v.Line, OpMIPSOR, x.Type)
  5159  		v0.AddArg(x)
  5160  		v0.AddArg(y)
  5161  		v.AddArg(v0)
  5162  		return true
  5163  	}
  5164  	return false
  5165  }
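// ORconst applies the usual identities: or-with-0 is a no-op, or-with--1 is
// all ones, a constant operand folds to c|d, and nested ORconst masks merge.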
  5166  func rewriteValueMIPS_OpMIPSORconst(v *Value, config *Config) bool {
  5167  	b := v.Block
  5168  	_ = b
  5169  	// match: (ORconst  [0]  x)
  5170  	// cond:
  5171  	// result: x
  5172  	for {
  5173  		if v.AuxInt != 0 {
  5174  			break
  5175  		}
  5176  		x := v.Args[0]
  5177  		v.reset(OpCopy)
  5178  		v.Type = x.Type
  5179  		v.AddArg(x)
  5180  		return true
  5181  	}
  5182  	// match: (ORconst  [-1] _)
  5183  	// cond:
  5184  	// result: (MOVWconst [-1])
  5185  	for {
  5186  		if v.AuxInt != -1 {
  5187  			break
  5188  		}
  5189  		v.reset(OpMIPSMOVWconst)
  5190  		v.AuxInt = -1
  5191  		return true
  5192  	}
  5193  	// match: (ORconst [c] (MOVWconst [d]))
  5194  	// cond:
  5195  	// result: (MOVWconst [c|d])
  5196  	for {
  5197  		c := v.AuxInt
  5198  		v_0 := v.Args[0]
  5199  		if v_0.Op != OpMIPSMOVWconst {
  5200  			break
  5201  		}
  5202  		d := v_0.AuxInt
  5203  		v.reset(OpMIPSMOVWconst)
  5204  		v.AuxInt = c | d
  5205  		return true
  5206  	}
  5207  	// match: (ORconst [c] (ORconst [d] x))
  5208  	// cond:
  5209  	// result: (ORconst [c|d] x)
  5210  	for {
  5211  		c := v.AuxInt
  5212  		v_0 := v.Args[0]
  5213  		if v_0.Op != OpMIPSORconst {
  5214  			break
  5215  		}
  5216  		d := v_0.AuxInt
  5217  		x := v_0.Args[0]
  5218  		v.reset(OpMIPSORconst)
  5219  		v.AuxInt = c | d
  5220  		v.AddArg(x)
  5221  		return true
  5222  	}
  5223  	return false
  5224  }
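// SGT and SGTU move a constant left operand into SGTconst/SGTUconst and
// recognize a comparison against a right-hand zero as SGTzero/SGTUzero,
// which the constant-range rules below can then simplify further.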
  5225  func rewriteValueMIPS_OpMIPSSGT(v *Value, config *Config) bool {
  5226  	b := v.Block
  5227  	_ = b
  5228  	// match: (SGT  (MOVWconst [c]) x)
  5229  	// cond:
  5230  	// result: (SGTconst  [c] x)
  5231  	for {
  5232  		v_0 := v.Args[0]
  5233  		if v_0.Op != OpMIPSMOVWconst {
  5234  			break
  5235  		}
  5236  		c := v_0.AuxInt
  5237  		x := v.Args[1]
  5238  		v.reset(OpMIPSSGTconst)
  5239  		v.AuxInt = c
  5240  		v.AddArg(x)
  5241  		return true
  5242  	}
  5243  	// match: (SGT x (MOVWconst [0]))
  5244  	// cond:
  5245  	// result: (SGTzero x)
  5246  	for {
  5247  		x := v.Args[0]
  5248  		v_1 := v.Args[1]
  5249  		if v_1.Op != OpMIPSMOVWconst {
  5250  			break
  5251  		}
  5252  		if v_1.AuxInt != 0 {
  5253  			break
  5254  		}
  5255  		v.reset(OpMIPSSGTzero)
  5256  		v.AddArg(x)
  5257  		return true
  5258  	}
  5259  	return false
  5260  }
  5261  func rewriteValueMIPS_OpMIPSSGTU(v *Value, config *Config) bool {
  5262  	b := v.Block
  5263  	_ = b
  5264  	// match: (SGTU (MOVWconst [c]) x)
  5265  	// cond:
  5266  	// result: (SGTUconst [c] x)
  5267  	for {
  5268  		v_0 := v.Args[0]
  5269  		if v_0.Op != OpMIPSMOVWconst {
  5270  			break
  5271  		}
  5272  		c := v_0.AuxInt
  5273  		x := v.Args[1]
  5274  		v.reset(OpMIPSSGTUconst)
  5275  		v.AuxInt = c
  5276  		v.AddArg(x)
  5277  		return true
  5278  	}
  5279  	// match: (SGTU x (MOVWconst [0]))
  5280  	// cond:
  5281  	// result: (SGTUzero x)
  5282  	for {
  5283  		x := v.Args[0]
  5284  		v_1 := v.Args[1]
  5285  		if v_1.Op != OpMIPSMOVWconst {
  5286  			break
  5287  		}
  5288  		if v_1.AuxInt != 0 {
  5289  			break
  5290  		}
  5291  		v.reset(OpMIPSSGTUzero)
  5292  		v.AddArg(x)
  5293  		return true
  5294  	}
  5295  	return false
  5296  }
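// SGTUconst [c] evaluates an unsigned "c > x" at compile time when x is
// bounded: two constants compare directly, a zero-extended byte is at most
// 0xff, a zero-extended halfword at most 0xffff, an ANDconst result at most
// its mask, and an SRLconst-by-d result below 1<<(32-d). For example,
// (SGTUconst [0x100] (MOVBUreg _)) is always 1.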
  5297  func rewriteValueMIPS_OpMIPSSGTUconst(v *Value, config *Config) bool {
  5298  	b := v.Block
  5299  	_ = b
  5300  	// match: (SGTUconst [c] (MOVWconst [d]))
  5301  	// cond: uint32(c)>uint32(d)
  5302  	// result: (MOVWconst [1])
  5303  	for {
  5304  		c := v.AuxInt
  5305  		v_0 := v.Args[0]
  5306  		if v_0.Op != OpMIPSMOVWconst {
  5307  			break
  5308  		}
  5309  		d := v_0.AuxInt
  5310  		if !(uint32(c) > uint32(d)) {
  5311  			break
  5312  		}
  5313  		v.reset(OpMIPSMOVWconst)
  5314  		v.AuxInt = 1
  5315  		return true
  5316  	}
  5317  	// match: (SGTUconst [c] (MOVWconst [d]))
  5318  	// cond: uint32(c)<=uint32(d)
  5319  	// result: (MOVWconst [0])
  5320  	for {
  5321  		c := v.AuxInt
  5322  		v_0 := v.Args[0]
  5323  		if v_0.Op != OpMIPSMOVWconst {
  5324  			break
  5325  		}
  5326  		d := v_0.AuxInt
  5327  		if !(uint32(c) <= uint32(d)) {
  5328  			break
  5329  		}
  5330  		v.reset(OpMIPSMOVWconst)
  5331  		v.AuxInt = 0
  5332  		return true
  5333  	}
  5334  	// match: (SGTUconst [c] (MOVBUreg _))
  5335  	// cond: 0xff < uint32(c)
  5336  	// result: (MOVWconst [1])
  5337  	for {
  5338  		c := v.AuxInt
  5339  		v_0 := v.Args[0]
  5340  		if v_0.Op != OpMIPSMOVBUreg {
  5341  			break
  5342  		}
  5343  		if !(0xff < uint32(c)) {
  5344  			break
  5345  		}
  5346  		v.reset(OpMIPSMOVWconst)
  5347  		v.AuxInt = 1
  5348  		return true
  5349  	}
  5350  	// match: (SGTUconst [c] (MOVHUreg _))
  5351  	// cond: 0xffff < uint32(c)
  5352  	// result: (MOVWconst [1])
  5353  	for {
  5354  		c := v.AuxInt
  5355  		v_0 := v.Args[0]
  5356  		if v_0.Op != OpMIPSMOVHUreg {
  5357  			break
  5358  		}
  5359  		if !(0xffff < uint32(c)) {
  5360  			break
  5361  		}
  5362  		v.reset(OpMIPSMOVWconst)
  5363  		v.AuxInt = 1
  5364  		return true
  5365  	}
  5366  	// match: (SGTUconst [c] (ANDconst [m] _))
  5367  	// cond: uint32(m) < uint32(c)
  5368  	// result: (MOVWconst [1])
  5369  	for {
  5370  		c := v.AuxInt
  5371  		v_0 := v.Args[0]
  5372  		if v_0.Op != OpMIPSANDconst {
  5373  			break
  5374  		}
  5375  		m := v_0.AuxInt
  5376  		if !(uint32(m) < uint32(c)) {
  5377  			break
  5378  		}
  5379  		v.reset(OpMIPSMOVWconst)
  5380  		v.AuxInt = 1
  5381  		return true
  5382  	}
  5383  	// match: (SGTUconst [c] (SRLconst _ [d]))
  5384  	// cond: uint32(d) <= 31 && 1<<(32-uint32(d)) <= uint32(c)
  5385  	// result: (MOVWconst [1])
  5386  	for {
  5387  		c := v.AuxInt
  5388  		v_0 := v.Args[0]
  5389  		if v_0.Op != OpMIPSSRLconst {
  5390  			break
  5391  		}
  5392  		d := v_0.AuxInt
  5393  		if !(uint32(d) <= 31 && 1<<(32-uint32(d)) <= uint32(c)) {
  5394  			break
  5395  		}
  5396  		v.reset(OpMIPSMOVWconst)
  5397  		v.AuxInt = 1
  5398  		return true
  5399  	}
  5400  	return false
  5401  }
  5402  func rewriteValueMIPS_OpMIPSSGTUzero(v *Value, config *Config) bool {
  5403  	b := v.Block
  5404  	_ = b
  5405  	// match: (SGTUzero (MOVWconst [d]))
  5406  	// cond: uint32(d) != 0
  5407  	// result: (MOVWconst [1])
  5408  	for {
  5409  		v_0 := v.Args[0]
  5410  		if v_0.Op != OpMIPSMOVWconst {
  5411  			break
  5412  		}
  5413  		d := v_0.AuxInt
  5414  		if !(uint32(d) != 0) {
  5415  			break
  5416  		}
  5417  		v.reset(OpMIPSMOVWconst)
  5418  		v.AuxInt = 1
  5419  		return true
  5420  	}
  5421  	// match: (SGTUzero (MOVWconst [d]))
  5422  	// cond: uint32(d) == 0
  5423  	// result: (MOVWconst [0])
  5424  	for {
  5425  		v_0 := v.Args[0]
  5426  		if v_0.Op != OpMIPSMOVWconst {
  5427  			break
  5428  		}
  5429  		d := v_0.AuxInt
  5430  		if !(uint32(d) == 0) {
  5431  			break
  5432  		}
  5433  		v.reset(OpMIPSMOVWconst)
  5434  		v.AuxInt = 0
  5435  		return true
  5436  	}
  5437  	return false
  5438  }
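// SGTconst is the signed counterpart: besides folding two constants, it uses
// the value ranges implied by MOVBreg ([-0x80,0x7f]), MOVBUreg ([0,0xff]),
// MOVHreg ([-0x8000,0x7fff]), MOVHUreg ([0,0xffff]), ANDconst and SRLconst to
// decide comparisons whose outcome cannot vary, in both the always-1 and
// (for the extension ops) always-0 directions.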
  5439  func rewriteValueMIPS_OpMIPSSGTconst(v *Value, config *Config) bool {
  5440  	b := v.Block
  5441  	_ = b
  5442  	// match: (SGTconst [c] (MOVWconst [d]))
  5443  	// cond: int32(c) > int32(d)
  5444  	// result: (MOVWconst [1])
  5445  	for {
  5446  		c := v.AuxInt
  5447  		v_0 := v.Args[0]
  5448  		if v_0.Op != OpMIPSMOVWconst {
  5449  			break
  5450  		}
  5451  		d := v_0.AuxInt
  5452  		if !(int32(c) > int32(d)) {
  5453  			break
  5454  		}
  5455  		v.reset(OpMIPSMOVWconst)
  5456  		v.AuxInt = 1
  5457  		return true
  5458  	}
  5459  	// match: (SGTconst [c] (MOVWconst [d]))
  5460  	// cond: int32(c) <= int32(d)
  5461  	// result: (MOVWconst [0])
  5462  	for {
  5463  		c := v.AuxInt
  5464  		v_0 := v.Args[0]
  5465  		if v_0.Op != OpMIPSMOVWconst {
  5466  			break
  5467  		}
  5468  		d := v_0.AuxInt
  5469  		if !(int32(c) <= int32(d)) {
  5470  			break
  5471  		}
  5472  		v.reset(OpMIPSMOVWconst)
  5473  		v.AuxInt = 0
  5474  		return true
  5475  	}
  5476  	// match: (SGTconst [c] (MOVBreg _))
  5477  	// cond: 0x7f < int32(c)
  5478  	// result: (MOVWconst [1])
  5479  	for {
  5480  		c := v.AuxInt
  5481  		v_0 := v.Args[0]
  5482  		if v_0.Op != OpMIPSMOVBreg {
  5483  			break
  5484  		}
  5485  		if !(0x7f < int32(c)) {
  5486  			break
  5487  		}
  5488  		v.reset(OpMIPSMOVWconst)
  5489  		v.AuxInt = 1
  5490  		return true
  5491  	}
  5492  	// match: (SGTconst [c] (MOVBreg _))
  5493  	// cond: int32(c) <= -0x80
  5494  	// result: (MOVWconst [0])
  5495  	for {
  5496  		c := v.AuxInt
  5497  		v_0 := v.Args[0]
  5498  		if v_0.Op != OpMIPSMOVBreg {
  5499  			break
  5500  		}
  5501  		if !(int32(c) <= -0x80) {
  5502  			break
  5503  		}
  5504  		v.reset(OpMIPSMOVWconst)
  5505  		v.AuxInt = 0
  5506  		return true
  5507  	}
  5508  	// match: (SGTconst [c] (MOVBUreg _))
  5509  	// cond: 0xff < int32(c)
  5510  	// result: (MOVWconst [1])
  5511  	for {
  5512  		c := v.AuxInt
  5513  		v_0 := v.Args[0]
  5514  		if v_0.Op != OpMIPSMOVBUreg {
  5515  			break
  5516  		}
  5517  		if !(0xff < int32(c)) {
  5518  			break
  5519  		}
  5520  		v.reset(OpMIPSMOVWconst)
  5521  		v.AuxInt = 1
  5522  		return true
  5523  	}
  5524  	// match: (SGTconst [c] (MOVBUreg _))
  5525  	// cond: int32(c) < 0
  5526  	// result: (MOVWconst [0])
  5527  	for {
  5528  		c := v.AuxInt
  5529  		v_0 := v.Args[0]
  5530  		if v_0.Op != OpMIPSMOVBUreg {
  5531  			break
  5532  		}
  5533  		if !(int32(c) < 0) {
  5534  			break
  5535  		}
  5536  		v.reset(OpMIPSMOVWconst)
  5537  		v.AuxInt = 0
  5538  		return true
  5539  	}
  5540  	// match: (SGTconst [c] (MOVHreg _))
  5541  	// cond: 0x7fff < int32(c)
  5542  	// result: (MOVWconst [1])
  5543  	for {
  5544  		c := v.AuxInt
  5545  		v_0 := v.Args[0]
  5546  		if v_0.Op != OpMIPSMOVHreg {
  5547  			break
  5548  		}
  5549  		if !(0x7fff < int32(c)) {
  5550  			break
  5551  		}
  5552  		v.reset(OpMIPSMOVWconst)
  5553  		v.AuxInt = 1
  5554  		return true
  5555  	}
  5556  	// match: (SGTconst [c] (MOVHreg _))
  5557  	// cond: int32(c) <= -0x8000
  5558  	// result: (MOVWconst [0])
  5559  	for {
  5560  		c := v.AuxInt
  5561  		v_0 := v.Args[0]
  5562  		if v_0.Op != OpMIPSMOVHreg {
  5563  			break
  5564  		}
  5565  		if !(int32(c) <= -0x8000) {
  5566  			break
  5567  		}
  5568  		v.reset(OpMIPSMOVWconst)
  5569  		v.AuxInt = 0
  5570  		return true
  5571  	}
  5572  	// match: (SGTconst [c] (MOVHUreg _))
  5573  	// cond: 0xffff < int32(c)
  5574  	// result: (MOVWconst [1])
  5575  	for {
  5576  		c := v.AuxInt
  5577  		v_0 := v.Args[0]
  5578  		if v_0.Op != OpMIPSMOVHUreg {
  5579  			break
  5580  		}
  5581  		if !(0xffff < int32(c)) {
  5582  			break
  5583  		}
  5584  		v.reset(OpMIPSMOVWconst)
  5585  		v.AuxInt = 1
  5586  		return true
  5587  	}
  5588  	// match: (SGTconst [c] (MOVHUreg _))
  5589  	// cond: int32(c) < 0
  5590  	// result: (MOVWconst [0])
  5591  	for {
  5592  		c := v.AuxInt
  5593  		v_0 := v.Args[0]
  5594  		if v_0.Op != OpMIPSMOVHUreg {
  5595  			break
  5596  		}
  5597  		if !(int32(c) < 0) {
  5598  			break
  5599  		}
  5600  		v.reset(OpMIPSMOVWconst)
  5601  		v.AuxInt = 0
  5602  		return true
  5603  	}
  5604  	// match: (SGTconst [c] (ANDconst [m] _))
  5605  	// cond: 0 <= int32(m) && int32(m) < int32(c)
  5606  	// result: (MOVWconst [1])
  5607  	for {
  5608  		c := v.AuxInt
  5609  		v_0 := v.Args[0]
  5610  		if v_0.Op != OpMIPSANDconst {
  5611  			break
  5612  		}
  5613  		m := v_0.AuxInt
  5614  		if !(0 <= int32(m) && int32(m) < int32(c)) {
  5615  			break
  5616  		}
  5617  		v.reset(OpMIPSMOVWconst)
  5618  		v.AuxInt = 1
  5619  		return true
  5620  	}
  5621  	// match: (SGTconst [c] (SRLconst _ [d]))
  5622  	// cond: 0 <= int32(c) && uint32(d) <= 31 && 1<<(32-uint32(d)) <= int32(c)
  5623  	// result: (MOVWconst [1])
  5624  	for {
  5625  		c := v.AuxInt
  5626  		v_0 := v.Args[0]
  5627  		if v_0.Op != OpMIPSSRLconst {
  5628  			break
  5629  		}
  5630  		d := v_0.AuxInt
  5631  		if !(0 <= int32(c) && uint32(d) <= 31 && 1<<(32-uint32(d)) <= int32(c)) {
  5632  			break
  5633  		}
  5634  		v.reset(OpMIPSMOVWconst)
  5635  		v.AuxInt = 1
  5636  		return true
  5637  	}
  5638  	return false
  5639  }
  5640  func rewriteValueMIPS_OpMIPSSGTzero(v *Value, config *Config) bool {
  5641  	b := v.Block
  5642  	_ = b
  5643  	// match: (SGTzero (MOVWconst [d]))
  5644  	// cond: int32(d) > 0
  5645  	// result: (MOVWconst [1])
  5646  	for {
  5647  		v_0 := v.Args[0]
  5648  		if v_0.Op != OpMIPSMOVWconst {
  5649  			break
  5650  		}
  5651  		d := v_0.AuxInt
  5652  		if !(int32(d) > 0) {
  5653  			break
  5654  		}
  5655  		v.reset(OpMIPSMOVWconst)
  5656  		v.AuxInt = 1
  5657  		return true
  5658  	}
  5659  	// match: (SGTzero (MOVWconst [d]))
  5660  	// cond: int32(d) <= 0
  5661  	// result: (MOVWconst [0])
  5662  	for {
  5663  		v_0 := v.Args[0]
  5664  		if v_0.Op != OpMIPSMOVWconst {
  5665  			break
  5666  		}
  5667  		d := v_0.AuxInt
  5668  		if !(int32(d) <= 0) {
  5669  			break
  5670  		}
  5671  		v.reset(OpMIPSMOVWconst)
  5672  		v.AuxInt = 0
  5673  		return true
  5674  	}
  5675  	return false
  5676  }
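// Shifts by a constant amount are lowered to their *const forms. For SLL a
// constant shift of 32 or more is rewritten to the constant 0, reflecting
// Go's shift semantics where shifting a 32-bit value by 32 or more yields 0;
// smaller amounts become SLLconst, which SLLconst below folds when the
// operand is also constant.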
  5677  func rewriteValueMIPS_OpMIPSSLL(v *Value, config *Config) bool {
  5678  	b := v.Block
  5679  	_ = b
  5680  	// match: (SLL _ (MOVWconst [c]))
  5681  	// cond: uint32(c)>=32
  5682  	// result: (MOVWconst [0])
  5683  	for {
  5684  		v_1 := v.Args[1]
  5685  		if v_1.Op != OpMIPSMOVWconst {
  5686  			break
  5687  		}
  5688  		c := v_1.AuxInt
  5689  		if !(uint32(c) >= 32) {
  5690  			break
  5691  		}
  5692  		v.reset(OpMIPSMOVWconst)
  5693  		v.AuxInt = 0
  5694  		return true
  5695  	}
  5696  	// match: (SLL x (MOVWconst [c]))
  5697  	// cond:
  5698  	// result: (SLLconst x [c])
  5699  	for {
  5700  		x := v.Args[0]
  5701  		v_1 := v.Args[1]
  5702  		if v_1.Op != OpMIPSMOVWconst {
  5703  			break
  5704  		}
  5705  		c := v_1.AuxInt
  5706  		v.reset(OpMIPSSLLconst)
  5707  		v.AuxInt = c
  5708  		v.AddArg(x)
  5709  		return true
  5710  	}
  5711  	return false
  5712  }
  5713  func rewriteValueMIPS_OpMIPSSLLconst(v *Value, config *Config) bool {
  5714  	b := v.Block
  5715  	_ = b
  5716  	// match: (SLLconst [c] (MOVWconst [d]))
  5717  	// cond:
  5718  	// result: (MOVWconst [int64(int32(uint32(d)<<uint32(c)))])
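	// The double conversion wraps the shifted value to 32 bits and then
	// sign-extends it, matching the int64(int32(...)) form the other constant
	// folds in this file produce. Worked example: d = 1, c = 31 gives uint32
	// 0x80000000, which is stored as the AuxInt -2147483648 rather than
	// +2147483648.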
  5719  	for {
  5720  		c := v.AuxInt
  5721  		v_0 := v.Args[0]
  5722  		if v_0.Op != OpMIPSMOVWconst {
  5723  			break
  5724  		}
  5725  		d := v_0.AuxInt
  5726  		v.reset(OpMIPSMOVWconst)
  5727  		v.AuxInt = int64(int32(uint32(d) << uint32(c)))
  5728  		return true
  5729  	}
  5730  	return false
  5731  }
  5732  func rewriteValueMIPS_OpMIPSSRA(v *Value, config *Config) bool {
  5733  	b := v.Block
  5734  	_ = b
  5735  	// match: (SRA x (MOVWconst [c]))
  5736  	// cond: uint32(c)>=32
  5737  	// result: (SRAconst x [31])
  5738  	for {
  5739  		x := v.Args[0]
  5740  		v_1 := v.Args[1]
  5741  		if v_1.Op != OpMIPSMOVWconst {
  5742  			break
  5743  		}
  5744  		c := v_1.AuxInt
  5745  		if !(uint32(c) >= 32) {
  5746  			break
  5747  		}
  5748  		v.reset(OpMIPSSRAconst)
  5749  		v.AuxInt = 31
  5750  		v.AddArg(x)
  5751  		return true
  5752  	}
  5753  	// match: (SRA x (MOVWconst [c]))
  5754  	// cond:
  5755  	// result: (SRAconst x [c])
  5756  	for {
  5757  		x := v.Args[0]
  5758  		v_1 := v.Args[1]
  5759  		if v_1.Op != OpMIPSMOVWconst {
  5760  			break
  5761  		}
  5762  		c := v_1.AuxInt
  5763  		v.reset(OpMIPSSRAconst)
  5764  		v.AuxInt = c
  5765  		v.AddArg(x)
  5766  		return true
  5767  	}
  5768  	return false
  5769  }
  5770  func rewriteValueMIPS_OpMIPSSRAconst(v *Value, config *Config) bool {
  5771  	b := v.Block
  5772  	_ = b
  5773  	// match: (SRAconst [c] (MOVWconst [d]))
  5774  	// cond:
  5775  	// result: (MOVWconst [int64(int32(d)>>uint32(c))])
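	// int32(d) >> uint32(c) is an arithmetic shift, so the sign bit is
	// replicated: for example, d = -8 with c = 1 folds to -4, whereas the
	// logical SRLconst fold below yields 0x7FFFFFFC for the same inputs.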
  5776  	for {
  5777  		c := v.AuxInt
  5778  		v_0 := v.Args[0]
  5779  		if v_0.Op != OpMIPSMOVWconst {
  5780  			break
  5781  		}
  5782  		d := v_0.AuxInt
  5783  		v.reset(OpMIPSMOVWconst)
  5784  		v.AuxInt = int64(int32(d) >> uint32(c))
  5785  		return true
  5786  	}
  5787  	return false
  5788  }
  5789  func rewriteValueMIPS_OpMIPSSRL(v *Value, config *Config) bool {
  5790  	b := v.Block
  5791  	_ = b
  5792  	// match: (SRL _ (MOVWconst [c]))
  5793  	// cond: uint32(c)>=32
  5794  	// result: (MOVWconst [0])
  5795  	for {
  5796  		v_1 := v.Args[1]
  5797  		if v_1.Op != OpMIPSMOVWconst {
  5798  			break
  5799  		}
  5800  		c := v_1.AuxInt
  5801  		if !(uint32(c) >= 32) {
  5802  			break
  5803  		}
  5804  		v.reset(OpMIPSMOVWconst)
  5805  		v.AuxInt = 0
  5806  		return true
  5807  	}
  5808  	// match: (SRL x (MOVWconst [c]))
  5809  	// cond:
  5810  	// result: (SRLconst x [c])
  5811  	for {
  5812  		x := v.Args[0]
  5813  		v_1 := v.Args[1]
  5814  		if v_1.Op != OpMIPSMOVWconst {
  5815  			break
  5816  		}
  5817  		c := v_1.AuxInt
  5818  		v.reset(OpMIPSSRLconst)
  5819  		v.AuxInt = c
  5820  		v.AddArg(x)
  5821  		return true
  5822  	}
  5823  	return false
  5824  }
  5825  func rewriteValueMIPS_OpMIPSSRLconst(v *Value, config *Config) bool {
  5826  	b := v.Block
  5827  	_ = b
  5828  	// match: (SRLconst [c] (MOVWconst [d]))
  5829  	// cond:
  5830  	// result: (MOVWconst [int64(uint32(d)>>uint32(c))])
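	// Here d is first truncated to uint32, so the shift is logical and fills
	// with zeros: d = -8 (0xFFFFFFF8) with c = 1 folds to 0x7FFFFFFC
	// (2147483644), in contrast to the arithmetic SRAconst fold above.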
  5831  	for {
  5832  		c := v.AuxInt
  5833  		v_0 := v.Args[0]
  5834  		if v_0.Op != OpMIPSMOVWconst {
  5835  			break
  5836  		}
  5837  		d := v_0.AuxInt
  5838  		v.reset(OpMIPSMOVWconst)
  5839  		v.AuxInt = int64(uint32(d) >> uint32(c))
  5840  		return true
  5841  	}
  5842  	return false
  5843  }
  5844  func rewriteValueMIPS_OpMIPSSUB(v *Value, config *Config) bool {
  5845  	b := v.Block
  5846  	_ = b
  5847  	// match: (SUB x (MOVWconst [c]))
  5848  	// cond:
  5849  	// result: (SUBconst [c] x)
  5850  	for {
  5851  		x := v.Args[0]
  5852  		v_1 := v.Args[1]
  5853  		if v_1.Op != OpMIPSMOVWconst {
  5854  			break
  5855  		}
  5856  		c := v_1.AuxInt
  5857  		v.reset(OpMIPSSUBconst)
  5858  		v.AuxInt = c
  5859  		v.AddArg(x)
  5860  		return true
  5861  	}
  5862  	// match: (SUB x x)
  5863  	// cond:
  5864  	// result: (MOVWconst [0])
  5865  	for {
  5866  		x := v.Args[0]
  5867  		if x != v.Args[1] {
  5868  			break
  5869  		}
  5870  		v.reset(OpMIPSMOVWconst)
  5871  		v.AuxInt = 0
  5872  		return true
  5873  	}
  5874  	// match: (SUB (MOVWconst [0]) x)
  5875  	// cond:
  5876  	// result: (NEG x)
  5877  	for {
  5878  		v_0 := v.Args[0]
  5879  		if v_0.Op != OpMIPSMOVWconst {
  5880  			break
  5881  		}
  5882  		if v_0.AuxInt != 0 {
  5883  			break
  5884  		}
  5885  		x := v.Args[1]
  5886  		v.reset(OpMIPSNEG)
  5887  		v.AddArg(x)
  5888  		return true
  5889  	}
  5890  	return false
  5891  }
  5892  func rewriteValueMIPS_OpMIPSSUBconst(v *Value, config *Config) bool {
  5893  	b := v.Block
  5894  	_ = b
  5895  	// match: (SUBconst [0]  x)
  5896  	// cond:
  5897  	// result: x
  5898  	for {
  5899  		if v.AuxInt != 0 {
  5900  			break
  5901  		}
  5902  		x := v.Args[0]
  5903  		v.reset(OpCopy)
  5904  		v.Type = x.Type
  5905  		v.AddArg(x)
  5906  		return true
  5907  	}
  5908  	// match: (SUBconst [c] (MOVWconst [d]))
  5909  	// cond:
  5910  	// result: (MOVWconst [int64(int32(d-c))])
  5911  	for {
  5912  		c := v.AuxInt
  5913  		v_0 := v.Args[0]
  5914  		if v_0.Op != OpMIPSMOVWconst {
  5915  			break
  5916  		}
  5917  		d := v_0.AuxInt
  5918  		v.reset(OpMIPSMOVWconst)
  5919  		v.AuxInt = int64(int32(d - c))
  5920  		return true
  5921  	}
  5922  	// match: (SUBconst [c] (SUBconst [d] x))
  5923  	// cond:
  5924  	// result: (ADDconst [int64(int32(-c-d))] x)
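	// (x - d) - c is x - (c + d), so the two subtractions collapse into a
	// single ADDconst of -(c+d); e.g. c = 3 and d = 4 becomes (ADDconst [-7] x).
	// The int64(int32(...)) conversion keeps the combined constant in the
	// sign-extended 32-bit form used by the other folds in this file.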
  5925  	for {
  5926  		c := v.AuxInt
  5927  		v_0 := v.Args[0]
  5928  		if v_0.Op != OpMIPSSUBconst {
  5929  			break
  5930  		}
  5931  		d := v_0.AuxInt
  5932  		x := v_0.Args[0]
  5933  		v.reset(OpMIPSADDconst)
  5934  		v.AuxInt = int64(int32(-c - d))
  5935  		v.AddArg(x)
  5936  		return true
  5937  	}
  5938  	// match: (SUBconst [c] (ADDconst [d] x))
  5939  	// cond:
  5940  	// result: (ADDconst [int64(int32(-c+d))] x)
  5941  	for {
  5942  		c := v.AuxInt
  5943  		v_0 := v.Args[0]
  5944  		if v_0.Op != OpMIPSADDconst {
  5945  			break
  5946  		}
  5947  		d := v_0.AuxInt
  5948  		x := v_0.Args[0]
  5949  		v.reset(OpMIPSADDconst)
  5950  		v.AuxInt = int64(int32(-c + d))
  5951  		v.AddArg(x)
  5952  		return true
  5953  	}
  5954  	return false
  5955  }
  5956  func rewriteValueMIPS_OpMIPSXOR(v *Value, config *Config) bool {
  5957  	b := v.Block
  5958  	_ = b
  5959  	// match: (XOR (MOVWconst [c]) x)
  5960  	// cond:
  5961  	// result: (XORconst [c] x)
  5962  	for {
  5963  		v_0 := v.Args[0]
  5964  		if v_0.Op != OpMIPSMOVWconst {
  5965  			break
  5966  		}
  5967  		c := v_0.AuxInt
  5968  		x := v.Args[1]
  5969  		v.reset(OpMIPSXORconst)
  5970  		v.AuxInt = c
  5971  		v.AddArg(x)
  5972  		return true
  5973  	}
  5974  	// match: (XOR x (MOVWconst [c]))
  5975  	// cond:
  5976  	// result: (XORconst [c] x)
  5977  	for {
  5978  		x := v.Args[0]
  5979  		v_1 := v.Args[1]
  5980  		if v_1.Op != OpMIPSMOVWconst {
  5981  			break
  5982  		}
  5983  		c := v_1.AuxInt
  5984  		v.reset(OpMIPSXORconst)
  5985  		v.AuxInt = c
  5986  		v.AddArg(x)
  5987  		return true
  5988  	}
  5989  	// match: (XOR x x)
  5990  	// cond:
  5991  	// result: (MOVWconst [0])
  5992  	for {
  5993  		x := v.Args[0]
  5994  		if x != v.Args[1] {
  5995  			break
  5996  		}
  5997  		v.reset(OpMIPSMOVWconst)
  5998  		v.AuxInt = 0
  5999  		return true
  6000  	}
  6001  	return false
  6002  }
  6003  func rewriteValueMIPS_OpMIPSXORconst(v *Value, config *Config) bool {
  6004  	b := v.Block
  6005  	_ = b
  6006  	// match: (XORconst [0]  x)
  6007  	// cond:
  6008  	// result: x
  6009  	for {
  6010  		if v.AuxInt != 0 {
  6011  			break
  6012  		}
  6013  		x := v.Args[0]
  6014  		v.reset(OpCopy)
  6015  		v.Type = x.Type
  6016  		v.AddArg(x)
  6017  		return true
  6018  	}
  6019  	// match: (XORconst [-1] x)
  6020  	// cond:
  6021  	// result: (NORconst [0] x)
  6022  	for {
  6023  		if v.AuxInt != -1 {
  6024  			break
  6025  		}
  6026  		x := v.Args[0]
  6027  		v.reset(OpMIPSNORconst)
  6028  		v.AuxInt = 0
  6029  		v.AddArg(x)
  6030  		return true
  6031  	}
  6032  	// match: (XORconst [c] (MOVWconst [d]))
  6033  	// cond:
  6034  	// result: (MOVWconst [c^d])
  6035  	for {
  6036  		c := v.AuxInt
  6037  		v_0 := v.Args[0]
  6038  		if v_0.Op != OpMIPSMOVWconst {
  6039  			break
  6040  		}
  6041  		d := v_0.AuxInt
  6042  		v.reset(OpMIPSMOVWconst)
  6043  		v.AuxInt = c ^ d
  6044  		return true
  6045  	}
  6046  	// match: (XORconst [c] (XORconst [d] x))
  6047  	// cond:
  6048  	// result: (XORconst [c^d] x)
  6049  	for {
  6050  		c := v.AuxInt
  6051  		v_0 := v.Args[0]
  6052  		if v_0.Op != OpMIPSXORconst {
  6053  			break
  6054  		}
  6055  		d := v_0.AuxInt
  6056  		x := v_0.Args[0]
  6057  		v.reset(OpMIPSXORconst)
  6058  		v.AuxInt = c ^ d
  6059  		v.AddArg(x)
  6060  		return true
  6061  	}
  6062  	return false
  6063  }
  6064  func rewriteValueMIPS_OpMod16(v *Value, config *Config) bool {
  6065  	b := v.Block
  6066  	_ = b
  6067  	// match: (Mod16 x y)
  6068  	// cond:
  6069  	// result: (Select0 (DIV (SignExt16to32 x) (SignExt16to32 y)))
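	// DIV produces a two-element tuple modelled on the MIPS HI/LO register
	// pair, with the remainder in the first slot; Select0 therefore extracts
	// the remainder, which is what Mod16 needs once both operands have been
	// sign-extended to 32 bits. The Div* lowerings use Select1 for the
	// quotient instead.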
  6070  	for {
  6071  		x := v.Args[0]
  6072  		y := v.Args[1]
  6073  		v.reset(OpSelect0)
  6074  		v0 := b.NewValue0(v.Line, OpMIPSDIV, MakeTuple(config.fe.TypeInt32(), config.fe.TypeInt32()))
  6075  		v1 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
  6076  		v1.AddArg(x)
  6077  		v0.AddArg(v1)
  6078  		v2 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
  6079  		v2.AddArg(y)
  6080  		v0.AddArg(v2)
  6081  		v.AddArg(v0)
  6082  		return true
  6083  	}
  6084  }
  6085  func rewriteValueMIPS_OpMod16u(v *Value, config *Config) bool {
  6086  	b := v.Block
  6087  	_ = b
  6088  	// match: (Mod16u x y)
  6089  	// cond:
  6090  	// result: (Select0 (DIVU (ZeroExt16to32 x) (ZeroExt16to32 y)))
  6091  	for {
  6092  		x := v.Args[0]
  6093  		y := v.Args[1]
  6094  		v.reset(OpSelect0)
  6095  		v0 := b.NewValue0(v.Line, OpMIPSDIVU, MakeTuple(config.fe.TypeUInt32(), config.fe.TypeUInt32()))
  6096  		v1 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
  6097  		v1.AddArg(x)
  6098  		v0.AddArg(v1)
  6099  		v2 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
  6100  		v2.AddArg(y)
  6101  		v0.AddArg(v2)
  6102  		v.AddArg(v0)
  6103  		return true
  6104  	}
  6105  }
  6106  func rewriteValueMIPS_OpMod32(v *Value, config *Config) bool {
  6107  	b := v.Block
  6108  	_ = b
  6109  	// match: (Mod32 x y)
  6110  	// cond:
  6111  	// result: (Select0 (DIV x y))
  6112  	for {
  6113  		x := v.Args[0]
  6114  		y := v.Args[1]
  6115  		v.reset(OpSelect0)
  6116  		v0 := b.NewValue0(v.Line, OpMIPSDIV, MakeTuple(config.fe.TypeInt32(), config.fe.TypeInt32()))
  6117  		v0.AddArg(x)
  6118  		v0.AddArg(y)
  6119  		v.AddArg(v0)
  6120  		return true
  6121  	}
  6122  }
  6123  func rewriteValueMIPS_OpMod32u(v *Value, config *Config) bool {
  6124  	b := v.Block
  6125  	_ = b
  6126  	// match: (Mod32u x y)
  6127  	// cond:
  6128  	// result: (Select0 (DIVU x y))
  6129  	for {
  6130  		x := v.Args[0]
  6131  		y := v.Args[1]
  6132  		v.reset(OpSelect0)
  6133  		v0 := b.NewValue0(v.Line, OpMIPSDIVU, MakeTuple(config.fe.TypeUInt32(), config.fe.TypeUInt32()))
  6134  		v0.AddArg(x)
  6135  		v0.AddArg(y)
  6136  		v.AddArg(v0)
  6137  		return true
  6138  	}
  6139  }
  6140  func rewriteValueMIPS_OpMod8(v *Value, config *Config) bool {
  6141  	b := v.Block
  6142  	_ = b
  6143  	// match: (Mod8 x y)
  6144  	// cond:
  6145  	// result: (Select0 (DIV (SignExt8to32 x) (SignExt8to32 y)))
  6146  	for {
  6147  		x := v.Args[0]
  6148  		y := v.Args[1]
  6149  		v.reset(OpSelect0)
  6150  		v0 := b.NewValue0(v.Line, OpMIPSDIV, MakeTuple(config.fe.TypeInt32(), config.fe.TypeInt32()))
  6151  		v1 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
  6152  		v1.AddArg(x)
  6153  		v0.AddArg(v1)
  6154  		v2 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
  6155  		v2.AddArg(y)
  6156  		v0.AddArg(v2)
  6157  		v.AddArg(v0)
  6158  		return true
  6159  	}
  6160  }
  6161  func rewriteValueMIPS_OpMod8u(v *Value, config *Config) bool {
  6162  	b := v.Block
  6163  	_ = b
  6164  	// match: (Mod8u x y)
  6165  	// cond:
  6166  	// result: (Select0 (DIVU (ZeroExt8to32 x) (ZeroExt8to32 y)))
  6167  	for {
  6168  		x := v.Args[0]
  6169  		y := v.Args[1]
  6170  		v.reset(OpSelect0)
  6171  		v0 := b.NewValue0(v.Line, OpMIPSDIVU, MakeTuple(config.fe.TypeUInt32(), config.fe.TypeUInt32()))
  6172  		v1 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
  6173  		v1.AddArg(x)
  6174  		v0.AddArg(v1)
  6175  		v2 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
  6176  		v2.AddArg(y)
  6177  		v0.AddArg(v2)
  6178  		v.AddArg(v0)
  6179  		return true
  6180  	}
  6181  }
  6182  func rewriteValueMIPS_OpMove(v *Value, config *Config) bool {
  6183  	b := v.Block
  6184  	_ = b
  6185  	// match: (Move [s] _ _ mem)
  6186  	// cond: SizeAndAlign(s).Size() == 0
  6187  	// result: mem
  6188  	for {
  6189  		s := v.AuxInt
  6190  		mem := v.Args[2]
  6191  		if !(SizeAndAlign(s).Size() == 0) {
  6192  			break
  6193  		}
  6194  		v.reset(OpCopy)
  6195  		v.Type = mem.Type
  6196  		v.AddArg(mem)
  6197  		return true
  6198  	}
  6199  	// match: (Move [s] dst src mem)
  6200  	// cond: SizeAndAlign(s).Size() == 1
  6201  	// result: (MOVBstore dst (MOVBUload src mem) mem)
  6202  	for {
  6203  		s := v.AuxInt
  6204  		dst := v.Args[0]
  6205  		src := v.Args[1]
  6206  		mem := v.Args[2]
  6207  		if !(SizeAndAlign(s).Size() == 1) {
  6208  			break
  6209  		}
  6210  		v.reset(OpMIPSMOVBstore)
  6211  		v.AddArg(dst)
  6212  		v0 := b.NewValue0(v.Line, OpMIPSMOVBUload, config.fe.TypeUInt8())
  6213  		v0.AddArg(src)
  6214  		v0.AddArg(mem)
  6215  		v.AddArg(v0)
  6216  		v.AddArg(mem)
  6217  		return true
  6218  	}
  6219  	// match: (Move [s] dst src mem)
  6220  	// cond: SizeAndAlign(s).Size() == 2 && SizeAndAlign(s).Align()%2 == 0
  6221  	// result: (MOVHstore dst (MOVHUload src mem) mem)
  6222  	for {
  6223  		s := v.AuxInt
  6224  		dst := v.Args[0]
  6225  		src := v.Args[1]
  6226  		mem := v.Args[2]
  6227  		if !(SizeAndAlign(s).Size() == 2 && SizeAndAlign(s).Align()%2 == 0) {
  6228  			break
  6229  		}
  6230  		v.reset(OpMIPSMOVHstore)
  6231  		v.AddArg(dst)
  6232  		v0 := b.NewValue0(v.Line, OpMIPSMOVHUload, config.fe.TypeUInt16())
  6233  		v0.AddArg(src)
  6234  		v0.AddArg(mem)
  6235  		v.AddArg(v0)
  6236  		v.AddArg(mem)
  6237  		return true
  6238  	}
  6239  	// match: (Move [s] dst src mem)
  6240  	// cond: SizeAndAlign(s).Size() == 2
  6241  	// result: (MOVBstore [1] dst (MOVBUload [1] src mem) 		(MOVBstore dst (MOVBUload src mem) mem))
  6242  	for {
  6243  		s := v.AuxInt
  6244  		dst := v.Args[0]
  6245  		src := v.Args[1]
  6246  		mem := v.Args[2]
  6247  		if !(SizeAndAlign(s).Size() == 2) {
  6248  			break
  6249  		}
  6250  		v.reset(OpMIPSMOVBstore)
  6251  		v.AuxInt = 1
  6252  		v.AddArg(dst)
  6253  		v0 := b.NewValue0(v.Line, OpMIPSMOVBUload, config.fe.TypeUInt8())
  6254  		v0.AuxInt = 1
  6255  		v0.AddArg(src)
  6256  		v0.AddArg(mem)
  6257  		v.AddArg(v0)
  6258  		v1 := b.NewValue0(v.Line, OpMIPSMOVBstore, TypeMem)
  6259  		v1.AddArg(dst)
  6260  		v2 := b.NewValue0(v.Line, OpMIPSMOVBUload, config.fe.TypeUInt8())
  6261  		v2.AddArg(src)
  6262  		v2.AddArg(mem)
  6263  		v1.AddArg(v2)
  6264  		v1.AddArg(mem)
  6265  		v.AddArg(v1)
  6266  		return true
  6267  	}
  6268  	// match: (Move [s] dst src mem)
  6269  	// cond: SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%4 == 0
  6270  	// result: (MOVWstore dst (MOVWload src mem) mem)
  6271  	for {
  6272  		s := v.AuxInt
  6273  		dst := v.Args[0]
  6274  		src := v.Args[1]
  6275  		mem := v.Args[2]
  6276  		if !(SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%4 == 0) {
  6277  			break
  6278  		}
  6279  		v.reset(OpMIPSMOVWstore)
  6280  		v.AddArg(dst)
  6281  		v0 := b.NewValue0(v.Line, OpMIPSMOVWload, config.fe.TypeUInt32())
  6282  		v0.AddArg(src)
  6283  		v0.AddArg(mem)
  6284  		v.AddArg(v0)
  6285  		v.AddArg(mem)
  6286  		return true
  6287  	}
  6288  	// match: (Move [s] dst src mem)
  6289  	// cond: SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%2 == 0
  6290  	// result: (MOVHstore [2] dst (MOVHUload [2] src mem) 		(MOVHstore dst (MOVHUload src mem) mem))
  6291  	for {
  6292  		s := v.AuxInt
  6293  		dst := v.Args[0]
  6294  		src := v.Args[1]
  6295  		mem := v.Args[2]
  6296  		if !(SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%2 == 0) {
  6297  			break
  6298  		}
  6299  		v.reset(OpMIPSMOVHstore)
  6300  		v.AuxInt = 2
  6301  		v.AddArg(dst)
  6302  		v0 := b.NewValue0(v.Line, OpMIPSMOVHUload, config.fe.TypeUInt16())
  6303  		v0.AuxInt = 2
  6304  		v0.AddArg(src)
  6305  		v0.AddArg(mem)
  6306  		v.AddArg(v0)
  6307  		v1 := b.NewValue0(v.Line, OpMIPSMOVHstore, TypeMem)
  6308  		v1.AddArg(dst)
  6309  		v2 := b.NewValue0(v.Line, OpMIPSMOVHUload, config.fe.TypeUInt16())
  6310  		v2.AddArg(src)
  6311  		v2.AddArg(mem)
  6312  		v1.AddArg(v2)
  6313  		v1.AddArg(mem)
  6314  		v.AddArg(v1)
  6315  		return true
  6316  	}
  6317  	// match: (Move [s] dst src mem)
  6318  	// cond: SizeAndAlign(s).Size() == 4
  6319  	// result: (MOVBstore [3] dst (MOVBUload [3] src mem) 		(MOVBstore [2] dst (MOVBUload [2] src mem) 			(MOVBstore [1] dst (MOVBUload [1] src mem) 				(MOVBstore dst (MOVBUload src mem) mem))))
  6320  	for {
  6321  		s := v.AuxInt
  6322  		dst := v.Args[0]
  6323  		src := v.Args[1]
  6324  		mem := v.Args[2]
  6325  		if !(SizeAndAlign(s).Size() == 4) {
  6326  			break
  6327  		}
  6328  		v.reset(OpMIPSMOVBstore)
  6329  		v.AuxInt = 3
  6330  		v.AddArg(dst)
  6331  		v0 := b.NewValue0(v.Line, OpMIPSMOVBUload, config.fe.TypeUInt8())
  6332  		v0.AuxInt = 3
  6333  		v0.AddArg(src)
  6334  		v0.AddArg(mem)
  6335  		v.AddArg(v0)
  6336  		v1 := b.NewValue0(v.Line, OpMIPSMOVBstore, TypeMem)
  6337  		v1.AuxInt = 2
  6338  		v1.AddArg(dst)
  6339  		v2 := b.NewValue0(v.Line, OpMIPSMOVBUload, config.fe.TypeUInt8())
  6340  		v2.AuxInt = 2
  6341  		v2.AddArg(src)
  6342  		v2.AddArg(mem)
  6343  		v1.AddArg(v2)
  6344  		v3 := b.NewValue0(v.Line, OpMIPSMOVBstore, TypeMem)
  6345  		v3.AuxInt = 1
  6346  		v3.AddArg(dst)
  6347  		v4 := b.NewValue0(v.Line, OpMIPSMOVBUload, config.fe.TypeUInt8())
  6348  		v4.AuxInt = 1
  6349  		v4.AddArg(src)
  6350  		v4.AddArg(mem)
  6351  		v3.AddArg(v4)
  6352  		v5 := b.NewValue0(v.Line, OpMIPSMOVBstore, TypeMem)
  6353  		v5.AddArg(dst)
  6354  		v6 := b.NewValue0(v.Line, OpMIPSMOVBUload, config.fe.TypeUInt8())
  6355  		v6.AddArg(src)
  6356  		v6.AddArg(mem)
  6357  		v5.AddArg(v6)
  6358  		v5.AddArg(mem)
  6359  		v3.AddArg(v5)
  6360  		v1.AddArg(v3)
  6361  		v.AddArg(v1)
  6362  		return true
  6363  	}
  6364  	// match: (Move [s] dst src mem)
  6365  	// cond: SizeAndAlign(s).Size() == 3
  6366  	// result: (MOVBstore [2] dst (MOVBUload [2] src mem) 		(MOVBstore [1] dst (MOVBUload [1] src mem) 			(MOVBstore dst (MOVBUload src mem) mem)))
  6367  	for {
  6368  		s := v.AuxInt
  6369  		dst := v.Args[0]
  6370  		src := v.Args[1]
  6371  		mem := v.Args[2]
  6372  		if !(SizeAndAlign(s).Size() == 3) {
  6373  			break
  6374  		}
  6375  		v.reset(OpMIPSMOVBstore)
  6376  		v.AuxInt = 2
  6377  		v.AddArg(dst)
  6378  		v0 := b.NewValue0(v.Line, OpMIPSMOVBUload, config.fe.TypeUInt8())
  6379  		v0.AuxInt = 2
  6380  		v0.AddArg(src)
  6381  		v0.AddArg(mem)
  6382  		v.AddArg(v0)
  6383  		v1 := b.NewValue0(v.Line, OpMIPSMOVBstore, TypeMem)
  6384  		v1.AuxInt = 1
  6385  		v1.AddArg(dst)
  6386  		v2 := b.NewValue0(v.Line, OpMIPSMOVBUload, config.fe.TypeUInt8())
  6387  		v2.AuxInt = 1
  6388  		v2.AddArg(src)
  6389  		v2.AddArg(mem)
  6390  		v1.AddArg(v2)
  6391  		v3 := b.NewValue0(v.Line, OpMIPSMOVBstore, TypeMem)
  6392  		v3.AddArg(dst)
  6393  		v4 := b.NewValue0(v.Line, OpMIPSMOVBUload, config.fe.TypeUInt8())
  6394  		v4.AddArg(src)
  6395  		v4.AddArg(mem)
  6396  		v3.AddArg(v4)
  6397  		v3.AddArg(mem)
  6398  		v1.AddArg(v3)
  6399  		v.AddArg(v1)
  6400  		return true
  6401  	}
  6402  	// match: (Move [s] dst src mem)
  6403  	// cond: SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%4 == 0
  6404  	// result: (MOVWstore [4] dst (MOVWload [4] src mem) 		(MOVWstore dst (MOVWload src mem) mem))
  6405  	for {
  6406  		s := v.AuxInt
  6407  		dst := v.Args[0]
  6408  		src := v.Args[1]
  6409  		mem := v.Args[2]
  6410  		if !(SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%4 == 0) {
  6411  			break
  6412  		}
  6413  		v.reset(OpMIPSMOVWstore)
  6414  		v.AuxInt = 4
  6415  		v.AddArg(dst)
  6416  		v0 := b.NewValue0(v.Line, OpMIPSMOVWload, config.fe.TypeUInt32())
  6417  		v0.AuxInt = 4
  6418  		v0.AddArg(src)
  6419  		v0.AddArg(mem)
  6420  		v.AddArg(v0)
  6421  		v1 := b.NewValue0(v.Line, OpMIPSMOVWstore, TypeMem)
  6422  		v1.AddArg(dst)
  6423  		v2 := b.NewValue0(v.Line, OpMIPSMOVWload, config.fe.TypeUInt32())
  6424  		v2.AddArg(src)
  6425  		v2.AddArg(mem)
  6426  		v1.AddArg(v2)
  6427  		v1.AddArg(mem)
  6428  		v.AddArg(v1)
  6429  		return true
  6430  	}
  6431  	// match: (Move [s] dst src mem)
  6432  	// cond: SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%2 == 0
  6433  	// result: (MOVHstore [6] dst (MOVHload [6] src mem) 		(MOVHstore [4] dst (MOVHload [4] src mem) 			(MOVHstore [2] dst (MOVHload [2] src mem) 				(MOVHstore dst (MOVHload src mem) mem))))
  6434  	for {
  6435  		s := v.AuxInt
  6436  		dst := v.Args[0]
  6437  		src := v.Args[1]
  6438  		mem := v.Args[2]
  6439  		if !(SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%2 == 0) {
  6440  			break
  6441  		}
  6442  		v.reset(OpMIPSMOVHstore)
  6443  		v.AuxInt = 6
  6444  		v.AddArg(dst)
  6445  		v0 := b.NewValue0(v.Line, OpMIPSMOVHload, config.fe.TypeInt16())
  6446  		v0.AuxInt = 6
  6447  		v0.AddArg(src)
  6448  		v0.AddArg(mem)
  6449  		v.AddArg(v0)
  6450  		v1 := b.NewValue0(v.Line, OpMIPSMOVHstore, TypeMem)
  6451  		v1.AuxInt = 4
  6452  		v1.AddArg(dst)
  6453  		v2 := b.NewValue0(v.Line, OpMIPSMOVHload, config.fe.TypeInt16())
  6454  		v2.AuxInt = 4
  6455  		v2.AddArg(src)
  6456  		v2.AddArg(mem)
  6457  		v1.AddArg(v2)
  6458  		v3 := b.NewValue0(v.Line, OpMIPSMOVHstore, TypeMem)
  6459  		v3.AuxInt = 2
  6460  		v3.AddArg(dst)
  6461  		v4 := b.NewValue0(v.Line, OpMIPSMOVHload, config.fe.TypeInt16())
  6462  		v4.AuxInt = 2
  6463  		v4.AddArg(src)
  6464  		v4.AddArg(mem)
  6465  		v3.AddArg(v4)
  6466  		v5 := b.NewValue0(v.Line, OpMIPSMOVHstore, TypeMem)
  6467  		v5.AddArg(dst)
  6468  		v6 := b.NewValue0(v.Line, OpMIPSMOVHload, config.fe.TypeInt16())
  6469  		v6.AddArg(src)
  6470  		v6.AddArg(mem)
  6471  		v5.AddArg(v6)
  6472  		v5.AddArg(mem)
  6473  		v3.AddArg(v5)
  6474  		v1.AddArg(v3)
  6475  		v.AddArg(v1)
  6476  		return true
  6477  	}
  6478  	// match: (Move [s] dst src mem)
  6479  	// cond: SizeAndAlign(s).Size() == 6 && SizeAndAlign(s).Align()%2 == 0
  6480  	// result: (MOVHstore [4] dst (MOVHload [4] src mem) 		(MOVHstore [2] dst (MOVHload [2] src mem) 			(MOVHstore dst (MOVHload src mem) mem)))
  6481  	for {
  6482  		s := v.AuxInt
  6483  		dst := v.Args[0]
  6484  		src := v.Args[1]
  6485  		mem := v.Args[2]
  6486  		if !(SizeAndAlign(s).Size() == 6 && SizeAndAlign(s).Align()%2 == 0) {
  6487  			break
  6488  		}
  6489  		v.reset(OpMIPSMOVHstore)
  6490  		v.AuxInt = 4
  6491  		v.AddArg(dst)
  6492  		v0 := b.NewValue0(v.Line, OpMIPSMOVHload, config.fe.TypeInt16())
  6493  		v0.AuxInt = 4
  6494  		v0.AddArg(src)
  6495  		v0.AddArg(mem)
  6496  		v.AddArg(v0)
  6497  		v1 := b.NewValue0(v.Line, OpMIPSMOVHstore, TypeMem)
  6498  		v1.AuxInt = 2
  6499  		v1.AddArg(dst)
  6500  		v2 := b.NewValue0(v.Line, OpMIPSMOVHload, config.fe.TypeInt16())
  6501  		v2.AuxInt = 2
  6502  		v2.AddArg(src)
  6503  		v2.AddArg(mem)
  6504  		v1.AddArg(v2)
  6505  		v3 := b.NewValue0(v.Line, OpMIPSMOVHstore, TypeMem)
  6506  		v3.AddArg(dst)
  6507  		v4 := b.NewValue0(v.Line, OpMIPSMOVHload, config.fe.TypeInt16())
  6508  		v4.AddArg(src)
  6509  		v4.AddArg(mem)
  6510  		v3.AddArg(v4)
  6511  		v3.AddArg(mem)
  6512  		v1.AddArg(v3)
  6513  		v.AddArg(v1)
  6514  		return true
  6515  	}
  6516  	// match: (Move [s] dst src mem)
  6517  	// cond: SizeAndAlign(s).Size() == 12 && SizeAndAlign(s).Align()%4 == 0
  6518  	// result: (MOVWstore [8] dst (MOVWload [8] src mem) 		(MOVWstore [4] dst (MOVWload [4] src mem) 			(MOVWstore dst (MOVWload src mem) mem)))
  6519  	for {
  6520  		s := v.AuxInt
  6521  		dst := v.Args[0]
  6522  		src := v.Args[1]
  6523  		mem := v.Args[2]
  6524  		if !(SizeAndAlign(s).Size() == 12 && SizeAndAlign(s).Align()%4 == 0) {
  6525  			break
  6526  		}
  6527  		v.reset(OpMIPSMOVWstore)
  6528  		v.AuxInt = 8
  6529  		v.AddArg(dst)
  6530  		v0 := b.NewValue0(v.Line, OpMIPSMOVWload, config.fe.TypeUInt32())
  6531  		v0.AuxInt = 8
  6532  		v0.AddArg(src)
  6533  		v0.AddArg(mem)
  6534  		v.AddArg(v0)
  6535  		v1 := b.NewValue0(v.Line, OpMIPSMOVWstore, TypeMem)
  6536  		v1.AuxInt = 4
  6537  		v1.AddArg(dst)
  6538  		v2 := b.NewValue0(v.Line, OpMIPSMOVWload, config.fe.TypeUInt32())
  6539  		v2.AuxInt = 4
  6540  		v2.AddArg(src)
  6541  		v2.AddArg(mem)
  6542  		v1.AddArg(v2)
  6543  		v3 := b.NewValue0(v.Line, OpMIPSMOVWstore, TypeMem)
  6544  		v3.AddArg(dst)
  6545  		v4 := b.NewValue0(v.Line, OpMIPSMOVWload, config.fe.TypeUInt32())
  6546  		v4.AddArg(src)
  6547  		v4.AddArg(mem)
  6548  		v3.AddArg(v4)
  6549  		v3.AddArg(mem)
  6550  		v1.AddArg(v3)
  6551  		v.AddArg(v1)
  6552  		return true
  6553  	}
  6554  	// match: (Move [s] dst src mem)
  6555  	// cond: SizeAndAlign(s).Size() == 16 && SizeAndAlign(s).Align()%4 == 0
  6556  	// result: (MOVWstore [12] dst (MOVWload [12] src mem) 		(MOVWstore [8] dst (MOVWload [8] src mem) 			(MOVWstore [4] dst (MOVWload [4] src mem) 				(MOVWstore dst (MOVWload src mem) mem))))
  6557  	for {
  6558  		s := v.AuxInt
  6559  		dst := v.Args[0]
  6560  		src := v.Args[1]
  6561  		mem := v.Args[2]
  6562  		if !(SizeAndAlign(s).Size() == 16 && SizeAndAlign(s).Align()%4 == 0) {
  6563  			break
  6564  		}
  6565  		v.reset(OpMIPSMOVWstore)
  6566  		v.AuxInt = 12
  6567  		v.AddArg(dst)
  6568  		v0 := b.NewValue0(v.Line, OpMIPSMOVWload, config.fe.TypeUInt32())
  6569  		v0.AuxInt = 12
  6570  		v0.AddArg(src)
  6571  		v0.AddArg(mem)
  6572  		v.AddArg(v0)
  6573  		v1 := b.NewValue0(v.Line, OpMIPSMOVWstore, TypeMem)
  6574  		v1.AuxInt = 8
  6575  		v1.AddArg(dst)
  6576  		v2 := b.NewValue0(v.Line, OpMIPSMOVWload, config.fe.TypeUInt32())
  6577  		v2.AuxInt = 8
  6578  		v2.AddArg(src)
  6579  		v2.AddArg(mem)
  6580  		v1.AddArg(v2)
  6581  		v3 := b.NewValue0(v.Line, OpMIPSMOVWstore, TypeMem)
  6582  		v3.AuxInt = 4
  6583  		v3.AddArg(dst)
  6584  		v4 := b.NewValue0(v.Line, OpMIPSMOVWload, config.fe.TypeUInt32())
  6585  		v4.AuxInt = 4
  6586  		v4.AddArg(src)
  6587  		v4.AddArg(mem)
  6588  		v3.AddArg(v4)
  6589  		v5 := b.NewValue0(v.Line, OpMIPSMOVWstore, TypeMem)
  6590  		v5.AddArg(dst)
  6591  		v6 := b.NewValue0(v.Line, OpMIPSMOVWload, config.fe.TypeUInt32())
  6592  		v6.AddArg(src)
  6593  		v6.AddArg(mem)
  6594  		v5.AddArg(v6)
  6595  		v5.AddArg(mem)
  6596  		v3.AddArg(v5)
  6597  		v1.AddArg(v3)
  6598  		v.AddArg(v1)
  6599  		return true
  6600  	}
  6601  	// match: (Move [s] dst src mem)
  6602  	// cond: (SizeAndAlign(s).Size() > 16 || SizeAndAlign(s).Align()%4 != 0)
  6603  	// result: (LoweredMove [SizeAndAlign(s).Align()] 		dst 		src 		(ADDconst <src.Type> src [SizeAndAlign(s).Size()-moveSize(SizeAndAlign(s).Align(), config)]) 		mem)
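	// Moves larger than 16 bytes, or not 4-byte aligned, fall through to the
	// LoweredMove pseudo-op, which the code generator later expands into an
	// inline copy loop. The extra ADDconst operand appears to mark where that
	// loop stops: the source address advanced by the total size minus one
	// transfer unit, with moveSize choosing the unit from the alignment.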
  6604  	for {
  6605  		s := v.AuxInt
  6606  		dst := v.Args[0]
  6607  		src := v.Args[1]
  6608  		mem := v.Args[2]
  6609  		if !(SizeAndAlign(s).Size() > 16 || SizeAndAlign(s).Align()%4 != 0) {
  6610  			break
  6611  		}
  6612  		v.reset(OpMIPSLoweredMove)
  6613  		v.AuxInt = SizeAndAlign(s).Align()
  6614  		v.AddArg(dst)
  6615  		v.AddArg(src)
  6616  		v0 := b.NewValue0(v.Line, OpMIPSADDconst, src.Type)
  6617  		v0.AuxInt = SizeAndAlign(s).Size() - moveSize(SizeAndAlign(s).Align(), config)
  6618  		v0.AddArg(src)
  6619  		v.AddArg(v0)
  6620  		v.AddArg(mem)
  6621  		return true
  6622  	}
  6623  	return false
  6624  }
  6625  func rewriteValueMIPS_OpMul16(v *Value, config *Config) bool {
  6626  	b := v.Block
  6627  	_ = b
  6628  	// match: (Mul16 x y)
  6629  	// cond:
  6630  	// result: (MUL x y)
  6631  	for {
  6632  		x := v.Args[0]
  6633  		y := v.Args[1]
  6634  		v.reset(OpMIPSMUL)
  6635  		v.AddArg(x)
  6636  		v.AddArg(y)
  6637  		return true
  6638  	}
  6639  }
  6640  func rewriteValueMIPS_OpMul32(v *Value, config *Config) bool {
  6641  	b := v.Block
  6642  	_ = b
  6643  	// match: (Mul32 x y)
  6644  	// cond:
  6645  	// result: (MUL x y)
  6646  	for {
  6647  		x := v.Args[0]
  6648  		y := v.Args[1]
  6649  		v.reset(OpMIPSMUL)
  6650  		v.AddArg(x)
  6651  		v.AddArg(y)
  6652  		return true
  6653  	}
  6654  }
  6655  func rewriteValueMIPS_OpMul32F(v *Value, config *Config) bool {
  6656  	b := v.Block
  6657  	_ = b
  6658  	// match: (Mul32F x y)
  6659  	// cond:
  6660  	// result: (MULF x y)
  6661  	for {
  6662  		x := v.Args[0]
  6663  		y := v.Args[1]
  6664  		v.reset(OpMIPSMULF)
  6665  		v.AddArg(x)
  6666  		v.AddArg(y)
  6667  		return true
  6668  	}
  6669  }
  6670  func rewriteValueMIPS_OpMul32uhilo(v *Value, config *Config) bool {
  6671  	b := v.Block
  6672  	_ = b
  6673  	// match: (Mul32uhilo x y)
  6674  	// cond:
  6675  	// result: (MULTU x y)
  6676  	for {
  6677  		x := v.Args[0]
  6678  		y := v.Args[1]
  6679  		v.reset(OpMIPSMULTU)
  6680  		v.AddArg(x)
  6681  		v.AddArg(y)
  6682  		return true
  6683  	}
  6684  }
  6685  func rewriteValueMIPS_OpMul64F(v *Value, config *Config) bool {
  6686  	b := v.Block
  6687  	_ = b
  6688  	// match: (Mul64F x y)
  6689  	// cond:
  6690  	// result: (MULD x y)
  6691  	for {
  6692  		x := v.Args[0]
  6693  		y := v.Args[1]
  6694  		v.reset(OpMIPSMULD)
  6695  		v.AddArg(x)
  6696  		v.AddArg(y)
  6697  		return true
  6698  	}
  6699  }
  6700  func rewriteValueMIPS_OpMul8(v *Value, config *Config) bool {
  6701  	b := v.Block
  6702  	_ = b
  6703  	// match: (Mul8 x y)
  6704  	// cond:
  6705  	// result: (MUL x y)
  6706  	for {
  6707  		x := v.Args[0]
  6708  		y := v.Args[1]
  6709  		v.reset(OpMIPSMUL)
  6710  		v.AddArg(x)
  6711  		v.AddArg(y)
  6712  		return true
  6713  	}
  6714  }
  6715  func rewriteValueMIPS_OpNeg16(v *Value, config *Config) bool {
  6716  	b := v.Block
  6717  	_ = b
  6718  	// match: (Neg16 x)
  6719  	// cond:
  6720  	// result: (NEG x)
  6721  	for {
  6722  		x := v.Args[0]
  6723  		v.reset(OpMIPSNEG)
  6724  		v.AddArg(x)
  6725  		return true
  6726  	}
  6727  }
  6728  func rewriteValueMIPS_OpNeg32(v *Value, config *Config) bool {
  6729  	b := v.Block
  6730  	_ = b
  6731  	// match: (Neg32 x)
  6732  	// cond:
  6733  	// result: (NEG x)
  6734  	for {
  6735  		x := v.Args[0]
  6736  		v.reset(OpMIPSNEG)
  6737  		v.AddArg(x)
  6738  		return true
  6739  	}
  6740  }
  6741  func rewriteValueMIPS_OpNeg32F(v *Value, config *Config) bool {
  6742  	b := v.Block
  6743  	_ = b
  6744  	// match: (Neg32F x)
  6745  	// cond:
  6746  	// result: (NEGF x)
  6747  	for {
  6748  		x := v.Args[0]
  6749  		v.reset(OpMIPSNEGF)
  6750  		v.AddArg(x)
  6751  		return true
  6752  	}
  6753  }
  6754  func rewriteValueMIPS_OpNeg64F(v *Value, config *Config) bool {
  6755  	b := v.Block
  6756  	_ = b
  6757  	// match: (Neg64F x)
  6758  	// cond:
  6759  	// result: (NEGD x)
  6760  	for {
  6761  		x := v.Args[0]
  6762  		v.reset(OpMIPSNEGD)
  6763  		v.AddArg(x)
  6764  		return true
  6765  	}
  6766  }
  6767  func rewriteValueMIPS_OpNeg8(v *Value, config *Config) bool {
  6768  	b := v.Block
  6769  	_ = b
  6770  	// match: (Neg8 x)
  6771  	// cond:
  6772  	// result: (NEG x)
  6773  	for {
  6774  		x := v.Args[0]
  6775  		v.reset(OpMIPSNEG)
  6776  		v.AddArg(x)
  6777  		return true
  6778  	}
  6779  }
  6780  func rewriteValueMIPS_OpNeq16(v *Value, config *Config) bool {
  6781  	b := v.Block
  6782  	_ = b
  6783  	// match: (Neq16 x y)
  6784  	// cond:
  6785  	// result: (SGTU (XOR (ZeroExt16to32 x) (ZeroExt16to32 y)) (MOVWconst [0]))
  6786  	for {
  6787  		x := v.Args[0]
  6788  		y := v.Args[1]
  6789  		v.reset(OpMIPSSGTU)
  6790  		v0 := b.NewValue0(v.Line, OpMIPSXOR, config.fe.TypeUInt32())
  6791  		v1 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
  6792  		v1.AddArg(x)
  6793  		v0.AddArg(v1)
  6794  		v2 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
  6795  		v2.AddArg(y)
  6796  		v0.AddArg(v2)
  6797  		v.AddArg(v0)
  6798  		v3 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  6799  		v3.AuxInt = 0
  6800  		v.AddArg(v3)
  6801  		return true
  6802  	}
  6803  }
  6804  func rewriteValueMIPS_OpNeq32(v *Value, config *Config) bool {
  6805  	b := v.Block
  6806  	_ = b
  6807  	// match: (Neq32 x y)
  6808  	// cond:
  6809  	// result: (SGTU (XOR x y) (MOVWconst [0]))
  6810  	for {
  6811  		x := v.Args[0]
  6812  		y := v.Args[1]
  6813  		v.reset(OpMIPSSGTU)
  6814  		v0 := b.NewValue0(v.Line, OpMIPSXOR, config.fe.TypeUInt32())
  6815  		v0.AddArg(x)
  6816  		v0.AddArg(y)
  6817  		v.AddArg(v0)
  6818  		v1 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  6819  		v1.AuxInt = 0
  6820  		v.AddArg(v1)
  6821  		return true
  6822  	}
  6823  }
  6824  func rewriteValueMIPS_OpNeq32F(v *Value, config *Config) bool {
  6825  	b := v.Block
  6826  	_ = b
  6827  	// match: (Neq32F x y)
  6828  	// cond:
  6829  	// result: (FPFlagFalse (CMPEQF x y))
  6830  	for {
  6831  		x := v.Args[0]
  6832  		y := v.Args[1]
  6833  		v.reset(OpMIPSFPFlagFalse)
  6834  		v0 := b.NewValue0(v.Line, OpMIPSCMPEQF, TypeFlags)
  6835  		v0.AddArg(x)
  6836  		v0.AddArg(y)
  6837  		v.AddArg(v0)
  6838  		return true
  6839  	}
  6840  }
  6841  func rewriteValueMIPS_OpNeq64F(v *Value, config *Config) bool {
  6842  	b := v.Block
  6843  	_ = b
  6844  	// match: (Neq64F x y)
  6845  	// cond:
  6846  	// result: (FPFlagFalse (CMPEQD x y))
  6847  	for {
  6848  		x := v.Args[0]
  6849  		y := v.Args[1]
  6850  		v.reset(OpMIPSFPFlagFalse)
  6851  		v0 := b.NewValue0(v.Line, OpMIPSCMPEQD, TypeFlags)
  6852  		v0.AddArg(x)
  6853  		v0.AddArg(y)
  6854  		v.AddArg(v0)
  6855  		return true
  6856  	}
  6857  }
  6858  func rewriteValueMIPS_OpNeq8(v *Value, config *Config) bool {
  6859  	b := v.Block
  6860  	_ = b
  6861  	// match: (Neq8 x y)
  6862  	// cond:
  6863  	// result: (SGTU (XOR (ZeroExt8to32 x) (ZeroExt8to32 y)) (MOVWconst [0]))
  6864  	for {
  6865  		x := v.Args[0]
  6866  		y := v.Args[1]
  6867  		v.reset(OpMIPSSGTU)
  6868  		v0 := b.NewValue0(v.Line, OpMIPSXOR, config.fe.TypeUInt32())
  6869  		v1 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
  6870  		v1.AddArg(x)
  6871  		v0.AddArg(v1)
  6872  		v2 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
  6873  		v2.AddArg(y)
  6874  		v0.AddArg(v2)
  6875  		v.AddArg(v0)
  6876  		v3 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  6877  		v3.AuxInt = 0
  6878  		v.AddArg(v3)
  6879  		return true
  6880  	}
  6881  }
  6882  func rewriteValueMIPS_OpNeqB(v *Value, config *Config) bool {
  6883  	b := v.Block
  6884  	_ = b
  6885  	// match: (NeqB x y)
  6886  	// cond:
  6887  	// result: (XOR x y)
  6888  	for {
  6889  		x := v.Args[0]
  6890  		y := v.Args[1]
  6891  		v.reset(OpMIPSXOR)
  6892  		v.AddArg(x)
  6893  		v.AddArg(y)
  6894  		return true
  6895  	}
  6896  }
  6897  func rewriteValueMIPS_OpNeqPtr(v *Value, config *Config) bool {
  6898  	b := v.Block
  6899  	_ = b
  6900  	// match: (NeqPtr x y)
  6901  	// cond:
  6902  	// result: (SGTU (XOR x y) (MOVWconst [0]))
  6903  	for {
  6904  		x := v.Args[0]
  6905  		y := v.Args[1]
  6906  		v.reset(OpMIPSSGTU)
  6907  		v0 := b.NewValue0(v.Line, OpMIPSXOR, config.fe.TypeUInt32())
  6908  		v0.AddArg(x)
  6909  		v0.AddArg(y)
  6910  		v.AddArg(v0)
  6911  		v1 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  6912  		v1.AuxInt = 0
  6913  		v.AddArg(v1)
  6914  		return true
  6915  	}
  6916  }
  6917  func rewriteValueMIPS_OpNilCheck(v *Value, config *Config) bool {
  6918  	b := v.Block
  6919  	_ = b
  6920  	// match: (NilCheck ptr mem)
  6921  	// cond:
  6922  	// result: (LoweredNilCheck ptr mem)
  6923  	for {
  6924  		ptr := v.Args[0]
  6925  		mem := v.Args[1]
  6926  		v.reset(OpMIPSLoweredNilCheck)
  6927  		v.AddArg(ptr)
  6928  		v.AddArg(mem)
  6929  		return true
  6930  	}
  6931  }
  6932  func rewriteValueMIPS_OpNot(v *Value, config *Config) bool {
  6933  	b := v.Block
  6934  	_ = b
  6935  	// match: (Not x)
  6936  	// cond:
  6937  	// result: (XORconst [1] x)
  6938  	for {
  6939  		x := v.Args[0]
  6940  		v.reset(OpMIPSXORconst)
  6941  		v.AuxInt = 1
  6942  		v.AddArg(x)
  6943  		return true
  6944  	}
  6945  }
  6946  func rewriteValueMIPS_OpOffPtr(v *Value, config *Config) bool {
  6947  	b := v.Block
  6948  	_ = b
  6949  	// match: (OffPtr [off] ptr:(SP))
  6950  	// cond:
  6951  	// result: (MOVWaddr [off] ptr)
  6952  	for {
  6953  		off := v.AuxInt
  6954  		ptr := v.Args[0]
  6955  		if ptr.Op != OpSP {
  6956  			break
  6957  		}
  6958  		v.reset(OpMIPSMOVWaddr)
  6959  		v.AuxInt = off
  6960  		v.AddArg(ptr)
  6961  		return true
  6962  	}
  6963  	// match: (OffPtr [off] ptr)
  6964  	// cond:
  6965  	// result: (ADDconst [off] ptr)
  6966  	for {
  6967  		off := v.AuxInt
  6968  		ptr := v.Args[0]
  6969  		v.reset(OpMIPSADDconst)
  6970  		v.AuxInt = off
  6971  		v.AddArg(ptr)
  6972  		return true
  6973  	}
  6974  }
  6975  func rewriteValueMIPS_OpOr16(v *Value, config *Config) bool {
  6976  	b := v.Block
  6977  	_ = b
  6978  	// match: (Or16 x y)
  6979  	// cond:
  6980  	// result: (OR x y)
  6981  	for {
  6982  		x := v.Args[0]
  6983  		y := v.Args[1]
  6984  		v.reset(OpMIPSOR)
  6985  		v.AddArg(x)
  6986  		v.AddArg(y)
  6987  		return true
  6988  	}
  6989  }
  6990  func rewriteValueMIPS_OpOr32(v *Value, config *Config) bool {
  6991  	b := v.Block
  6992  	_ = b
  6993  	// match: (Or32 x y)
  6994  	// cond:
  6995  	// result: (OR x y)
  6996  	for {
  6997  		x := v.Args[0]
  6998  		y := v.Args[1]
  6999  		v.reset(OpMIPSOR)
  7000  		v.AddArg(x)
  7001  		v.AddArg(y)
  7002  		return true
  7003  	}
  7004  }
  7005  func rewriteValueMIPS_OpOr8(v *Value, config *Config) bool {
  7006  	b := v.Block
  7007  	_ = b
  7008  	// match: (Or8 x y)
  7009  	// cond:
  7010  	// result: (OR x y)
  7011  	for {
  7012  		x := v.Args[0]
  7013  		y := v.Args[1]
  7014  		v.reset(OpMIPSOR)
  7015  		v.AddArg(x)
  7016  		v.AddArg(y)
  7017  		return true
  7018  	}
  7019  }
  7020  func rewriteValueMIPS_OpOrB(v *Value, config *Config) bool {
  7021  	b := v.Block
  7022  	_ = b
  7023  	// match: (OrB x y)
  7024  	// cond:
  7025  	// result: (OR x y)
  7026  	for {
  7027  		x := v.Args[0]
  7028  		y := v.Args[1]
  7029  		v.reset(OpMIPSOR)
  7030  		v.AddArg(x)
  7031  		v.AddArg(y)
  7032  		return true
  7033  	}
  7034  }
  7035  func rewriteValueMIPS_OpRsh16Ux16(v *Value, config *Config) bool {
  7036  	b := v.Block
  7037  	_ = b
  7038  	// match: (Rsh16Ux16 <t> x y)
  7039  	// cond:
  7040  	// result: (CMOVZ (SRL <t> (ZeroExt16to32 x) (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
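	// The hardware SRL honors only the low five bits of the shift amount, so
	// shifts of 32 or more need a fix-up: SGTUconst [32] y is 1 while the
	// zero-extended shift count is still below 32, and CMOVZ substitutes the
	// constant 0 once that guard becomes 0, i.e. for oversized shifts.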
  7041  	for {
  7042  		t := v.Type
  7043  		x := v.Args[0]
  7044  		y := v.Args[1]
  7045  		v.reset(OpMIPSCMOVZ)
  7046  		v0 := b.NewValue0(v.Line, OpMIPSSRL, t)
  7047  		v1 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
  7048  		v1.AddArg(x)
  7049  		v0.AddArg(v1)
  7050  		v2 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
  7051  		v2.AddArg(y)
  7052  		v0.AddArg(v2)
  7053  		v.AddArg(v0)
  7054  		v3 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  7055  		v3.AuxInt = 0
  7056  		v.AddArg(v3)
  7057  		v4 := b.NewValue0(v.Line, OpMIPSSGTUconst, config.fe.TypeBool())
  7058  		v4.AuxInt = 32
  7059  		v5 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
  7060  		v5.AddArg(y)
  7061  		v4.AddArg(v5)
  7062  		v.AddArg(v4)
  7063  		return true
  7064  	}
  7065  }
  7066  func rewriteValueMIPS_OpRsh16Ux32(v *Value, config *Config) bool {
  7067  	b := v.Block
  7068  	_ = b
  7069  	// match: (Rsh16Ux32 <t> x y)
  7070  	// cond:
  7071  	// result: (CMOVZ (SRL <t> (ZeroExt16to32 x) y) (MOVWconst [0]) (SGTUconst [32] y))
  7072  	for {
  7073  		t := v.Type
  7074  		x := v.Args[0]
  7075  		y := v.Args[1]
  7076  		v.reset(OpMIPSCMOVZ)
  7077  		v0 := b.NewValue0(v.Line, OpMIPSSRL, t)
  7078  		v1 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
  7079  		v1.AddArg(x)
  7080  		v0.AddArg(v1)
  7081  		v0.AddArg(y)
  7082  		v.AddArg(v0)
  7083  		v2 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  7084  		v2.AuxInt = 0
  7085  		v.AddArg(v2)
  7086  		v3 := b.NewValue0(v.Line, OpMIPSSGTUconst, config.fe.TypeBool())
  7087  		v3.AuxInt = 32
  7088  		v3.AddArg(y)
  7089  		v.AddArg(v3)
  7090  		return true
  7091  	}
  7092  }
  7093  func rewriteValueMIPS_OpRsh16Ux64(v *Value, config *Config) bool {
  7094  	b := v.Block
  7095  	_ = b
  7096  	// match: (Rsh16Ux64 x (Const64 [c]))
  7097  	// cond: uint32(c) < 16
  7098  	// result: (SRLconst (SLLconst <config.fe.TypeUInt32()> x [16]) [c+16])
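	// x lives in a 32-bit register whose upper 16 bits may not be meaningful,
	// so the value is shifted left by 16 to discard them and then logically
	// shifted right by c+16, which zero-extends the 16-bit value and applies
	// the requested shift in one pair of instructions; e.g. c = 3 becomes
	// (SRLconst (SLLconst x [16]) [19]).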
  7099  	for {
  7100  		x := v.Args[0]
  7101  		v_1 := v.Args[1]
  7102  		if v_1.Op != OpConst64 {
  7103  			break
  7104  		}
  7105  		c := v_1.AuxInt
  7106  		if !(uint32(c) < 16) {
  7107  			break
  7108  		}
  7109  		v.reset(OpMIPSSRLconst)
  7110  		v.AuxInt = c + 16
  7111  		v0 := b.NewValue0(v.Line, OpMIPSSLLconst, config.fe.TypeUInt32())
  7112  		v0.AuxInt = 16
  7113  		v0.AddArg(x)
  7114  		v.AddArg(v0)
  7115  		return true
  7116  	}
  7117  	// match: (Rsh16Ux64 _ (Const64 [c]))
  7118  	// cond: uint32(c) >= 16
  7119  	// result: (MOVWconst [0])
  7120  	for {
  7121  		v_1 := v.Args[1]
  7122  		if v_1.Op != OpConst64 {
  7123  			break
  7124  		}
  7125  		c := v_1.AuxInt
  7126  		if !(uint32(c) >= 16) {
  7127  			break
  7128  		}
  7129  		v.reset(OpMIPSMOVWconst)
  7130  		v.AuxInt = 0
  7131  		return true
  7132  	}
  7133  	return false
  7134  }
  7135  func rewriteValueMIPS_OpRsh16Ux8(v *Value, config *Config) bool {
  7136  	b := v.Block
  7137  	_ = b
  7138  	// match: (Rsh16Ux8 <t> x y)
  7139  	// cond:
  7140  	// result: (CMOVZ (SRL <t> (ZeroExt16to32 x) (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
  7141  	for {
  7142  		t := v.Type
  7143  		x := v.Args[0]
  7144  		y := v.Args[1]
  7145  		v.reset(OpMIPSCMOVZ)
  7146  		v0 := b.NewValue0(v.Line, OpMIPSSRL, t)
  7147  		v1 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
  7148  		v1.AddArg(x)
  7149  		v0.AddArg(v1)
  7150  		v2 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
  7151  		v2.AddArg(y)
  7152  		v0.AddArg(v2)
  7153  		v.AddArg(v0)
  7154  		v3 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  7155  		v3.AuxInt = 0
  7156  		v.AddArg(v3)
  7157  		v4 := b.NewValue0(v.Line, OpMIPSSGTUconst, config.fe.TypeBool())
  7158  		v4.AuxInt = 32
  7159  		v5 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
  7160  		v5.AddArg(y)
  7161  		v4.AddArg(v5)
  7162  		v.AddArg(v4)
  7163  		return true
  7164  	}
  7165  }
  7166  func rewriteValueMIPS_OpRsh16x16(v *Value, config *Config) bool {
  7167  	b := v.Block
  7168  	_ = b
  7169  	// match: (Rsh16x16 x y)
  7170  	// cond:
  7171  	// result: (SRA (SignExt16to32 x) ( CMOVZ <config.fe.TypeUInt32()> (ZeroExt16to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt16to32 y))))
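	// For the signed shift an out-of-range count must fill with the sign bit
	// rather than zero, so instead of forcing the result to 0 the CMOVZ forces
	// the shift amount to -1 (all ones) whenever the SGTUconst [32] guard is 0;
	// the machine SRA then uses the low five bits of that amount, i.e. a
	// shift by 31.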
  7172  	for {
  7173  		x := v.Args[0]
  7174  		y := v.Args[1]
  7175  		v.reset(OpMIPSSRA)
  7176  		v0 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
  7177  		v0.AddArg(x)
  7178  		v.AddArg(v0)
  7179  		v1 := b.NewValue0(v.Line, OpMIPSCMOVZ, config.fe.TypeUInt32())
  7180  		v2 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
  7181  		v2.AddArg(y)
  7182  		v1.AddArg(v2)
  7183  		v3 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  7184  		v3.AuxInt = -1
  7185  		v1.AddArg(v3)
  7186  		v4 := b.NewValue0(v.Line, OpMIPSSGTUconst, config.fe.TypeBool())
  7187  		v4.AuxInt = 32
  7188  		v5 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
  7189  		v5.AddArg(y)
  7190  		v4.AddArg(v5)
  7191  		v1.AddArg(v4)
  7192  		v.AddArg(v1)
  7193  		return true
  7194  	}
  7195  }
  7196  func rewriteValueMIPS_OpRsh16x32(v *Value, config *Config) bool {
  7197  	b := v.Block
  7198  	_ = b
  7199  	// match: (Rsh16x32 x y)
  7200  	// cond:
  7201  	// result: (SRA (SignExt16to32 x) ( CMOVZ <config.fe.TypeUInt32()> y (MOVWconst [-1]) (SGTUconst [32] y)))
  7202  	for {
  7203  		x := v.Args[0]
  7204  		y := v.Args[1]
  7205  		v.reset(OpMIPSSRA)
  7206  		v0 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
  7207  		v0.AddArg(x)
  7208  		v.AddArg(v0)
  7209  		v1 := b.NewValue0(v.Line, OpMIPSCMOVZ, config.fe.TypeUInt32())
  7210  		v1.AddArg(y)
  7211  		v2 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  7212  		v2.AuxInt = -1
  7213  		v1.AddArg(v2)
  7214  		v3 := b.NewValue0(v.Line, OpMIPSSGTUconst, config.fe.TypeBool())
  7215  		v3.AuxInt = 32
  7216  		v3.AddArg(y)
  7217  		v1.AddArg(v3)
  7218  		v.AddArg(v1)
  7219  		return true
  7220  	}
  7221  }
  7222  func rewriteValueMIPS_OpRsh16x64(v *Value, config *Config) bool {
  7223  	b := v.Block
  7224  	_ = b
  7225  	// match: (Rsh16x64 x (Const64 [c]))
  7226  	// cond: uint32(c) < 16
  7227  	// result: (SRAconst (SLLconst <config.fe.TypeUInt32()> x [16]) [c+16])
  7228  	for {
  7229  		x := v.Args[0]
  7230  		v_1 := v.Args[1]
  7231  		if v_1.Op != OpConst64 {
  7232  			break
  7233  		}
  7234  		c := v_1.AuxInt
  7235  		if !(uint32(c) < 16) {
  7236  			break
  7237  		}
  7238  		v.reset(OpMIPSSRAconst)
  7239  		v.AuxInt = c + 16
  7240  		v0 := b.NewValue0(v.Line, OpMIPSSLLconst, config.fe.TypeUInt32())
  7241  		v0.AuxInt = 16
  7242  		v0.AddArg(x)
  7243  		v.AddArg(v0)
  7244  		return true
  7245  	}
  7246  	// match: (Rsh16x64 x (Const64 [c]))
  7247  	// cond: uint32(c) >= 16
  7248  	// result: (SRAconst (SLLconst <config.fe.TypeUInt32()> x [16]) [31])
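	// With the count known to be at least 16, every result bit is a copy of
	// the sign bit: (SLLconst x [16]) places the 16-bit sign bit in bit 31,
	// and an arithmetic shift right by 31 smears it across the whole word,
	// giving 0 or -1 as appropriate.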
  7249  	for {
  7250  		x := v.Args[0]
  7251  		v_1 := v.Args[1]
  7252  		if v_1.Op != OpConst64 {
  7253  			break
  7254  		}
  7255  		c := v_1.AuxInt
  7256  		if !(uint32(c) >= 16) {
  7257  			break
  7258  		}
  7259  		v.reset(OpMIPSSRAconst)
  7260  		v.AuxInt = 31
  7261  		v0 := b.NewValue0(v.Line, OpMIPSSLLconst, config.fe.TypeUInt32())
  7262  		v0.AuxInt = 16
  7263  		v0.AddArg(x)
  7264  		v.AddArg(v0)
  7265  		return true
  7266  	}
  7267  	return false
  7268  }
  7269  func rewriteValueMIPS_OpRsh16x8(v *Value, config *Config) bool {
  7270  	b := v.Block
  7271  	_ = b
  7272  	// match: (Rsh16x8 x y)
  7273  	// cond:
  7274  	// result: (SRA (SignExt16to32 x) ( CMOVZ <config.fe.TypeUInt32()> (ZeroExt8to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt8to32 y))))
  7275  	for {
  7276  		x := v.Args[0]
  7277  		y := v.Args[1]
  7278  		v.reset(OpMIPSSRA)
  7279  		v0 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
  7280  		v0.AddArg(x)
  7281  		v.AddArg(v0)
  7282  		v1 := b.NewValue0(v.Line, OpMIPSCMOVZ, config.fe.TypeUInt32())
  7283  		v2 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
  7284  		v2.AddArg(y)
  7285  		v1.AddArg(v2)
  7286  		v3 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  7287  		v3.AuxInt = -1
  7288  		v1.AddArg(v3)
  7289  		v4 := b.NewValue0(v.Line, OpMIPSSGTUconst, config.fe.TypeBool())
  7290  		v4.AuxInt = 32
  7291  		v5 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
  7292  		v5.AddArg(y)
  7293  		v4.AddArg(v5)
  7294  		v1.AddArg(v4)
  7295  		v.AddArg(v1)
  7296  		return true
  7297  	}
  7298  }
  7299  func rewriteValueMIPS_OpRsh32Ux16(v *Value, config *Config) bool {
  7300  	b := v.Block
  7301  	_ = b
  7302  	// match: (Rsh32Ux16 <t> x y)
  7303  	// cond:
  7304  	// result: (CMOVZ (SRL <t> x (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
  7305  	for {
  7306  		t := v.Type
  7307  		x := v.Args[0]
  7308  		y := v.Args[1]
  7309  		v.reset(OpMIPSCMOVZ)
  7310  		v0 := b.NewValue0(v.Line, OpMIPSSRL, t)
  7311  		v0.AddArg(x)
  7312  		v1 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
  7313  		v1.AddArg(y)
  7314  		v0.AddArg(v1)
  7315  		v.AddArg(v0)
  7316  		v2 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  7317  		v2.AuxInt = 0
  7318  		v.AddArg(v2)
  7319  		v3 := b.NewValue0(v.Line, OpMIPSSGTUconst, config.fe.TypeBool())
  7320  		v3.AuxInt = 32
  7321  		v4 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
  7322  		v4.AddArg(y)
  7323  		v3.AddArg(v4)
  7324  		v.AddArg(v3)
  7325  		return true
  7326  	}
  7327  }
  7328  func rewriteValueMIPS_OpRsh32Ux32(v *Value, config *Config) bool {
  7329  	b := v.Block
  7330  	_ = b
  7331  	// match: (Rsh32Ux32 <t> x y)
  7332  	// cond:
  7333  	// result: (CMOVZ (SRL <t> x y) (MOVWconst [0]) (SGTUconst [32] y))
  7334  	for {
  7335  		t := v.Type
  7336  		x := v.Args[0]
  7337  		y := v.Args[1]
  7338  		v.reset(OpMIPSCMOVZ)
  7339  		v0 := b.NewValue0(v.Line, OpMIPSSRL, t)
  7340  		v0.AddArg(x)
  7341  		v0.AddArg(y)
  7342  		v.AddArg(v0)
  7343  		v1 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  7344  		v1.AuxInt = 0
  7345  		v.AddArg(v1)
  7346  		v2 := b.NewValue0(v.Line, OpMIPSSGTUconst, config.fe.TypeBool())
  7347  		v2.AuxInt = 32
  7348  		v2.AddArg(y)
  7349  		v.AddArg(v2)
  7350  		return true
  7351  	}
  7352  }
  7353  func rewriteValueMIPS_OpRsh32Ux64(v *Value, config *Config) bool {
  7354  	b := v.Block
  7355  	_ = b
  7356  	// match: (Rsh32Ux64 x (Const64 [c]))
  7357  	// cond: uint32(c) < 32
  7358  	// result: (SRLconst x [c])
  7359  	for {
  7360  		x := v.Args[0]
  7361  		v_1 := v.Args[1]
  7362  		if v_1.Op != OpConst64 {
  7363  			break
  7364  		}
  7365  		c := v_1.AuxInt
  7366  		if !(uint32(c) < 32) {
  7367  			break
  7368  		}
  7369  		v.reset(OpMIPSSRLconst)
  7370  		v.AuxInt = c
  7371  		v.AddArg(x)
  7372  		return true
  7373  	}
  7374  	// match: (Rsh32Ux64 _ (Const64 [c]))
  7375  	// cond: uint32(c) >= 32
  7376  	// result: (MOVWconst [0])
  7377  	for {
  7378  		v_1 := v.Args[1]
  7379  		if v_1.Op != OpConst64 {
  7380  			break
  7381  		}
  7382  		c := v_1.AuxInt
  7383  		if !(uint32(c) >= 32) {
  7384  			break
  7385  		}
  7386  		v.reset(OpMIPSMOVWconst)
  7387  		v.AuxInt = 0
  7388  		return true
  7389  	}
  7390  	return false
  7391  }
  7392  func rewriteValueMIPS_OpRsh32Ux8(v *Value, config *Config) bool {
  7393  	b := v.Block
  7394  	_ = b
  7395  	// match: (Rsh32Ux8 <t> x y)
  7396  	// cond:
  7397  	// result: (CMOVZ (SRL <t> x (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
  7398  	for {
  7399  		t := v.Type
  7400  		x := v.Args[0]
  7401  		y := v.Args[1]
  7402  		v.reset(OpMIPSCMOVZ)
  7403  		v0 := b.NewValue0(v.Line, OpMIPSSRL, t)
  7404  		v0.AddArg(x)
  7405  		v1 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
  7406  		v1.AddArg(y)
  7407  		v0.AddArg(v1)
  7408  		v.AddArg(v0)
  7409  		v2 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  7410  		v2.AuxInt = 0
  7411  		v.AddArg(v2)
  7412  		v3 := b.NewValue0(v.Line, OpMIPSSGTUconst, config.fe.TypeBool())
  7413  		v3.AuxInt = 32
  7414  		v4 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
  7415  		v4.AddArg(y)
  7416  		v3.AddArg(v4)
  7417  		v.AddArg(v3)
  7418  		return true
  7419  	}
  7420  }
  7421  func rewriteValueMIPS_OpRsh32x16(v *Value, config *Config) bool {
  7422  	b := v.Block
  7423  	_ = b
  7424  	// match: (Rsh32x16 x y)
  7425  	// cond:
  7426  	// result: (SRA x ( CMOVZ <config.fe.TypeUInt32()> (ZeroExt16to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt16to32 y))))
  7427  	for {
  7428  		x := v.Args[0]
  7429  		y := v.Args[1]
  7430  		v.reset(OpMIPSSRA)
  7431  		v.AddArg(x)
  7432  		v0 := b.NewValue0(v.Line, OpMIPSCMOVZ, config.fe.TypeUInt32())
  7433  		v1 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
  7434  		v1.AddArg(y)
  7435  		v0.AddArg(v1)
  7436  		v2 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  7437  		v2.AuxInt = -1
  7438  		v0.AddArg(v2)
  7439  		v3 := b.NewValue0(v.Line, OpMIPSSGTUconst, config.fe.TypeBool())
  7440  		v3.AuxInt = 32
  7441  		v4 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
  7442  		v4.AddArg(y)
  7443  		v3.AddArg(v4)
  7444  		v0.AddArg(v3)
  7445  		v.AddArg(v0)
  7446  		return true
  7447  	}
  7448  }
  7449  func rewriteValueMIPS_OpRsh32x32(v *Value, config *Config) bool {
  7450  	b := v.Block
  7451  	_ = b
  7452  	// match: (Rsh32x32 x y)
  7453  	// cond:
  7454  	// result: (SRA x ( CMOVZ <config.fe.TypeUInt32()> y (MOVWconst [-1]) (SGTUconst [32] y)))
  7455  	for {
  7456  		x := v.Args[0]
  7457  		y := v.Args[1]
  7458  		v.reset(OpMIPSSRA)
  7459  		v.AddArg(x)
  7460  		v0 := b.NewValue0(v.Line, OpMIPSCMOVZ, config.fe.TypeUInt32())
  7461  		v0.AddArg(y)
  7462  		v1 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  7463  		v1.AuxInt = -1
  7464  		v0.AddArg(v1)
  7465  		v2 := b.NewValue0(v.Line, OpMIPSSGTUconst, config.fe.TypeBool())
  7466  		v2.AuxInt = 32
  7467  		v2.AddArg(y)
  7468  		v0.AddArg(v2)
  7469  		v.AddArg(v0)
  7470  		return true
  7471  	}
  7472  }
  7473  func rewriteValueMIPS_OpRsh32x64(v *Value, config *Config) bool {
  7474  	b := v.Block
  7475  	_ = b
  7476  	// match: (Rsh32x64 x (Const64 [c]))
  7477  	// cond: uint32(c) < 32
  7478  	// result: (SRAconst x [c])
  7479  	for {
  7480  		x := v.Args[0]
  7481  		v_1 := v.Args[1]
  7482  		if v_1.Op != OpConst64 {
  7483  			break
  7484  		}
  7485  		c := v_1.AuxInt
  7486  		if !(uint32(c) < 32) {
  7487  			break
  7488  		}
  7489  		v.reset(OpMIPSSRAconst)
  7490  		v.AuxInt = c
  7491  		v.AddArg(x)
  7492  		return true
  7493  	}
  7494  	// match: (Rsh32x64 x (Const64 [c]))
  7495  	// cond: uint32(c) >= 32
  7496  	// result: (SRAconst x [31])
  7497  	for {
  7498  		x := v.Args[0]
  7499  		v_1 := v.Args[1]
  7500  		if v_1.Op != OpConst64 {
  7501  			break
  7502  		}
  7503  		c := v_1.AuxInt
  7504  		if !(uint32(c) >= 32) {
  7505  			break
  7506  		}
  7507  		v.reset(OpMIPSSRAconst)
  7508  		v.AuxInt = 31
  7509  		v.AddArg(x)
  7510  		return true
  7511  	}
  7512  	return false
  7513  }
  7514  func rewriteValueMIPS_OpRsh32x8(v *Value, config *Config) bool {
  7515  	b := v.Block
  7516  	_ = b
  7517  	// match: (Rsh32x8 x y)
  7518  	// cond:
  7519  	// result: (SRA x ( CMOVZ <config.fe.TypeUInt32()> (ZeroExt8to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt8to32 y))))
  7520  	for {
  7521  		x := v.Args[0]
  7522  		y := v.Args[1]
  7523  		v.reset(OpMIPSSRA)
  7524  		v.AddArg(x)
  7525  		v0 := b.NewValue0(v.Line, OpMIPSCMOVZ, config.fe.TypeUInt32())
  7526  		v1 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
  7527  		v1.AddArg(y)
  7528  		v0.AddArg(v1)
  7529  		v2 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  7530  		v2.AuxInt = -1
  7531  		v0.AddArg(v2)
  7532  		v3 := b.NewValue0(v.Line, OpMIPSSGTUconst, config.fe.TypeBool())
  7533  		v3.AuxInt = 32
  7534  		v4 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
  7535  		v4.AddArg(y)
  7536  		v3.AddArg(v4)
  7537  		v0.AddArg(v3)
  7538  		v.AddArg(v0)
  7539  		return true
  7540  	}
  7541  }
  7542  func rewriteValueMIPS_OpRsh8Ux16(v *Value, config *Config) bool {
  7543  	b := v.Block
  7544  	_ = b
  7545  	// match: (Rsh8Ux16 <t> x y)
  7546  	// cond:
  7547  	// result: (CMOVZ (SRL <t> (ZeroExt8to32 x) (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
  7548  	for {
  7549  		t := v.Type
  7550  		x := v.Args[0]
  7551  		y := v.Args[1]
  7552  		v.reset(OpMIPSCMOVZ)
  7553  		v0 := b.NewValue0(v.Line, OpMIPSSRL, t)
  7554  		v1 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
  7555  		v1.AddArg(x)
  7556  		v0.AddArg(v1)
  7557  		v2 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
  7558  		v2.AddArg(y)
  7559  		v0.AddArg(v2)
  7560  		v.AddArg(v0)
  7561  		v3 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  7562  		v3.AuxInt = 0
  7563  		v.AddArg(v3)
  7564  		v4 := b.NewValue0(v.Line, OpMIPSSGTUconst, config.fe.TypeBool())
  7565  		v4.AuxInt = 32
  7566  		v5 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
  7567  		v5.AddArg(y)
  7568  		v4.AddArg(v5)
  7569  		v.AddArg(v4)
  7570  		return true
  7571  	}
  7572  }
  7573  func rewriteValueMIPS_OpRsh8Ux32(v *Value, config *Config) bool {
  7574  	b := v.Block
  7575  	_ = b
  7576  	// match: (Rsh8Ux32 <t> x y)
  7577  	// cond:
  7578  	// result: (CMOVZ (SRL <t> (ZeroExt8to32 x) y) (MOVWconst [0]) (SGTUconst [32] y))
  7579  	for {
  7580  		t := v.Type
  7581  		x := v.Args[0]
  7582  		y := v.Args[1]
  7583  		v.reset(OpMIPSCMOVZ)
  7584  		v0 := b.NewValue0(v.Line, OpMIPSSRL, t)
  7585  		v1 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
  7586  		v1.AddArg(x)
  7587  		v0.AddArg(v1)
  7588  		v0.AddArg(y)
  7589  		v.AddArg(v0)
  7590  		v2 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  7591  		v2.AuxInt = 0
  7592  		v.AddArg(v2)
  7593  		v3 := b.NewValue0(v.Line, OpMIPSSGTUconst, config.fe.TypeBool())
  7594  		v3.AuxInt = 32
  7595  		v3.AddArg(y)
  7596  		v.AddArg(v3)
  7597  		return true
  7598  	}
  7599  }
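        // Unsigned 8-bit shifts by a constant first move the byte to the top of the
        // word with SLLconst [24], clearing whatever sits in the unused upper bits,
        // then shift right by c+24 to zero-extend and shift in a single step.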
  7600  func rewriteValueMIPS_OpRsh8Ux64(v *Value, config *Config) bool {
  7601  	b := v.Block
  7602  	_ = b
  7603  	// match: (Rsh8Ux64 x (Const64 [c]))
  7604  	// cond: uint32(c) < 8
  7605  	// result: (SRLconst (SLLconst <config.fe.TypeUInt32()> x [24]) [c+24])
  7606  	for {
  7607  		x := v.Args[0]
  7608  		v_1 := v.Args[1]
  7609  		if v_1.Op != OpConst64 {
  7610  			break
  7611  		}
  7612  		c := v_1.AuxInt
  7613  		if !(uint32(c) < 8) {
  7614  			break
  7615  		}
  7616  		v.reset(OpMIPSSRLconst)
  7617  		v.AuxInt = c + 24
  7618  		v0 := b.NewValue0(v.Line, OpMIPSSLLconst, config.fe.TypeUInt32())
  7619  		v0.AuxInt = 24
  7620  		v0.AddArg(x)
  7621  		v.AddArg(v0)
  7622  		return true
  7623  	}
  7624  	// match: (Rsh8Ux64 _ (Const64 [c]))
  7625  	// cond: uint32(c) >= 8
  7626  	// result: (MOVWconst [0])
  7627  	for {
  7628  		v_1 := v.Args[1]
  7629  		if v_1.Op != OpConst64 {
  7630  			break
  7631  		}
  7632  		c := v_1.AuxInt
  7633  		if !(uint32(c) >= 8) {
  7634  			break
  7635  		}
  7636  		v.reset(OpMIPSMOVWconst)
  7637  		v.AuxInt = 0
  7638  		return true
  7639  	}
  7640  	return false
  7641  }
  7642  func rewriteValueMIPS_OpRsh8Ux8(v *Value, config *Config) bool {
  7643  	b := v.Block
  7644  	_ = b
  7645  	// match: (Rsh8Ux8 <t> x y)
  7646  	// cond:
  7647  	// result: (CMOVZ (SRL <t> (ZeroExt8to32 x) (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
  7648  	for {
  7649  		t := v.Type
  7650  		x := v.Args[0]
  7651  		y := v.Args[1]
  7652  		v.reset(OpMIPSCMOVZ)
  7653  		v0 := b.NewValue0(v.Line, OpMIPSSRL, t)
  7654  		v1 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
  7655  		v1.AddArg(x)
  7656  		v0.AddArg(v1)
  7657  		v2 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
  7658  		v2.AddArg(y)
  7659  		v0.AddArg(v2)
  7660  		v.AddArg(v0)
  7661  		v3 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  7662  		v3.AuxInt = 0
  7663  		v.AddArg(v3)
  7664  		v4 := b.NewValue0(v.Line, OpMIPSSGTUconst, config.fe.TypeBool())
  7665  		v4.AuxInt = 32
  7666  		v5 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
  7667  		v5.AddArg(y)
  7668  		v4.AddArg(v5)
  7669  		v.AddArg(v4)
  7670  		return true
  7671  	}
  7672  }
  7673  func rewriteValueMIPS_OpRsh8x16(v *Value, config *Config) bool {
  7674  	b := v.Block
  7675  	_ = b
  7676  	// match: (Rsh8x16 x y)
  7677  	// cond:
  7678  	// result: (SRA (SignExt16to32 x) ( CMOVZ <config.fe.TypeUInt32()> (ZeroExt16to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt16to32 y))))
  7679  	for {
  7680  		x := v.Args[0]
  7681  		y := v.Args[1]
  7682  		v.reset(OpMIPSSRA)
  7683  		v0 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
  7684  		v0.AddArg(x)
  7685  		v.AddArg(v0)
  7686  		v1 := b.NewValue0(v.Line, OpMIPSCMOVZ, config.fe.TypeUInt32())
  7687  		v2 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
  7688  		v2.AddArg(y)
  7689  		v1.AddArg(v2)
  7690  		v3 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  7691  		v3.AuxInt = -1
  7692  		v1.AddArg(v3)
  7693  		v4 := b.NewValue0(v.Line, OpMIPSSGTUconst, config.fe.TypeBool())
  7694  		v4.AuxInt = 32
  7695  		v5 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
  7696  		v5.AddArg(y)
  7697  		v4.AddArg(v5)
  7698  		v1.AddArg(v4)
  7699  		v.AddArg(v1)
  7700  		return true
  7701  	}
  7702  }
  7703  func rewriteValueMIPS_OpRsh8x32(v *Value, config *Config) bool {
  7704  	b := v.Block
  7705  	_ = b
  7706  	// match: (Rsh8x32 x y)
  7707  	// cond:
  7708  	// result: (SRA (SignExt16to32 x) ( CMOVZ <config.fe.TypeUInt32()> y (MOVWconst [-1]) (SGTUconst [32] y)))
  7709  	for {
  7710  		x := v.Args[0]
  7711  		y := v.Args[1]
  7712  		v.reset(OpMIPSSRA)
  7713  		v0 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
  7714  		v0.AddArg(x)
  7715  		v.AddArg(v0)
  7716  		v1 := b.NewValue0(v.Line, OpMIPSCMOVZ, config.fe.TypeUInt32())
  7717  		v1.AddArg(y)
  7718  		v2 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  7719  		v2.AuxInt = -1
  7720  		v1.AddArg(v2)
  7721  		v3 := b.NewValue0(v.Line, OpMIPSSGTUconst, config.fe.TypeBool())
  7722  		v3.AuxInt = 32
  7723  		v3.AddArg(y)
  7724  		v1.AddArg(v3)
  7725  		v.AddArg(v1)
  7726  		return true
  7727  	}
  7728  }
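        // The signed 8-bit constant shifts mirror the unsigned case above, using
        // SRAconst so that bit 7 is replicated; shift counts of 8 or more saturate
        // to 31, leaving only copies of the sign bit.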
  7729  func rewriteValueMIPS_OpRsh8x64(v *Value, config *Config) bool {
  7730  	b := v.Block
  7731  	_ = b
  7732  	// match: (Rsh8x64 x (Const64 [c]))
  7733  	// cond: uint32(c) < 8
  7734  	// result: (SRAconst (SLLconst <config.fe.TypeUInt32()> x [24]) [c+24])
  7735  	for {
  7736  		x := v.Args[0]
  7737  		v_1 := v.Args[1]
  7738  		if v_1.Op != OpConst64 {
  7739  			break
  7740  		}
  7741  		c := v_1.AuxInt
  7742  		if !(uint32(c) < 8) {
  7743  			break
  7744  		}
  7745  		v.reset(OpMIPSSRAconst)
  7746  		v.AuxInt = c + 24
  7747  		v0 := b.NewValue0(v.Line, OpMIPSSLLconst, config.fe.TypeUInt32())
  7748  		v0.AuxInt = 24
  7749  		v0.AddArg(x)
  7750  		v.AddArg(v0)
  7751  		return true
  7752  	}
  7753  	// match: (Rsh8x64 x (Const64 [c]))
  7754  	// cond: uint32(c) >= 8
  7755  	// result: (SRAconst (SLLconst <config.fe.TypeUInt32()> x [24]) [31])
  7756  	for {
  7757  		x := v.Args[0]
  7758  		v_1 := v.Args[1]
  7759  		if v_1.Op != OpConst64 {
  7760  			break
  7761  		}
  7762  		c := v_1.AuxInt
  7763  		if !(uint32(c) >= 8) {
  7764  			break
  7765  		}
  7766  		v.reset(OpMIPSSRAconst)
  7767  		v.AuxInt = 31
  7768  		v0 := b.NewValue0(v.Line, OpMIPSSLLconst, config.fe.TypeUInt32())
  7769  		v0.AuxInt = 24
  7770  		v0.AddArg(x)
  7771  		v.AddArg(v0)
  7772  		return true
  7773  	}
  7774  	return false
  7775  }
  7776  func rewriteValueMIPS_OpRsh8x8(v *Value, config *Config) bool {
  7777  	b := v.Block
  7778  	_ = b
  7779  	// match: (Rsh8x8 x y)
  7780  	// cond:
  7781  	// result: (SRA (SignExt16to32 x) ( CMOVZ <config.fe.TypeUInt32()> (ZeroExt8to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt8to32 y))))
  7782  	for {
  7783  		x := v.Args[0]
  7784  		y := v.Args[1]
  7785  		v.reset(OpMIPSSRA)
  7786  		v0 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
  7787  		v0.AddArg(x)
  7788  		v.AddArg(v0)
  7789  		v1 := b.NewValue0(v.Line, OpMIPSCMOVZ, config.fe.TypeUInt32())
  7790  		v2 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
  7791  		v2.AddArg(y)
  7792  		v1.AddArg(v2)
  7793  		v3 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  7794  		v3.AuxInt = -1
  7795  		v1.AddArg(v3)
  7796  		v4 := b.NewValue0(v.Line, OpMIPSSGTUconst, config.fe.TypeBool())
  7797  		v4.AuxInt = 32
  7798  		v5 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
  7799  		v5.AddArg(y)
  7800  		v4.AddArg(v5)
  7801  		v1.AddArg(v4)
  7802  		v.AddArg(v1)
  7803  		return true
  7804  	}
  7805  }
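        // MULTU, DIV and DIVU produce a two-result tuple: Select0 is the HI register
        // (high 32 bits of the product, or the remainder) and Select1 is the LO
        // register (low 32 bits of the product, or the quotient). The rules below
        // canonicalize constants to the first operand, fold fully constant operands,
        // and strength-reduce multiplication by 0, 1, -1 and powers of two; Select0
        // of a power-of-two multiply is the high word, hence the SRLconst by
        // 32-log2(c).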
  7806  func rewriteValueMIPS_OpSelect0(v *Value, config *Config) bool {
  7807  	b := v.Block
  7808  	_ = b
  7809  	// match: (Select0 (Add32carry <t> x y))
  7810  	// cond:
  7811  	// result: (ADD <t.FieldType(0)> x y)
  7812  	for {
  7813  		v_0 := v.Args[0]
  7814  		if v_0.Op != OpAdd32carry {
  7815  			break
  7816  		}
  7817  		t := v_0.Type
  7818  		x := v_0.Args[0]
  7819  		y := v_0.Args[1]
  7820  		v.reset(OpMIPSADD)
  7821  		v.Type = t.FieldType(0)
  7822  		v.AddArg(x)
  7823  		v.AddArg(y)
  7824  		return true
  7825  	}
  7826  	// match: (Select0 (Sub32carry <t> x y))
  7827  	// cond:
  7828  	// result: (SUB <t.FieldType(0)> x y)
  7829  	for {
  7830  		v_0 := v.Args[0]
  7831  		if v_0.Op != OpSub32carry {
  7832  			break
  7833  		}
  7834  		t := v_0.Type
  7835  		x := v_0.Args[0]
  7836  		y := v_0.Args[1]
  7837  		v.reset(OpMIPSSUB)
  7838  		v.Type = t.FieldType(0)
  7839  		v.AddArg(x)
  7840  		v.AddArg(y)
  7841  		return true
  7842  	}
  7843  	// match: (Select0 (MULTU x (MOVWconst [c])))
  7844  	// cond: x.Op != OpMIPSMOVWconst
  7845  	// result: (Select0 (MULTU (MOVWconst [c]) x ))
  7846  	for {
  7847  		v_0 := v.Args[0]
  7848  		if v_0.Op != OpMIPSMULTU {
  7849  			break
  7850  		}
  7851  		x := v_0.Args[0]
  7852  		v_0_1 := v_0.Args[1]
  7853  		if v_0_1.Op != OpMIPSMOVWconst {
  7854  			break
  7855  		}
  7856  		c := v_0_1.AuxInt
  7857  		if !(x.Op != OpMIPSMOVWconst) {
  7858  			break
  7859  		}
  7860  		v.reset(OpSelect0)
  7861  		v0 := b.NewValue0(v.Line, OpMIPSMULTU, MakeTuple(config.fe.TypeUInt32(), config.fe.TypeUInt32()))
  7862  		v1 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  7863  		v1.AuxInt = c
  7864  		v0.AddArg(v1)
  7865  		v0.AddArg(x)
  7866  		v.AddArg(v0)
  7867  		return true
  7868  	}
  7869  	// match: (Select0 (MULTU (MOVWconst [0]) _ ))
  7870  	// cond:
  7871  	// result: (MOVWconst [0])
  7872  	for {
  7873  		v_0 := v.Args[0]
  7874  		if v_0.Op != OpMIPSMULTU {
  7875  			break
  7876  		}
  7877  		v_0_0 := v_0.Args[0]
  7878  		if v_0_0.Op != OpMIPSMOVWconst {
  7879  			break
  7880  		}
  7881  		if v_0_0.AuxInt != 0 {
  7882  			break
  7883  		}
  7884  		v.reset(OpMIPSMOVWconst)
  7885  		v.AuxInt = 0
  7886  		return true
  7887  	}
  7888  	// match: (Select0 (MULTU (MOVWconst [1]) _ ))
  7889  	// cond:
  7890  	// result: (MOVWconst [0])
  7891  	for {
  7892  		v_0 := v.Args[0]
  7893  		if v_0.Op != OpMIPSMULTU {
  7894  			break
  7895  		}
  7896  		v_0_0 := v_0.Args[0]
  7897  		if v_0_0.Op != OpMIPSMOVWconst {
  7898  			break
  7899  		}
  7900  		if v_0_0.AuxInt != 1 {
  7901  			break
  7902  		}
  7903  		v.reset(OpMIPSMOVWconst)
  7904  		v.AuxInt = 0
  7905  		return true
  7906  	}
  7907  	// match: (Select0 (MULTU (MOVWconst [-1]) x ))
  7908  	// cond:
  7909  	// result: (CMOVZ (ADDconst <x.Type> [-1] x) (MOVWconst [0]) x)
  7910  	for {
  7911  		v_0 := v.Args[0]
  7912  		if v_0.Op != OpMIPSMULTU {
  7913  			break
  7914  		}
  7915  		v_0_0 := v_0.Args[0]
  7916  		if v_0_0.Op != OpMIPSMOVWconst {
  7917  			break
  7918  		}
  7919  		if v_0_0.AuxInt != -1 {
  7920  			break
  7921  		}
  7922  		x := v_0.Args[1]
  7923  		v.reset(OpMIPSCMOVZ)
  7924  		v0 := b.NewValue0(v.Line, OpMIPSADDconst, x.Type)
  7925  		v0.AuxInt = -1
  7926  		v0.AddArg(x)
  7927  		v.AddArg(v0)
  7928  		v1 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  7929  		v1.AuxInt = 0
  7930  		v.AddArg(v1)
  7931  		v.AddArg(x)
  7932  		return true
  7933  	}
  7934  	// match: (Select0 (MULTU (MOVWconst [c]) x ))
  7935  	// cond: isPowerOfTwo(int64(uint32(c)))
  7936  	// result: (SRLconst [32-log2(int64(uint32(c)))] x)
  7937  	for {
  7938  		v_0 := v.Args[0]
  7939  		if v_0.Op != OpMIPSMULTU {
  7940  			break
  7941  		}
  7942  		v_0_0 := v_0.Args[0]
  7943  		if v_0_0.Op != OpMIPSMOVWconst {
  7944  			break
  7945  		}
  7946  		c := v_0_0.AuxInt
  7947  		x := v_0.Args[1]
  7948  		if !(isPowerOfTwo(int64(uint32(c)))) {
  7949  			break
  7950  		}
  7951  		v.reset(OpMIPSSRLconst)
  7952  		v.AuxInt = 32 - log2(int64(uint32(c)))
  7953  		v.AddArg(x)
  7954  		return true
  7955  	}
  7956  	// match: (Select0 (MULTU  (MOVWconst [c]) (MOVWconst [d])))
  7957  	// cond:
  7958  	// result: (MOVWconst [(c*d)>>32])
  7959  	for {
  7960  		v_0 := v.Args[0]
  7961  		if v_0.Op != OpMIPSMULTU {
  7962  			break
  7963  		}
  7964  		v_0_0 := v_0.Args[0]
  7965  		if v_0_0.Op != OpMIPSMOVWconst {
  7966  			break
  7967  		}
  7968  		c := v_0_0.AuxInt
  7969  		v_0_1 := v_0.Args[1]
  7970  		if v_0_1.Op != OpMIPSMOVWconst {
  7971  			break
  7972  		}
  7973  		d := v_0_1.AuxInt
  7974  		v.reset(OpMIPSMOVWconst)
  7975  		v.AuxInt = (c * d) >> 32
  7976  		return true
  7977  	}
  7978  	// match: (Select0 (DIV  (MOVWconst [c]) (MOVWconst [d])))
  7979  	// cond:
  7980  	// result: (MOVWconst [int64(int32(c)%int32(d))])
  7981  	for {
  7982  		v_0 := v.Args[0]
  7983  		if v_0.Op != OpMIPSDIV {
  7984  			break
  7985  		}
  7986  		v_0_0 := v_0.Args[0]
  7987  		if v_0_0.Op != OpMIPSMOVWconst {
  7988  			break
  7989  		}
  7990  		c := v_0_0.AuxInt
  7991  		v_0_1 := v_0.Args[1]
  7992  		if v_0_1.Op != OpMIPSMOVWconst {
  7993  			break
  7994  		}
  7995  		d := v_0_1.AuxInt
  7996  		v.reset(OpMIPSMOVWconst)
  7997  		v.AuxInt = int64(int32(c) % int32(d))
  7998  		return true
  7999  	}
  8000  	// match: (Select0 (DIVU (MOVWconst [c]) (MOVWconst [d])))
  8001  	// cond:
  8002  	// result: (MOVWconst [int64(int32(uint32(c)%uint32(d)))])
  8003  	for {
  8004  		v_0 := v.Args[0]
  8005  		if v_0.Op != OpMIPSDIVU {
  8006  			break
  8007  		}
  8008  		v_0_0 := v_0.Args[0]
  8009  		if v_0_0.Op != OpMIPSMOVWconst {
  8010  			break
  8011  		}
  8012  		c := v_0_0.AuxInt
  8013  		v_0_1 := v_0.Args[1]
  8014  		if v_0_1.Op != OpMIPSMOVWconst {
  8015  			break
  8016  		}
  8017  		d := v_0_1.AuxInt
  8018  		v.reset(OpMIPSMOVWconst)
  8019  		v.AuxInt = int64(int32(uint32(c) % uint32(d)))
  8020  		return true
  8021  	}
  8022  	return false
  8023  }
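        // Select1 (the LO register) gets the complementary rewrites: multiplication
        // by 1 is the operand itself, by -1 is NEG, by a power of two becomes
        // SLLconst, and fully constant MULTU/DIV/DIVU results fold to MOVWconst.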
  8024  func rewriteValueMIPS_OpSelect1(v *Value, config *Config) bool {
  8025  	b := v.Block
  8026  	_ = b
  8027  	// match: (Select1 (Add32carry <t> x y))
  8028  	// cond:
  8029  	// result: (SGTU <config.fe.TypeBool()> x (ADD <t.FieldType(0)> x y))
  8030  	for {
  8031  		v_0 := v.Args[0]
  8032  		if v_0.Op != OpAdd32carry {
  8033  			break
  8034  		}
  8035  		t := v_0.Type
  8036  		x := v_0.Args[0]
  8037  		y := v_0.Args[1]
  8038  		v.reset(OpMIPSSGTU)
  8039  		v.Type = config.fe.TypeBool()
  8040  		v.AddArg(x)
  8041  		v0 := b.NewValue0(v.Line, OpMIPSADD, t.FieldType(0))
  8042  		v0.AddArg(x)
  8043  		v0.AddArg(y)
  8044  		v.AddArg(v0)
  8045  		return true
  8046  	}
  8047  	// match: (Select1 (Sub32carry <t> x y))
  8048  	// cond:
  8049  	// result: (SGTU <config.fe.TypeBool()> (SUB <t.FieldType(0)> x y) x)
  8050  	for {
  8051  		v_0 := v.Args[0]
  8052  		if v_0.Op != OpSub32carry {
  8053  			break
  8054  		}
  8055  		t := v_0.Type
  8056  		x := v_0.Args[0]
  8057  		y := v_0.Args[1]
  8058  		v.reset(OpMIPSSGTU)
  8059  		v.Type = config.fe.TypeBool()
  8060  		v0 := b.NewValue0(v.Line, OpMIPSSUB, t.FieldType(0))
  8061  		v0.AddArg(x)
  8062  		v0.AddArg(y)
  8063  		v.AddArg(v0)
  8064  		v.AddArg(x)
  8065  		return true
  8066  	}
  8067  	// match: (Select1 (MULTU x (MOVWconst [c])))
  8068  	// cond: x.Op != OpMIPSMOVWconst
  8069  	// result: (Select1 (MULTU (MOVWconst [c]) x ))
  8070  	for {
  8071  		v_0 := v.Args[0]
  8072  		if v_0.Op != OpMIPSMULTU {
  8073  			break
  8074  		}
  8075  		x := v_0.Args[0]
  8076  		v_0_1 := v_0.Args[1]
  8077  		if v_0_1.Op != OpMIPSMOVWconst {
  8078  			break
  8079  		}
  8080  		c := v_0_1.AuxInt
  8081  		if !(x.Op != OpMIPSMOVWconst) {
  8082  			break
  8083  		}
  8084  		v.reset(OpSelect1)
  8085  		v0 := b.NewValue0(v.Line, OpMIPSMULTU, MakeTuple(config.fe.TypeUInt32(), config.fe.TypeUInt32()))
  8086  		v1 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  8087  		v1.AuxInt = c
  8088  		v0.AddArg(v1)
  8089  		v0.AddArg(x)
  8090  		v.AddArg(v0)
  8091  		return true
  8092  	}
  8093  	// match: (Select1 (MULTU (MOVWconst [0]) _ ))
  8094  	// cond:
  8095  	// result: (MOVWconst [0])
  8096  	for {
  8097  		v_0 := v.Args[0]
  8098  		if v_0.Op != OpMIPSMULTU {
  8099  			break
  8100  		}
  8101  		v_0_0 := v_0.Args[0]
  8102  		if v_0_0.Op != OpMIPSMOVWconst {
  8103  			break
  8104  		}
  8105  		if v_0_0.AuxInt != 0 {
  8106  			break
  8107  		}
  8108  		v.reset(OpMIPSMOVWconst)
  8109  		v.AuxInt = 0
  8110  		return true
  8111  	}
  8112  	// match: (Select1 (MULTU (MOVWconst [1]) x ))
  8113  	// cond:
  8114  	// result: x
  8115  	for {
  8116  		v_0 := v.Args[0]
  8117  		if v_0.Op != OpMIPSMULTU {
  8118  			break
  8119  		}
  8120  		v_0_0 := v_0.Args[0]
  8121  		if v_0_0.Op != OpMIPSMOVWconst {
  8122  			break
  8123  		}
  8124  		if v_0_0.AuxInt != 1 {
  8125  			break
  8126  		}
  8127  		x := v_0.Args[1]
  8128  		v.reset(OpCopy)
  8129  		v.Type = x.Type
  8130  		v.AddArg(x)
  8131  		return true
  8132  	}
  8133  	// match: (Select1 (MULTU (MOVWconst [-1]) x ))
  8134  	// cond:
  8135  	// result: (NEG <x.Type> x)
  8136  	for {
  8137  		v_0 := v.Args[0]
  8138  		if v_0.Op != OpMIPSMULTU {
  8139  			break
  8140  		}
  8141  		v_0_0 := v_0.Args[0]
  8142  		if v_0_0.Op != OpMIPSMOVWconst {
  8143  			break
  8144  		}
  8145  		if v_0_0.AuxInt != -1 {
  8146  			break
  8147  		}
  8148  		x := v_0.Args[1]
  8149  		v.reset(OpMIPSNEG)
  8150  		v.Type = x.Type
  8151  		v.AddArg(x)
  8152  		return true
  8153  	}
  8154  	// match: (Select1 (MULTU (MOVWconst [c]) x ))
  8155  	// cond: isPowerOfTwo(int64(uint32(c)))
  8156  	// result: (SLLconst [log2(int64(uint32(c)))] x)
  8157  	for {
  8158  		v_0 := v.Args[0]
  8159  		if v_0.Op != OpMIPSMULTU {
  8160  			break
  8161  		}
  8162  		v_0_0 := v_0.Args[0]
  8163  		if v_0_0.Op != OpMIPSMOVWconst {
  8164  			break
  8165  		}
  8166  		c := v_0_0.AuxInt
  8167  		x := v_0.Args[1]
  8168  		if !(isPowerOfTwo(int64(uint32(c)))) {
  8169  			break
  8170  		}
  8171  		v.reset(OpMIPSSLLconst)
  8172  		v.AuxInt = log2(int64(uint32(c)))
  8173  		v.AddArg(x)
  8174  		return true
  8175  	}
  8176  	// match: (Select1 (MULTU  (MOVWconst [c]) (MOVWconst [d])))
  8177  	// cond:
  8178  	// result: (MOVWconst [int64(int32(uint32(c)*uint32(d)))])
  8179  	for {
  8180  		v_0 := v.Args[0]
  8181  		if v_0.Op != OpMIPSMULTU {
  8182  			break
  8183  		}
  8184  		v_0_0 := v_0.Args[0]
  8185  		if v_0_0.Op != OpMIPSMOVWconst {
  8186  			break
  8187  		}
  8188  		c := v_0_0.AuxInt
  8189  		v_0_1 := v_0.Args[1]
  8190  		if v_0_1.Op != OpMIPSMOVWconst {
  8191  			break
  8192  		}
  8193  		d := v_0_1.AuxInt
  8194  		v.reset(OpMIPSMOVWconst)
  8195  		v.AuxInt = int64(int32(uint32(c) * uint32(d)))
  8196  		return true
  8197  	}
  8198  	// match: (Select1 (DIV  (MOVWconst [c]) (MOVWconst [d])))
  8199  	// cond:
  8200  	// result: (MOVWconst [int64(int32(c)/int32(d))])
  8201  	for {
  8202  		v_0 := v.Args[0]
  8203  		if v_0.Op != OpMIPSDIV {
  8204  			break
  8205  		}
  8206  		v_0_0 := v_0.Args[0]
  8207  		if v_0_0.Op != OpMIPSMOVWconst {
  8208  			break
  8209  		}
  8210  		c := v_0_0.AuxInt
  8211  		v_0_1 := v_0.Args[1]
  8212  		if v_0_1.Op != OpMIPSMOVWconst {
  8213  			break
  8214  		}
  8215  		d := v_0_1.AuxInt
  8216  		v.reset(OpMIPSMOVWconst)
  8217  		v.AuxInt = int64(int32(c) / int32(d))
  8218  		return true
  8219  	}
  8220  	// match: (Select1 (DIVU (MOVWconst [c]) (MOVWconst [d])))
  8221  	// cond:
  8222  	// result: (MOVWconst [int64(int32(uint32(c)/uint32(d)))])
  8223  	for {
  8224  		v_0 := v.Args[0]
  8225  		if v_0.Op != OpMIPSDIVU {
  8226  			break
  8227  		}
  8228  		v_0_0 := v_0.Args[0]
  8229  		if v_0_0.Op != OpMIPSMOVWconst {
  8230  			break
  8231  		}
  8232  		c := v_0_0.AuxInt
  8233  		v_0_1 := v_0.Args[1]
  8234  		if v_0_1.Op != OpMIPSMOVWconst {
  8235  			break
  8236  		}
  8237  		d := v_0_1.AuxInt
  8238  		v.reset(OpMIPSMOVWconst)
  8239  		v.AuxInt = int64(int32(uint32(c) / uint32(d)))
  8240  		return true
  8241  	}
  8242  	return false
  8243  }
  8244  func rewriteValueMIPS_OpSignExt16to32(v *Value, config *Config) bool {
  8245  	b := v.Block
  8246  	_ = b
  8247  	// match: (SignExt16to32 x)
  8248  	// cond:
  8249  	// result: (MOVHreg x)
  8250  	for {
  8251  		x := v.Args[0]
  8252  		v.reset(OpMIPSMOVHreg)
  8253  		v.AddArg(x)
  8254  		return true
  8255  	}
  8256  }
  8257  func rewriteValueMIPS_OpSignExt8to16(v *Value, config *Config) bool {
  8258  	b := v.Block
  8259  	_ = b
  8260  	// match: (SignExt8to16 x)
  8261  	// cond:
  8262  	// result: (MOVBreg x)
  8263  	for {
  8264  		x := v.Args[0]
  8265  		v.reset(OpMIPSMOVBreg)
  8266  		v.AddArg(x)
  8267  		return true
  8268  	}
  8269  }
  8270  func rewriteValueMIPS_OpSignExt8to32(v *Value, config *Config) bool {
  8271  	b := v.Block
  8272  	_ = b
  8273  	// match: (SignExt8to32 x)
  8274  	// cond:
  8275  	// result: (MOVBreg x)
  8276  	for {
  8277  		x := v.Args[0]
  8278  		v.reset(OpMIPSMOVBreg)
  8279  		v.AddArg(x)
  8280  		return true
  8281  	}
  8282  }
  8283  func rewriteValueMIPS_OpSignmask(v *Value, config *Config) bool {
  8284  	b := v.Block
  8285  	_ = b
  8286  	// match: (Signmask x)
  8287  	// cond:
  8288  	// result: (SRAconst x [31])
  8289  	for {
  8290  		x := v.Args[0]
  8291  		v.reset(OpMIPSSRAconst)
  8292  		v.AuxInt = 31
  8293  		v.AddArg(x)
  8294  		return true
  8295  	}
  8296  }
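        // Slicemask must be all ones for a non-zero (positive) length and zero for a
        // zero length: (SGT x (MOVWconst [0])) is 0 or 1, and NEG turns that into
        // 0 or -1.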
  8297  func rewriteValueMIPS_OpSlicemask(v *Value, config *Config) bool {
  8298  	b := v.Block
  8299  	_ = b
  8300  	// match: (Slicemask x)
  8301  	// cond:
  8302  	// result: (NEG (SGT x (MOVWconst [0])))
  8303  	for {
  8304  		x := v.Args[0]
  8305  		v.reset(OpMIPSNEG)
  8306  		v0 := b.NewValue0(v.Line, OpMIPSSGT, config.fe.TypeBool())
  8307  		v0.AddArg(x)
  8308  		v1 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  8309  		v1.AuxInt = 0
  8310  		v0.AddArg(v1)
  8311  		v.AddArg(v0)
  8312  		return true
  8313  	}
  8314  }
  8315  func rewriteValueMIPS_OpSqrt(v *Value, config *Config) bool {
  8316  	b := v.Block
  8317  	_ = b
  8318  	// match: (Sqrt x)
  8319  	// cond:
  8320  	// result: (SQRTD x)
  8321  	for {
  8322  		x := v.Args[0]
  8323  		v.reset(OpMIPSSQRTD)
  8324  		v.AddArg(x)
  8325  		return true
  8326  	}
  8327  }
  8328  func rewriteValueMIPS_OpStaticCall(v *Value, config *Config) bool {
  8329  	b := v.Block
  8330  	_ = b
  8331  	// match: (StaticCall [argwid] {target} mem)
  8332  	// cond:
  8333  	// result: (CALLstatic [argwid] {target} mem)
  8334  	for {
  8335  		argwid := v.AuxInt
  8336  		target := v.Aux
  8337  		mem := v.Args[0]
  8338  		v.reset(OpMIPSCALLstatic)
  8339  		v.AuxInt = argwid
  8340  		v.Aux = target
  8341  		v.AddArg(mem)
  8342  		return true
  8343  	}
  8344  }
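        // Store is dispatched on its size in bytes (AuxInt) and, for 4- and 8-byte
        // stores, on whether the stored value is a float, selecting MOVBstore,
        // MOVHstore, MOVWstore, MOVFstore or MOVDstore accordingly.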
  8345  func rewriteValueMIPS_OpStore(v *Value, config *Config) bool {
  8346  	b := v.Block
  8347  	_ = b
  8348  	// match: (Store [1] ptr val mem)
  8349  	// cond:
  8350  	// result: (MOVBstore ptr val mem)
  8351  	for {
  8352  		if v.AuxInt != 1 {
  8353  			break
  8354  		}
  8355  		ptr := v.Args[0]
  8356  		val := v.Args[1]
  8357  		mem := v.Args[2]
  8358  		v.reset(OpMIPSMOVBstore)
  8359  		v.AddArg(ptr)
  8360  		v.AddArg(val)
  8361  		v.AddArg(mem)
  8362  		return true
  8363  	}
  8364  	// match: (Store [2] ptr val mem)
  8365  	// cond:
  8366  	// result: (MOVHstore ptr val mem)
  8367  	for {
  8368  		if v.AuxInt != 2 {
  8369  			break
  8370  		}
  8371  		ptr := v.Args[0]
  8372  		val := v.Args[1]
  8373  		mem := v.Args[2]
  8374  		v.reset(OpMIPSMOVHstore)
  8375  		v.AddArg(ptr)
  8376  		v.AddArg(val)
  8377  		v.AddArg(mem)
  8378  		return true
  8379  	}
  8380  	// match: (Store [4] ptr val mem)
  8381  	// cond: !is32BitFloat(val.Type)
  8382  	// result: (MOVWstore ptr val mem)
  8383  	for {
  8384  		if v.AuxInt != 4 {
  8385  			break
  8386  		}
  8387  		ptr := v.Args[0]
  8388  		val := v.Args[1]
  8389  		mem := v.Args[2]
  8390  		if !(!is32BitFloat(val.Type)) {
  8391  			break
  8392  		}
  8393  		v.reset(OpMIPSMOVWstore)
  8394  		v.AddArg(ptr)
  8395  		v.AddArg(val)
  8396  		v.AddArg(mem)
  8397  		return true
  8398  	}
  8399  	// match: (Store [8] ptr val mem)
  8400  	// cond: !is64BitFloat(val.Type)
  8401  	// result: (MOVWstore ptr val mem)
  8402  	for {
  8403  		if v.AuxInt != 8 {
  8404  			break
  8405  		}
  8406  		ptr := v.Args[0]
  8407  		val := v.Args[1]
  8408  		mem := v.Args[2]
  8409  		if !(!is64BitFloat(val.Type)) {
  8410  			break
  8411  		}
  8412  		v.reset(OpMIPSMOVWstore)
  8413  		v.AddArg(ptr)
  8414  		v.AddArg(val)
  8415  		v.AddArg(mem)
  8416  		return true
  8417  	}
  8418  	// match: (Store [4] ptr val mem)
  8419  	// cond: is32BitFloat(val.Type)
  8420  	// result: (MOVFstore ptr val mem)
  8421  	for {
  8422  		if v.AuxInt != 4 {
  8423  			break
  8424  		}
  8425  		ptr := v.Args[0]
  8426  		val := v.Args[1]
  8427  		mem := v.Args[2]
  8428  		if !(is32BitFloat(val.Type)) {
  8429  			break
  8430  		}
  8431  		v.reset(OpMIPSMOVFstore)
  8432  		v.AddArg(ptr)
  8433  		v.AddArg(val)
  8434  		v.AddArg(mem)
  8435  		return true
  8436  	}
  8437  	// match: (Store [8] ptr val mem)
  8438  	// cond: is64BitFloat(val.Type)
  8439  	// result: (MOVDstore ptr val mem)
  8440  	for {
  8441  		if v.AuxInt != 8 {
  8442  			break
  8443  		}
  8444  		ptr := v.Args[0]
  8445  		val := v.Args[1]
  8446  		mem := v.Args[2]
  8447  		if !(is64BitFloat(val.Type)) {
  8448  			break
  8449  		}
  8450  		v.reset(OpMIPSMOVDstore)
  8451  		v.AddArg(ptr)
  8452  		v.AddArg(val)
  8453  		v.AddArg(mem)
  8454  		return true
  8455  	}
  8456  	return false
  8457  }
  8458  func rewriteValueMIPS_OpSub16(v *Value, config *Config) bool {
  8459  	b := v.Block
  8460  	_ = b
  8461  	// match: (Sub16 x y)
  8462  	// cond:
  8463  	// result: (SUB x y)
  8464  	for {
  8465  		x := v.Args[0]
  8466  		y := v.Args[1]
  8467  		v.reset(OpMIPSSUB)
  8468  		v.AddArg(x)
  8469  		v.AddArg(y)
  8470  		return true
  8471  	}
  8472  }
  8473  func rewriteValueMIPS_OpSub32(v *Value, config *Config) bool {
  8474  	b := v.Block
  8475  	_ = b
  8476  	// match: (Sub32 x y)
  8477  	// cond:
  8478  	// result: (SUB x y)
  8479  	for {
  8480  		x := v.Args[0]
  8481  		y := v.Args[1]
  8482  		v.reset(OpMIPSSUB)
  8483  		v.AddArg(x)
  8484  		v.AddArg(y)
  8485  		return true
  8486  	}
  8487  }
  8488  func rewriteValueMIPS_OpSub32F(v *Value, config *Config) bool {
  8489  	b := v.Block
  8490  	_ = b
  8491  	// match: (Sub32F x y)
  8492  	// cond:
  8493  	// result: (SUBF x y)
  8494  	for {
  8495  		x := v.Args[0]
  8496  		y := v.Args[1]
  8497  		v.reset(OpMIPSSUBF)
  8498  		v.AddArg(x)
  8499  		v.AddArg(y)
  8500  		return true
  8501  	}
  8502  }
  8503  func rewriteValueMIPS_OpSub32withcarry(v *Value, config *Config) bool {
  8504  	b := v.Block
  8505  	_ = b
  8506  	// match: (Sub32withcarry <t> x y c)
  8507  	// cond:
  8508  	// result: (SUB (SUB <t> x y) c)
  8509  	for {
  8510  		t := v.Type
  8511  		x := v.Args[0]
  8512  		y := v.Args[1]
  8513  		c := v.Args[2]
  8514  		v.reset(OpMIPSSUB)
  8515  		v0 := b.NewValue0(v.Line, OpMIPSSUB, t)
  8516  		v0.AddArg(x)
  8517  		v0.AddArg(y)
  8518  		v.AddArg(v0)
  8519  		v.AddArg(c)
  8520  		return true
  8521  	}
  8522  }
  8523  func rewriteValueMIPS_OpSub64F(v *Value, config *Config) bool {
  8524  	b := v.Block
  8525  	_ = b
  8526  	// match: (Sub64F x y)
  8527  	// cond:
  8528  	// result: (SUBD x y)
  8529  	for {
  8530  		x := v.Args[0]
  8531  		y := v.Args[1]
  8532  		v.reset(OpMIPSSUBD)
  8533  		v.AddArg(x)
  8534  		v.AddArg(y)
  8535  		return true
  8536  	}
  8537  }
  8538  func rewriteValueMIPS_OpSub8(v *Value, config *Config) bool {
  8539  	b := v.Block
  8540  	_ = b
  8541  	// match: (Sub8 x y)
  8542  	// cond:
  8543  	// result: (SUB x y)
  8544  	for {
  8545  		x := v.Args[0]
  8546  		y := v.Args[1]
  8547  		v.reset(OpMIPSSUB)
  8548  		v.AddArg(x)
  8549  		v.AddArg(y)
  8550  		return true
  8551  	}
  8552  }
  8553  func rewriteValueMIPS_OpSubPtr(v *Value, config *Config) bool {
  8554  	b := v.Block
  8555  	_ = b
  8556  	// match: (SubPtr x y)
  8557  	// cond:
  8558  	// result: (SUB x y)
  8559  	for {
  8560  		x := v.Args[0]
  8561  		y := v.Args[1]
  8562  		v.reset(OpMIPSSUB)
  8563  		v.AddArg(x)
  8564  		v.AddArg(y)
  8565  		return true
  8566  	}
  8567  }
  8568  func rewriteValueMIPS_OpTrunc16to8(v *Value, config *Config) bool {
  8569  	b := v.Block
  8570  	_ = b
  8571  	// match: (Trunc16to8 x)
  8572  	// cond:
  8573  	// result: x
  8574  	for {
  8575  		x := v.Args[0]
  8576  		v.reset(OpCopy)
  8577  		v.Type = x.Type
  8578  		v.AddArg(x)
  8579  		return true
  8580  	}
  8581  }
  8582  func rewriteValueMIPS_OpTrunc32to16(v *Value, config *Config) bool {
  8583  	b := v.Block
  8584  	_ = b
  8585  	// match: (Trunc32to16 x)
  8586  	// cond:
  8587  	// result: x
  8588  	for {
  8589  		x := v.Args[0]
  8590  		v.reset(OpCopy)
  8591  		v.Type = x.Type
  8592  		v.AddArg(x)
  8593  		return true
  8594  	}
  8595  }
  8596  func rewriteValueMIPS_OpTrunc32to8(v *Value, config *Config) bool {
  8597  	b := v.Block
  8598  	_ = b
  8599  	// match: (Trunc32to8 x)
  8600  	// cond:
  8601  	// result: x
  8602  	for {
  8603  		x := v.Args[0]
  8604  		v.reset(OpCopy)
  8605  		v.Type = x.Type
  8606  		v.AddArg(x)
  8607  		return true
  8608  	}
  8609  }
  8610  func rewriteValueMIPS_OpXor16(v *Value, config *Config) bool {
  8611  	b := v.Block
  8612  	_ = b
  8613  	// match: (Xor16 x y)
  8614  	// cond:
  8615  	// result: (XOR x y)
  8616  	for {
  8617  		x := v.Args[0]
  8618  		y := v.Args[1]
  8619  		v.reset(OpMIPSXOR)
  8620  		v.AddArg(x)
  8621  		v.AddArg(y)
  8622  		return true
  8623  	}
  8624  }
  8625  func rewriteValueMIPS_OpXor32(v *Value, config *Config) bool {
  8626  	b := v.Block
  8627  	_ = b
  8628  	// match: (Xor32 x y)
  8629  	// cond:
  8630  	// result: (XOR x y)
  8631  	for {
  8632  		x := v.Args[0]
  8633  		y := v.Args[1]
  8634  		v.reset(OpMIPSXOR)
  8635  		v.AddArg(x)
  8636  		v.AddArg(y)
  8637  		return true
  8638  	}
  8639  }
  8640  func rewriteValueMIPS_OpXor8(v *Value, config *Config) bool {
  8641  	b := v.Block
  8642  	_ = b
  8643  	// match: (Xor8 x y)
  8644  	// cond:
  8645  	// result: (XOR x y)
  8646  	for {
  8647  		x := v.Args[0]
  8648  		y := v.Args[1]
  8649  		v.reset(OpMIPSXOR)
  8650  		v.AddArg(x)
  8651  		v.AddArg(y)
  8652  		return true
  8653  	}
  8654  }
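        // Zero of a small, suitably aligned block is unrolled into MOVBstore,
        // MOVHstore or MOVWstore of a zero constant; blocks larger than 16 bytes or
        // with alignment not a multiple of 4 fall back to LoweredZero, whose second
        // operand (ptr advanced by size minus moveSize) is the address of the final
        // chunk to be cleared.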
  8655  func rewriteValueMIPS_OpZero(v *Value, config *Config) bool {
  8656  	b := v.Block
  8657  	_ = b
  8658  	// match: (Zero [s] _ mem)
  8659  	// cond: SizeAndAlign(s).Size() == 0
  8660  	// result: mem
  8661  	for {
  8662  		s := v.AuxInt
  8663  		mem := v.Args[1]
  8664  		if !(SizeAndAlign(s).Size() == 0) {
  8665  			break
  8666  		}
  8667  		v.reset(OpCopy)
  8668  		v.Type = mem.Type
  8669  		v.AddArg(mem)
  8670  		return true
  8671  	}
  8672  	// match: (Zero [s] ptr mem)
  8673  	// cond: SizeAndAlign(s).Size() == 1
  8674  	// result: (MOVBstore ptr (MOVWconst [0]) mem)
  8675  	for {
  8676  		s := v.AuxInt
  8677  		ptr := v.Args[0]
  8678  		mem := v.Args[1]
  8679  		if !(SizeAndAlign(s).Size() == 1) {
  8680  			break
  8681  		}
  8682  		v.reset(OpMIPSMOVBstore)
  8683  		v.AddArg(ptr)
  8684  		v0 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  8685  		v0.AuxInt = 0
  8686  		v.AddArg(v0)
  8687  		v.AddArg(mem)
  8688  		return true
  8689  	}
  8690  	// match: (Zero [s] ptr mem)
  8691  	// cond: SizeAndAlign(s).Size() == 2 && SizeAndAlign(s).Align()%2 == 0
  8692  	// result: (MOVHstore ptr (MOVWconst [0]) mem)
  8693  	for {
  8694  		s := v.AuxInt
  8695  		ptr := v.Args[0]
  8696  		mem := v.Args[1]
  8697  		if !(SizeAndAlign(s).Size() == 2 && SizeAndAlign(s).Align()%2 == 0) {
  8698  			break
  8699  		}
  8700  		v.reset(OpMIPSMOVHstore)
  8701  		v.AddArg(ptr)
  8702  		v0 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  8703  		v0.AuxInt = 0
  8704  		v.AddArg(v0)
  8705  		v.AddArg(mem)
  8706  		return true
  8707  	}
  8708  	// match: (Zero [s] ptr mem)
  8709  	// cond: SizeAndAlign(s).Size() == 2
  8710  	// result: (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem))
  8711  	for {
  8712  		s := v.AuxInt
  8713  		ptr := v.Args[0]
  8714  		mem := v.Args[1]
  8715  		if !(SizeAndAlign(s).Size() == 2) {
  8716  			break
  8717  		}
  8718  		v.reset(OpMIPSMOVBstore)
  8719  		v.AuxInt = 1
  8720  		v.AddArg(ptr)
  8721  		v0 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  8722  		v0.AuxInt = 0
  8723  		v.AddArg(v0)
  8724  		v1 := b.NewValue0(v.Line, OpMIPSMOVBstore, TypeMem)
  8725  		v1.AuxInt = 0
  8726  		v1.AddArg(ptr)
  8727  		v2 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  8728  		v2.AuxInt = 0
  8729  		v1.AddArg(v2)
  8730  		v1.AddArg(mem)
  8731  		v.AddArg(v1)
  8732  		return true
  8733  	}
  8734  	// match: (Zero [s] ptr mem)
  8735  	// cond: SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%4 == 0
  8736  	// result: (MOVWstore ptr (MOVWconst [0]) mem)
  8737  	for {
  8738  		s := v.AuxInt
  8739  		ptr := v.Args[0]
  8740  		mem := v.Args[1]
  8741  		if !(SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%4 == 0) {
  8742  			break
  8743  		}
  8744  		v.reset(OpMIPSMOVWstore)
  8745  		v.AddArg(ptr)
  8746  		v0 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  8747  		v0.AuxInt = 0
  8748  		v.AddArg(v0)
  8749  		v.AddArg(mem)
  8750  		return true
  8751  	}
  8752  	// match: (Zero [s] ptr mem)
  8753  	// cond: SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%2 == 0
  8754  	// result: (MOVHstore [2] ptr (MOVWconst [0]) (MOVHstore [0] ptr (MOVWconst [0]) mem))
  8755  	for {
  8756  		s := v.AuxInt
  8757  		ptr := v.Args[0]
  8758  		mem := v.Args[1]
  8759  		if !(SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%2 == 0) {
  8760  			break
  8761  		}
  8762  		v.reset(OpMIPSMOVHstore)
  8763  		v.AuxInt = 2
  8764  		v.AddArg(ptr)
  8765  		v0 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  8766  		v0.AuxInt = 0
  8767  		v.AddArg(v0)
  8768  		v1 := b.NewValue0(v.Line, OpMIPSMOVHstore, TypeMem)
  8769  		v1.AuxInt = 0
  8770  		v1.AddArg(ptr)
  8771  		v2 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  8772  		v2.AuxInt = 0
  8773  		v1.AddArg(v2)
  8774  		v1.AddArg(mem)
  8775  		v.AddArg(v1)
  8776  		return true
  8777  	}
  8778  	// match: (Zero [s] ptr mem)
  8779  	// cond: SizeAndAlign(s).Size() == 4
  8780  	// result: (MOVBstore [3] ptr (MOVWconst [0]) (MOVBstore [2] ptr (MOVWconst [0]) (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem))))
  8781  	for {
  8782  		s := v.AuxInt
  8783  		ptr := v.Args[0]
  8784  		mem := v.Args[1]
  8785  		if !(SizeAndAlign(s).Size() == 4) {
  8786  			break
  8787  		}
  8788  		v.reset(OpMIPSMOVBstore)
  8789  		v.AuxInt = 3
  8790  		v.AddArg(ptr)
  8791  		v0 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  8792  		v0.AuxInt = 0
  8793  		v.AddArg(v0)
  8794  		v1 := b.NewValue0(v.Line, OpMIPSMOVBstore, TypeMem)
  8795  		v1.AuxInt = 2
  8796  		v1.AddArg(ptr)
  8797  		v2 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  8798  		v2.AuxInt = 0
  8799  		v1.AddArg(v2)
  8800  		v3 := b.NewValue0(v.Line, OpMIPSMOVBstore, TypeMem)
  8801  		v3.AuxInt = 1
  8802  		v3.AddArg(ptr)
  8803  		v4 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  8804  		v4.AuxInt = 0
  8805  		v3.AddArg(v4)
  8806  		v5 := b.NewValue0(v.Line, OpMIPSMOVBstore, TypeMem)
  8807  		v5.AuxInt = 0
  8808  		v5.AddArg(ptr)
  8809  		v6 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  8810  		v6.AuxInt = 0
  8811  		v5.AddArg(v6)
  8812  		v5.AddArg(mem)
  8813  		v3.AddArg(v5)
  8814  		v1.AddArg(v3)
  8815  		v.AddArg(v1)
  8816  		return true
  8817  	}
  8818  	// match: (Zero [s] ptr mem)
  8819  	// cond: SizeAndAlign(s).Size() == 3
  8820  	// result: (MOVBstore [2] ptr (MOVWconst [0]) (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem)))
  8821  	for {
  8822  		s := v.AuxInt
  8823  		ptr := v.Args[0]
  8824  		mem := v.Args[1]
  8825  		if !(SizeAndAlign(s).Size() == 3) {
  8826  			break
  8827  		}
  8828  		v.reset(OpMIPSMOVBstore)
  8829  		v.AuxInt = 2
  8830  		v.AddArg(ptr)
  8831  		v0 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  8832  		v0.AuxInt = 0
  8833  		v.AddArg(v0)
  8834  		v1 := b.NewValue0(v.Line, OpMIPSMOVBstore, TypeMem)
  8835  		v1.AuxInt = 1
  8836  		v1.AddArg(ptr)
  8837  		v2 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  8838  		v2.AuxInt = 0
  8839  		v1.AddArg(v2)
  8840  		v3 := b.NewValue0(v.Line, OpMIPSMOVBstore, TypeMem)
  8841  		v3.AuxInt = 0
  8842  		v3.AddArg(ptr)
  8843  		v4 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  8844  		v4.AuxInt = 0
  8845  		v3.AddArg(v4)
  8846  		v3.AddArg(mem)
  8847  		v1.AddArg(v3)
  8848  		v.AddArg(v1)
  8849  		return true
  8850  	}
  8851  	// match: (Zero [s] ptr mem)
  8852  	// cond: SizeAndAlign(s).Size() == 6 && SizeAndAlign(s).Align()%2 == 0
  8853  	// result: (MOVHstore [4] ptr (MOVWconst [0]) (MOVHstore [2] ptr (MOVWconst [0]) (MOVHstore [0] ptr (MOVWconst [0]) mem)))
  8854  	for {
  8855  		s := v.AuxInt
  8856  		ptr := v.Args[0]
  8857  		mem := v.Args[1]
  8858  		if !(SizeAndAlign(s).Size() == 6 && SizeAndAlign(s).Align()%2 == 0) {
  8859  			break
  8860  		}
  8861  		v.reset(OpMIPSMOVHstore)
  8862  		v.AuxInt = 4
  8863  		v.AddArg(ptr)
  8864  		v0 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  8865  		v0.AuxInt = 0
  8866  		v.AddArg(v0)
  8867  		v1 := b.NewValue0(v.Line, OpMIPSMOVHstore, TypeMem)
  8868  		v1.AuxInt = 2
  8869  		v1.AddArg(ptr)
  8870  		v2 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  8871  		v2.AuxInt = 0
  8872  		v1.AddArg(v2)
  8873  		v3 := b.NewValue0(v.Line, OpMIPSMOVHstore, TypeMem)
  8874  		v3.AuxInt = 0
  8875  		v3.AddArg(ptr)
  8876  		v4 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  8877  		v4.AuxInt = 0
  8878  		v3.AddArg(v4)
  8879  		v3.AddArg(mem)
  8880  		v1.AddArg(v3)
  8881  		v.AddArg(v1)
  8882  		return true
  8883  	}
  8884  	// match: (Zero [s] ptr mem)
  8885  	// cond: SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%4 == 0
  8886  	// result: (MOVWstore [4] ptr (MOVWconst [0]) (MOVWstore [0] ptr (MOVWconst [0]) mem))
  8887  	for {
  8888  		s := v.AuxInt
  8889  		ptr := v.Args[0]
  8890  		mem := v.Args[1]
  8891  		if !(SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%4 == 0) {
  8892  			break
  8893  		}
  8894  		v.reset(OpMIPSMOVWstore)
  8895  		v.AuxInt = 4
  8896  		v.AddArg(ptr)
  8897  		v0 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  8898  		v0.AuxInt = 0
  8899  		v.AddArg(v0)
  8900  		v1 := b.NewValue0(v.Line, OpMIPSMOVWstore, TypeMem)
  8901  		v1.AuxInt = 0
  8902  		v1.AddArg(ptr)
  8903  		v2 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  8904  		v2.AuxInt = 0
  8905  		v1.AddArg(v2)
  8906  		v1.AddArg(mem)
  8907  		v.AddArg(v1)
  8908  		return true
  8909  	}
  8910  	// match: (Zero [s] ptr mem)
  8911  	// cond: SizeAndAlign(s).Size() == 12 && SizeAndAlign(s).Align()%4 == 0
  8912  	// result: (MOVWstore [8] ptr (MOVWconst [0]) (MOVWstore [4] ptr (MOVWconst [0]) (MOVWstore [0] ptr (MOVWconst [0]) mem)))
  8913  	for {
  8914  		s := v.AuxInt
  8915  		ptr := v.Args[0]
  8916  		mem := v.Args[1]
  8917  		if !(SizeAndAlign(s).Size() == 12 && SizeAndAlign(s).Align()%4 == 0) {
  8918  			break
  8919  		}
  8920  		v.reset(OpMIPSMOVWstore)
  8921  		v.AuxInt = 8
  8922  		v.AddArg(ptr)
  8923  		v0 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  8924  		v0.AuxInt = 0
  8925  		v.AddArg(v0)
  8926  		v1 := b.NewValue0(v.Line, OpMIPSMOVWstore, TypeMem)
  8927  		v1.AuxInt = 4
  8928  		v1.AddArg(ptr)
  8929  		v2 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  8930  		v2.AuxInt = 0
  8931  		v1.AddArg(v2)
  8932  		v3 := b.NewValue0(v.Line, OpMIPSMOVWstore, TypeMem)
  8933  		v3.AuxInt = 0
  8934  		v3.AddArg(ptr)
  8935  		v4 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  8936  		v4.AuxInt = 0
  8937  		v3.AddArg(v4)
  8938  		v3.AddArg(mem)
  8939  		v1.AddArg(v3)
  8940  		v.AddArg(v1)
  8941  		return true
  8942  	}
  8943  	// match: (Zero [s] ptr mem)
  8944  	// cond: SizeAndAlign(s).Size() == 16 && SizeAndAlign(s).Align()%4 == 0
  8945  	// result: (MOVWstore [12] ptr (MOVWconst [0]) (MOVWstore [8] ptr (MOVWconst [0]) (MOVWstore [4] ptr (MOVWconst [0]) (MOVWstore [0] ptr (MOVWconst [0]) mem))))
  8946  	for {
  8947  		s := v.AuxInt
  8948  		ptr := v.Args[0]
  8949  		mem := v.Args[1]
  8950  		if !(SizeAndAlign(s).Size() == 16 && SizeAndAlign(s).Align()%4 == 0) {
  8951  			break
  8952  		}
  8953  		v.reset(OpMIPSMOVWstore)
  8954  		v.AuxInt = 12
  8955  		v.AddArg(ptr)
  8956  		v0 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  8957  		v0.AuxInt = 0
  8958  		v.AddArg(v0)
  8959  		v1 := b.NewValue0(v.Line, OpMIPSMOVWstore, TypeMem)
  8960  		v1.AuxInt = 8
  8961  		v1.AddArg(ptr)
  8962  		v2 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  8963  		v2.AuxInt = 0
  8964  		v1.AddArg(v2)
  8965  		v3 := b.NewValue0(v.Line, OpMIPSMOVWstore, TypeMem)
  8966  		v3.AuxInt = 4
  8967  		v3.AddArg(ptr)
  8968  		v4 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  8969  		v4.AuxInt = 0
  8970  		v3.AddArg(v4)
  8971  		v5 := b.NewValue0(v.Line, OpMIPSMOVWstore, TypeMem)
  8972  		v5.AuxInt = 0
  8973  		v5.AddArg(ptr)
  8974  		v6 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  8975  		v6.AuxInt = 0
  8976  		v5.AddArg(v6)
  8977  		v5.AddArg(mem)
  8978  		v3.AddArg(v5)
  8979  		v1.AddArg(v3)
  8980  		v.AddArg(v1)
  8981  		return true
  8982  	}
  8983  	// match: (Zero [s] ptr mem)
  8984  	// cond: (SizeAndAlign(s).Size() > 16  || SizeAndAlign(s).Align()%4 != 0)
  8985  	// result: (LoweredZero [SizeAndAlign(s).Align()] ptr (ADDconst <ptr.Type> ptr [SizeAndAlign(s).Size()-moveSize(SizeAndAlign(s).Align(), config)]) mem)
  8986  	for {
  8987  		s := v.AuxInt
  8988  		ptr := v.Args[0]
  8989  		mem := v.Args[1]
  8990  		if !(SizeAndAlign(s).Size() > 16 || SizeAndAlign(s).Align()%4 != 0) {
  8991  			break
  8992  		}
  8993  		v.reset(OpMIPSLoweredZero)
  8994  		v.AuxInt = SizeAndAlign(s).Align()
  8995  		v.AddArg(ptr)
  8996  		v0 := b.NewValue0(v.Line, OpMIPSADDconst, ptr.Type)
  8997  		v0.AuxInt = SizeAndAlign(s).Size() - moveSize(SizeAndAlign(s).Align(), config)
  8998  		v0.AddArg(ptr)
  8999  		v.AddArg(v0)
  9000  		v.AddArg(mem)
  9001  		return true
  9002  	}
  9003  	return false
  9004  }
  9005  func rewriteValueMIPS_OpZeroExt16to32(v *Value, config *Config) bool {
  9006  	b := v.Block
  9007  	_ = b
  9008  	// match: (ZeroExt16to32 x)
  9009  	// cond:
  9010  	// result: (MOVHUreg x)
  9011  	for {
  9012  		x := v.Args[0]
  9013  		v.reset(OpMIPSMOVHUreg)
  9014  		v.AddArg(x)
  9015  		return true
  9016  	}
  9017  }
  9018  func rewriteValueMIPS_OpZeroExt8to16(v *Value, config *Config) bool {
  9019  	b := v.Block
  9020  	_ = b
  9021  	// match: (ZeroExt8to16 x)
  9022  	// cond:
  9023  	// result: (MOVBUreg x)
  9024  	for {
  9025  		x := v.Args[0]
  9026  		v.reset(OpMIPSMOVBUreg)
  9027  		v.AddArg(x)
  9028  		return true
  9029  	}
  9030  }
  9031  func rewriteValueMIPS_OpZeroExt8to32(v *Value, config *Config) bool {
  9032  	b := v.Block
  9033  	_ = b
  9034  	// match: (ZeroExt8to32 x)
  9035  	// cond:
  9036  	// result: (MOVBUreg x)
  9037  	for {
  9038  		x := v.Args[0]
  9039  		v.reset(OpMIPSMOVBUreg)
  9040  		v.AddArg(x)
  9041  		return true
  9042  	}
  9043  }
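        // Zeromask is the unsigned analogue of Slicemask: (SGTU x (MOVWconst [0]))
        // is 1 exactly when x is non-zero, and NEG expands that to all ones.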
  9044  func rewriteValueMIPS_OpZeromask(v *Value, config *Config) bool {
  9045  	b := v.Block
  9046  	_ = b
  9047  	// match: (Zeromask x)
  9048  	// cond:
  9049  	// result: (NEG (SGTU x (MOVWconst [0])))
  9050  	for {
  9051  		x := v.Args[0]
  9052  		v.reset(OpMIPSNEG)
  9053  		v0 := b.NewValue0(v.Line, OpMIPSSGTU, config.fe.TypeBool())
  9054  		v0.AddArg(x)
  9055  		v1 := b.NewValue0(v.Line, OpMIPSMOVWconst, config.fe.TypeUInt32())
  9056  		v1.AuxInt = 0
  9057  		v0.AddArg(v1)
  9058  		v.AddArg(v0)
  9059  		return true
  9060  	}
  9061  }
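        // rewriteBlockMIPS lowers control-flow blocks. A generic If becomes a MIPS
        // NE on its boolean control; EQ/NE over (XORconst [1] cmp) flip the branch
        // sense instead of materializing the negation; comparisons against zero are
        // folded into the dedicated GEZ/GTZ/LEZ/LTZ block kinds; and a constant
        // control collapses the block to First, swapping successors when the branch
        // can never be taken.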
  9062  func rewriteBlockMIPS(b *Block, config *Config) bool {
  9063  	switch b.Kind {
  9064  	case BlockMIPSEQ:
  9065  		// match: (EQ (FPFlagTrue cmp) yes no)
  9066  		// cond:
  9067  		// result: (FPF cmp yes no)
  9068  		for {
  9069  			v := b.Control
  9070  			if v.Op != OpMIPSFPFlagTrue {
  9071  				break
  9072  			}
  9073  			cmp := v.Args[0]
  9074  			yes := b.Succs[0]
  9075  			no := b.Succs[1]
  9076  			b.Kind = BlockMIPSFPF
  9077  			b.SetControl(cmp)
  9078  			_ = yes
  9079  			_ = no
  9080  			return true
  9081  		}
  9082  		// match: (EQ (FPFlagFalse cmp) yes no)
  9083  		// cond:
  9084  		// result: (FPT cmp yes no)
  9085  		for {
  9086  			v := b.Control
  9087  			if v.Op != OpMIPSFPFlagFalse {
  9088  				break
  9089  			}
  9090  			cmp := v.Args[0]
  9091  			yes := b.Succs[0]
  9092  			no := b.Succs[1]
  9093  			b.Kind = BlockMIPSFPT
  9094  			b.SetControl(cmp)
  9095  			_ = yes
  9096  			_ = no
  9097  			return true
  9098  		}
  9099  		// match: (EQ (XORconst [1] cmp:(SGT _ _)) yes no)
  9100  		// cond:
  9101  		// result: (NE cmp yes no)
  9102  		for {
  9103  			v := b.Control
  9104  			if v.Op != OpMIPSXORconst {
  9105  				break
  9106  			}
  9107  			if v.AuxInt != 1 {
  9108  				break
  9109  			}
  9110  			cmp := v.Args[0]
  9111  			if cmp.Op != OpMIPSSGT {
  9112  				break
  9113  			}
  9114  			yes := b.Succs[0]
  9115  			no := b.Succs[1]
  9116  			b.Kind = BlockMIPSNE
  9117  			b.SetControl(cmp)
  9118  			_ = yes
  9119  			_ = no
  9120  			return true
  9121  		}
  9122  		// match: (EQ (XORconst [1] cmp:(SGTU _ _)) yes no)
  9123  		// cond:
  9124  		// result: (NE cmp yes no)
  9125  		for {
  9126  			v := b.Control
  9127  			if v.Op != OpMIPSXORconst {
  9128  				break
  9129  			}
  9130  			if v.AuxInt != 1 {
  9131  				break
  9132  			}
  9133  			cmp := v.Args[0]
  9134  			if cmp.Op != OpMIPSSGTU {
  9135  				break
  9136  			}
  9137  			yes := b.Succs[0]
  9138  			no := b.Succs[1]
  9139  			b.Kind = BlockMIPSNE
  9140  			b.SetControl(cmp)
  9141  			_ = yes
  9142  			_ = no
  9143  			return true
  9144  		}
  9145  		// match: (EQ (XORconst [1] cmp:(SGTconst _)) yes no)
  9146  		// cond:
  9147  		// result: (NE cmp yes no)
  9148  		for {
  9149  			v := b.Control
  9150  			if v.Op != OpMIPSXORconst {
  9151  				break
  9152  			}
  9153  			if v.AuxInt != 1 {
  9154  				break
  9155  			}
  9156  			cmp := v.Args[0]
  9157  			if cmp.Op != OpMIPSSGTconst {
  9158  				break
  9159  			}
  9160  			yes := b.Succs[0]
  9161  			no := b.Succs[1]
  9162  			b.Kind = BlockMIPSNE
  9163  			b.SetControl(cmp)
  9164  			_ = yes
  9165  			_ = no
  9166  			return true
  9167  		}
  9168  		// match: (EQ (XORconst [1] cmp:(SGTUconst _)) yes no)
  9169  		// cond:
  9170  		// result: (NE cmp yes no)
  9171  		for {
  9172  			v := b.Control
  9173  			if v.Op != OpMIPSXORconst {
  9174  				break
  9175  			}
  9176  			if v.AuxInt != 1 {
  9177  				break
  9178  			}
  9179  			cmp := v.Args[0]
  9180  			if cmp.Op != OpMIPSSGTUconst {
  9181  				break
  9182  			}
  9183  			yes := b.Succs[0]
  9184  			no := b.Succs[1]
  9185  			b.Kind = BlockMIPSNE
  9186  			b.SetControl(cmp)
  9187  			_ = yes
  9188  			_ = no
  9189  			return true
  9190  		}
  9191  		// match: (EQ (XORconst [1] cmp:(SGTzero _)) yes no)
  9192  		// cond:
  9193  		// result: (NE cmp yes no)
  9194  		for {
  9195  			v := b.Control
  9196  			if v.Op != OpMIPSXORconst {
  9197  				break
  9198  			}
  9199  			if v.AuxInt != 1 {
  9200  				break
  9201  			}
  9202  			cmp := v.Args[0]
  9203  			if cmp.Op != OpMIPSSGTzero {
  9204  				break
  9205  			}
  9206  			yes := b.Succs[0]
  9207  			no := b.Succs[1]
  9208  			b.Kind = BlockMIPSNE
  9209  			b.SetControl(cmp)
  9210  			_ = yes
  9211  			_ = no
  9212  			return true
  9213  		}
  9214  		// match: (EQ (XORconst [1] cmp:(SGTUzero _)) yes no)
  9215  		// cond:
  9216  		// result: (NE cmp yes no)
  9217  		for {
  9218  			v := b.Control
  9219  			if v.Op != OpMIPSXORconst {
  9220  				break
  9221  			}
  9222  			if v.AuxInt != 1 {
  9223  				break
  9224  			}
  9225  			cmp := v.Args[0]
  9226  			if cmp.Op != OpMIPSSGTUzero {
  9227  				break
  9228  			}
  9229  			yes := b.Succs[0]
  9230  			no := b.Succs[1]
  9231  			b.Kind = BlockMIPSNE
  9232  			b.SetControl(cmp)
  9233  			_ = yes
  9234  			_ = no
  9235  			return true
  9236  		}
  9237  		// match: (EQ (SGTUconst [1] x) yes no)
  9238  		// cond:
  9239  		// result: (NE x yes no)
  9240  		for {
  9241  			v := b.Control
  9242  			if v.Op != OpMIPSSGTUconst {
  9243  				break
  9244  			}
  9245  			if v.AuxInt != 1 {
  9246  				break
  9247  			}
  9248  			x := v.Args[0]
  9249  			yes := b.Succs[0]
  9250  			no := b.Succs[1]
  9251  			b.Kind = BlockMIPSNE
  9252  			b.SetControl(x)
  9253  			_ = yes
  9254  			_ = no
  9255  			return true
  9256  		}
  9257  		// match: (EQ (SGTUzero x) yes no)
  9258  		// cond:
  9259  		// result: (EQ x yes no)
  9260  		for {
  9261  			v := b.Control
  9262  			if v.Op != OpMIPSSGTUzero {
  9263  				break
  9264  			}
  9265  			x := v.Args[0]
  9266  			yes := b.Succs[0]
  9267  			no := b.Succs[1]
  9268  			b.Kind = BlockMIPSEQ
  9269  			b.SetControl(x)
  9270  			_ = yes
  9271  			_ = no
  9272  			return true
  9273  		}
  9274  		// match: (EQ (SGTconst [0] x) yes no)
  9275  		// cond:
  9276  		// result: (GEZ x yes no)
  9277  		for {
  9278  			v := b.Control
  9279  			if v.Op != OpMIPSSGTconst {
  9280  				break
  9281  			}
  9282  			if v.AuxInt != 0 {
  9283  				break
  9284  			}
  9285  			x := v.Args[0]
  9286  			yes := b.Succs[0]
  9287  			no := b.Succs[1]
  9288  			b.Kind = BlockMIPSGEZ
  9289  			b.SetControl(x)
  9290  			_ = yes
  9291  			_ = no
  9292  			return true
  9293  		}
  9294  		// match: (EQ (SGTzero x) yes no)
  9295  		// cond:
  9296  		// result: (LEZ x yes no)
  9297  		for {
  9298  			v := b.Control
  9299  			if v.Op != OpMIPSSGTzero {
  9300  				break
  9301  			}
  9302  			x := v.Args[0]
  9303  			yes := b.Succs[0]
  9304  			no := b.Succs[1]
  9305  			b.Kind = BlockMIPSLEZ
  9306  			b.SetControl(x)
  9307  			_ = yes
  9308  			_ = no
  9309  			return true
  9310  		}
  9311  		// match: (EQ  (MOVWconst [0]) yes no)
  9312  		// cond:
  9313  		// result: (First nil yes no)
  9314  		for {
  9315  			v := b.Control
  9316  			if v.Op != OpMIPSMOVWconst {
  9317  				break
  9318  			}
  9319  			if v.AuxInt != 0 {
  9320  				break
  9321  			}
  9322  			yes := b.Succs[0]
  9323  			no := b.Succs[1]
  9324  			b.Kind = BlockFirst
  9325  			b.SetControl(nil)
  9326  			_ = yes
  9327  			_ = no
  9328  			return true
  9329  		}
  9330  		// match: (EQ  (MOVWconst [c]) yes no)
  9331  		// cond: c != 0
  9332  		// result: (First nil no yes)
  9333  		for {
  9334  			v := b.Control
  9335  			if v.Op != OpMIPSMOVWconst {
  9336  				break
  9337  			}
  9338  			c := v.AuxInt
  9339  			yes := b.Succs[0]
  9340  			no := b.Succs[1]
  9341  			if !(c != 0) {
  9342  				break
  9343  			}
  9344  			b.Kind = BlockFirst
  9345  			b.SetControl(nil)
  9346  			b.swapSuccessors()
  9347  			_ = no
  9348  			_ = yes
  9349  			return true
  9350  		}
  9351  	case BlockMIPSGEZ:
  9352  		// match: (GEZ (MOVWconst [c]) yes no)
  9353  		// cond: int32(c) >= 0
  9354  		// result: (First nil yes no)
  9355  		for {
  9356  			v := b.Control
  9357  			if v.Op != OpMIPSMOVWconst {
  9358  				break
  9359  			}
  9360  			c := v.AuxInt
  9361  			yes := b.Succs[0]
  9362  			no := b.Succs[1]
  9363  			if !(int32(c) >= 0) {
  9364  				break
  9365  			}
  9366  			b.Kind = BlockFirst
  9367  			b.SetControl(nil)
  9368  			_ = yes
  9369  			_ = no
  9370  			return true
  9371  		}
  9372  		// match: (GEZ (MOVWconst [c]) yes no)
  9373  		// cond: int32(c) <  0
  9374  		// result: (First nil no yes)
  9375  		for {
  9376  			v := b.Control
  9377  			if v.Op != OpMIPSMOVWconst {
  9378  				break
  9379  			}
  9380  			c := v.AuxInt
  9381  			yes := b.Succs[0]
  9382  			no := b.Succs[1]
  9383  			if !(int32(c) < 0) {
  9384  				break
  9385  			}
  9386  			b.Kind = BlockFirst
  9387  			b.SetControl(nil)
  9388  			b.swapSuccessors()
  9389  			_ = no
  9390  			_ = yes
  9391  			return true
  9392  		}
  9393  	case BlockMIPSGTZ:
  9394  		// match: (GTZ (MOVWconst [c]) yes no)
  9395  		// cond: int32(c) >  0
  9396  		// result: (First nil yes no)
  9397  		for {
  9398  			v := b.Control
  9399  			if v.Op != OpMIPSMOVWconst {
  9400  				break
  9401  			}
  9402  			c := v.AuxInt
  9403  			yes := b.Succs[0]
  9404  			no := b.Succs[1]
  9405  			if !(int32(c) > 0) {
  9406  				break
  9407  			}
  9408  			b.Kind = BlockFirst
  9409  			b.SetControl(nil)
  9410  			_ = yes
  9411  			_ = no
  9412  			return true
  9413  		}
  9414  		// match: (GTZ (MOVWconst [c]) yes no)
  9415  		// cond: int32(c) <= 0
  9416  		// result: (First nil no yes)
  9417  		for {
  9418  			v := b.Control
  9419  			if v.Op != OpMIPSMOVWconst {
  9420  				break
  9421  			}
  9422  			c := v.AuxInt
  9423  			yes := b.Succs[0]
  9424  			no := b.Succs[1]
  9425  			if !(int32(c) <= 0) {
  9426  				break
  9427  			}
  9428  			b.Kind = BlockFirst
  9429  			b.SetControl(nil)
  9430  			b.swapSuccessors()
  9431  			_ = no
  9432  			_ = yes
  9433  			return true
  9434  		}
  9435  	case BlockIf:
  9436  		// match: (If cond yes no)
  9437  		// cond:
  9438  		// result: (NE cond yes no)
  9439  		for {
  9440  			v := b.Control
  9441  			_ = v
  9442  			cond := b.Control
  9443  			yes := b.Succs[0]
  9444  			no := b.Succs[1]
  9445  			b.Kind = BlockMIPSNE
  9446  			b.SetControl(cond)
  9447  			_ = yes
  9448  			_ = no
  9449  			return true
  9450  		}
  9451  	case BlockMIPSLEZ:
  9452  		// match: (LEZ (MOVWconst [c]) yes no)
  9453  		// cond: int32(c) <= 0
  9454  		// result: (First nil yes no)
  9455  		for {
  9456  			v := b.Control
  9457  			if v.Op != OpMIPSMOVWconst {
  9458  				break
  9459  			}
  9460  			c := v.AuxInt
  9461  			yes := b.Succs[0]
  9462  			no := b.Succs[1]
  9463  			if !(int32(c) <= 0) {
  9464  				break
  9465  			}
  9466  			b.Kind = BlockFirst
  9467  			b.SetControl(nil)
  9468  			_ = yes
  9469  			_ = no
  9470  			return true
  9471  		}
  9472  		// match: (LEZ (MOVWconst [c]) yes no)
  9473  		// cond: int32(c) >  0
  9474  		// result: (First nil no yes)
  9475  		for {
  9476  			v := b.Control
  9477  			if v.Op != OpMIPSMOVWconst {
  9478  				break
  9479  			}
  9480  			c := v.AuxInt
  9481  			yes := b.Succs[0]
  9482  			no := b.Succs[1]
  9483  			if !(int32(c) > 0) {
  9484  				break
  9485  			}
  9486  			b.Kind = BlockFirst
  9487  			b.SetControl(nil)
  9488  			b.swapSuccessors()
  9489  			_ = no
  9490  			_ = yes
  9491  			return true
  9492  		}
  9493  	case BlockMIPSLTZ:
  9494  		// match: (LTZ (MOVWconst [c]) yes no)
  9495  		// cond: int32(c) <  0
  9496  		// result: (First nil yes no)
  9497  		for {
  9498  			v := b.Control
  9499  			if v.Op != OpMIPSMOVWconst {
  9500  				break
  9501  			}
  9502  			c := v.AuxInt
  9503  			yes := b.Succs[0]
  9504  			no := b.Succs[1]
  9505  			if !(int32(c) < 0) {
  9506  				break
  9507  			}
  9508  			b.Kind = BlockFirst
  9509  			b.SetControl(nil)
  9510  			_ = yes
  9511  			_ = no
  9512  			return true
  9513  		}
  9514  		// match: (LTZ (MOVWconst [c]) yes no)
  9515  		// cond: int32(c) >= 0
  9516  		// result: (First nil no yes)
  9517  		for {
  9518  			v := b.Control
  9519  			if v.Op != OpMIPSMOVWconst {
  9520  				break
  9521  			}
  9522  			c := v.AuxInt
  9523  			yes := b.Succs[0]
  9524  			no := b.Succs[1]
  9525  			if !(int32(c) >= 0) {
  9526  				break
  9527  			}
  9528  			b.Kind = BlockFirst
  9529  			b.SetControl(nil)
  9530  			b.swapSuccessors()
  9531  			_ = no
  9532  			_ = yes
  9533  			return true
  9534  		}
  9535  	case BlockMIPSNE:
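		// NE branches to yes when its control value is non-zero. The rules below
		// forward floating-point flag results to FPT/FPF blocks, strip boolean
		// negations of the form (XORconst [1] cmp) by flipping NE to EQ, replace
		// materialized comparisons (SGT*/SGTU*) with blocks that test the
		// underlying operand directly, and finally fold constant controls.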
  9536  		// match: (NE (FPFlagTrue cmp) yes no)
  9537  		// cond:
  9538  		// result: (FPT cmp yes no)
  9539  		for {
  9540  			v := b.Control
  9541  			if v.Op != OpMIPSFPFlagTrue {
  9542  				break
  9543  			}
  9544  			cmp := v.Args[0]
  9545  			yes := b.Succs[0]
  9546  			no := b.Succs[1]
  9547  			b.Kind = BlockMIPSFPT
  9548  			b.SetControl(cmp)
  9549  			_ = yes
  9550  			_ = no
  9551  			return true
  9552  		}
  9553  		// match: (NE (FPFlagFalse cmp) yes no)
  9554  		// cond:
  9555  		// result: (FPF cmp yes no)
  9556  		for {
  9557  			v := b.Control
  9558  			if v.Op != OpMIPSFPFlagFalse {
  9559  				break
  9560  			}
  9561  			cmp := v.Args[0]
  9562  			yes := b.Succs[0]
  9563  			no := b.Succs[1]
  9564  			b.Kind = BlockMIPSFPF
  9565  			b.SetControl(cmp)
  9566  			_ = yes
  9567  			_ = no
  9568  			return true
  9569  		}
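		// The next six rules handle a negated comparison: (XORconst [1] cmp)
		// flips the low bit of a 0/1 comparison result, so branching on it with
		// NE is equivalent to branching on cmp with EQ. One rule is generated for
		// each comparison op (SGT, SGTU, SGTconst, SGTUconst, SGTzero, SGTUzero).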
  9570  		// match: (NE (XORconst [1] cmp:(SGT _ _)) yes no)
  9571  		// cond:
  9572  		// result: (EQ cmp yes no)
  9573  		for {
  9574  			v := b.Control
  9575  			if v.Op != OpMIPSXORconst {
  9576  				break
  9577  			}
  9578  			if v.AuxInt != 1 {
  9579  				break
  9580  			}
  9581  			cmp := v.Args[0]
  9582  			if cmp.Op != OpMIPSSGT {
  9583  				break
  9584  			}
  9585  			yes := b.Succs[0]
  9586  			no := b.Succs[1]
  9587  			b.Kind = BlockMIPSEQ
  9588  			b.SetControl(cmp)
  9589  			_ = yes
  9590  			_ = no
  9591  			return true
  9592  		}
  9593  		// match: (NE (XORconst [1] cmp:(SGTU _ _)) yes no)
  9594  		// cond:
  9595  		// result: (EQ cmp yes no)
  9596  		for {
  9597  			v := b.Control
  9598  			if v.Op != OpMIPSXORconst {
  9599  				break
  9600  			}
  9601  			if v.AuxInt != 1 {
  9602  				break
  9603  			}
  9604  			cmp := v.Args[0]
  9605  			if cmp.Op != OpMIPSSGTU {
  9606  				break
  9607  			}
  9608  			yes := b.Succs[0]
  9609  			no := b.Succs[1]
  9610  			b.Kind = BlockMIPSEQ
  9611  			b.SetControl(cmp)
  9612  			_ = yes
  9613  			_ = no
  9614  			return true
  9615  		}
  9616  		// match: (NE (XORconst [1] cmp:(SGTconst _)) yes no)
  9617  		// cond:
  9618  		// result: (EQ cmp yes no)
  9619  		for {
  9620  			v := b.Control
  9621  			if v.Op != OpMIPSXORconst {
  9622  				break
  9623  			}
  9624  			if v.AuxInt != 1 {
  9625  				break
  9626  			}
  9627  			cmp := v.Args[0]
  9628  			if cmp.Op != OpMIPSSGTconst {
  9629  				break
  9630  			}
  9631  			yes := b.Succs[0]
  9632  			no := b.Succs[1]
  9633  			b.Kind = BlockMIPSEQ
  9634  			b.SetControl(cmp)
  9635  			_ = yes
  9636  			_ = no
  9637  			return true
  9638  		}
  9639  		// match: (NE (XORconst [1] cmp:(SGTUconst _)) yes no)
  9640  		// cond:
  9641  		// result: (EQ cmp yes no)
  9642  		for {
  9643  			v := b.Control
  9644  			if v.Op != OpMIPSXORconst {
  9645  				break
  9646  			}
  9647  			if v.AuxInt != 1 {
  9648  				break
  9649  			}
  9650  			cmp := v.Args[0]
  9651  			if cmp.Op != OpMIPSSGTUconst {
  9652  				break
  9653  			}
  9654  			yes := b.Succs[0]
  9655  			no := b.Succs[1]
  9656  			b.Kind = BlockMIPSEQ
  9657  			b.SetControl(cmp)
  9658  			_ = yes
  9659  			_ = no
  9660  			return true
  9661  		}
  9662  		// match: (NE (XORconst [1] cmp:(SGTzero _)) yes no)
  9663  		// cond:
  9664  		// result: (EQ cmp yes no)
  9665  		for {
  9666  			v := b.Control
  9667  			if v.Op != OpMIPSXORconst {
  9668  				break
  9669  			}
  9670  			if v.AuxInt != 1 {
  9671  				break
  9672  			}
  9673  			cmp := v.Args[0]
  9674  			if cmp.Op != OpMIPSSGTzero {
  9675  				break
  9676  			}
  9677  			yes := b.Succs[0]
  9678  			no := b.Succs[1]
  9679  			b.Kind = BlockMIPSEQ
  9680  			b.SetControl(cmp)
  9681  			_ = yes
  9682  			_ = no
  9683  			return true
  9684  		}
  9685  		// match: (NE (XORconst [1] cmp:(SGTUzero _)) yes no)
  9686  		// cond:
  9687  		// result: (EQ cmp yes no)
  9688  		for {
  9689  			v := b.Control
  9690  			if v.Op != OpMIPSXORconst {
  9691  				break
  9692  			}
  9693  			if v.AuxInt != 1 {
  9694  				break
  9695  			}
  9696  			cmp := v.Args[0]
  9697  			if cmp.Op != OpMIPSSGTUzero {
  9698  				break
  9699  			}
  9700  			yes := b.Succs[0]
  9701  			no := b.Succs[1]
  9702  			b.Kind = BlockMIPSEQ
  9703  			b.SetControl(cmp)
  9704  			_ = yes
  9705  			_ = no
  9706  			return true
  9707  		}
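		// The next four rules rewrite NE of a materialized comparison into a
		// block that inspects the operand itself: (SGTUconst [1] x) is 1 exactly
		// when x == 0, so the branch becomes EQ x; (SGTUzero x) is 1 exactly when
		// x != 0, giving NE x; (SGTconst [0] x) tests x < 0 (LTZ); and
		// (SGTzero x) tests x > 0 (GTZ).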
  9708  		// match: (NE (SGTUconst [1] x) yes no)
  9709  		// cond:
  9710  		// result: (EQ x yes no)
  9711  		for {
  9712  			v := b.Control
  9713  			if v.Op != OpMIPSSGTUconst {
  9714  				break
  9715  			}
  9716  			if v.AuxInt != 1 {
  9717  				break
  9718  			}
  9719  			x := v.Args[0]
  9720  			yes := b.Succs[0]
  9721  			no := b.Succs[1]
  9722  			b.Kind = BlockMIPSEQ
  9723  			b.SetControl(x)
  9724  			_ = yes
  9725  			_ = no
  9726  			return true
  9727  		}
  9728  		// match: (NE (SGTUzero x) yes no)
  9729  		// cond:
  9730  		// result: (NE x yes no)
  9731  		for {
  9732  			v := b.Control
  9733  			if v.Op != OpMIPSSGTUzero {
  9734  				break
  9735  			}
  9736  			x := v.Args[0]
  9737  			yes := b.Succs[0]
  9738  			no := b.Succs[1]
  9739  			b.Kind = BlockMIPSNE
  9740  			b.SetControl(x)
  9741  			_ = yes
  9742  			_ = no
  9743  			return true
  9744  		}
  9745  		// match: (NE (SGTconst [0] x) yes no)
  9746  		// cond:
  9747  		// result: (LTZ x yes no)
  9748  		for {
  9749  			v := b.Control
  9750  			if v.Op != OpMIPSSGTconst {
  9751  				break
  9752  			}
  9753  			if v.AuxInt != 0 {
  9754  				break
  9755  			}
  9756  			x := v.Args[0]
  9757  			yes := b.Succs[0]
  9758  			no := b.Succs[1]
  9759  			b.Kind = BlockMIPSLTZ
  9760  			b.SetControl(x)
  9761  			_ = yes
  9762  			_ = no
  9763  			return true
  9764  		}
  9765  		// match: (NE (SGTzero x) yes no)
  9766  		// cond:
  9767  		// result: (GTZ x yes no)
  9768  		for {
  9769  			v := b.Control
  9770  			if v.Op != OpMIPSSGTzero {
  9771  				break
  9772  			}
  9773  			x := v.Args[0]
  9774  			yes := b.Succs[0]
  9775  			no := b.Succs[1]
  9776  			b.Kind = BlockMIPSGTZ
  9777  			b.SetControl(x)
  9778  			_ = yes
  9779  			_ = no
  9780  			return true
  9781  		}
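		// Finally, a constant control decides the branch at compile time: a zero
		// constant swaps the successors before converting to First, while any
		// non-zero constant falls through to yes directly.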
  9782  		// match: (NE  (MOVWconst [0]) yes no)
  9783  		// cond:
  9784  		// result: (First nil no yes)
  9785  		for {
  9786  			v := b.Control
  9787  			if v.Op != OpMIPSMOVWconst {
  9788  				break
  9789  			}
  9790  			if v.AuxInt != 0 {
  9791  				break
  9792  			}
  9793  			yes := b.Succs[0]
  9794  			no := b.Succs[1]
  9795  			b.Kind = BlockFirst
  9796  			b.SetControl(nil)
  9797  			b.swapSuccessors()
  9798  			_ = no
  9799  			_ = yes
  9800  			return true
  9801  		}
  9802  		// match: (NE  (MOVWconst [c]) yes no)
  9803  		// cond: c != 0
  9804  		// result: (First nil yes no)
  9805  		for {
  9806  			v := b.Control
  9807  			if v.Op != OpMIPSMOVWconst {
  9808  				break
  9809  			}
  9810  			c := v.AuxInt
  9811  			yes := b.Succs[0]
  9812  			no := b.Succs[1]
  9813  			if !(c != 0) {
  9814  				break
  9815  			}
  9816  			b.Kind = BlockFirst
  9817  			b.SetControl(nil)
  9818  			_ = yes
  9819  			_ = no
  9820  			return true
  9821  		}
  9822  	}
  9823  	return false
  9824  }