github.com/riscv/riscv-go@v0.0.0-20200123204226-124ebd6fcc8e/src/cmd/compile/internal/ssa/rewriteMIPS.go (about)

     1  // autogenerated from gen/MIPS.rules: do not edit!
     2  // generated with: cd gen; go run *.go
     3  
     4  package ssa
     5  
     6  import "math"
     7  
     8  var _ = math.MinInt8 // in case not otherwise used
// rewriteValueMIPS dispatches v to the rewrite function generated for its
// opcode and reports whether a rewrite rule fired (mutating v in place).
// The case list is emitted by the generator from gen/MIPS.rules; to change
// it, edit the rules file and regenerate — do not edit this switch by hand.
func rewriteValueMIPS(v *Value, config *Config) bool {
	switch v.Op {
	case OpAdd16:
		return rewriteValueMIPS_OpAdd16(v, config)
	case OpAdd32:
		return rewriteValueMIPS_OpAdd32(v, config)
	case OpAdd32F:
		return rewriteValueMIPS_OpAdd32F(v, config)
	case OpAdd32withcarry:
		return rewriteValueMIPS_OpAdd32withcarry(v, config)
	case OpAdd64F:
		return rewriteValueMIPS_OpAdd64F(v, config)
	case OpAdd8:
		return rewriteValueMIPS_OpAdd8(v, config)
	case OpAddPtr:
		return rewriteValueMIPS_OpAddPtr(v, config)
	case OpAddr:
		return rewriteValueMIPS_OpAddr(v, config)
	case OpAnd16:
		return rewriteValueMIPS_OpAnd16(v, config)
	case OpAnd32:
		return rewriteValueMIPS_OpAnd32(v, config)
	case OpAnd8:
		return rewriteValueMIPS_OpAnd8(v, config)
	case OpAndB:
		return rewriteValueMIPS_OpAndB(v, config)
	case OpAtomicAdd32:
		return rewriteValueMIPS_OpAtomicAdd32(v, config)
	case OpAtomicAnd8:
		return rewriteValueMIPS_OpAtomicAnd8(v, config)
	case OpAtomicCompareAndSwap32:
		return rewriteValueMIPS_OpAtomicCompareAndSwap32(v, config)
	case OpAtomicExchange32:
		return rewriteValueMIPS_OpAtomicExchange32(v, config)
	case OpAtomicLoad32:
		return rewriteValueMIPS_OpAtomicLoad32(v, config)
	case OpAtomicLoadPtr:
		return rewriteValueMIPS_OpAtomicLoadPtr(v, config)
	case OpAtomicOr8:
		return rewriteValueMIPS_OpAtomicOr8(v, config)
	case OpAtomicStore32:
		return rewriteValueMIPS_OpAtomicStore32(v, config)
	case OpAtomicStorePtrNoWB:
		return rewriteValueMIPS_OpAtomicStorePtrNoWB(v, config)
	case OpClosureCall:
		return rewriteValueMIPS_OpClosureCall(v, config)
	case OpCom16:
		return rewriteValueMIPS_OpCom16(v, config)
	case OpCom32:
		return rewriteValueMIPS_OpCom32(v, config)
	case OpCom8:
		return rewriteValueMIPS_OpCom8(v, config)
	case OpConst16:
		return rewriteValueMIPS_OpConst16(v, config)
	case OpConst32:
		return rewriteValueMIPS_OpConst32(v, config)
	case OpConst32F:
		return rewriteValueMIPS_OpConst32F(v, config)
	case OpConst64F:
		return rewriteValueMIPS_OpConst64F(v, config)
	case OpConst8:
		return rewriteValueMIPS_OpConst8(v, config)
	case OpConstBool:
		return rewriteValueMIPS_OpConstBool(v, config)
	case OpConstNil:
		return rewriteValueMIPS_OpConstNil(v, config)
	case OpConvert:
		return rewriteValueMIPS_OpConvert(v, config)
	case OpCtz32:
		return rewriteValueMIPS_OpCtz32(v, config)
	case OpCvt32Fto32:
		return rewriteValueMIPS_OpCvt32Fto32(v, config)
	case OpCvt32Fto64F:
		return rewriteValueMIPS_OpCvt32Fto64F(v, config)
	case OpCvt32to32F:
		return rewriteValueMIPS_OpCvt32to32F(v, config)
	case OpCvt32to64F:
		return rewriteValueMIPS_OpCvt32to64F(v, config)
	case OpCvt64Fto32:
		return rewriteValueMIPS_OpCvt64Fto32(v, config)
	case OpCvt64Fto32F:
		return rewriteValueMIPS_OpCvt64Fto32F(v, config)
	case OpDeferCall:
		return rewriteValueMIPS_OpDeferCall(v, config)
	case OpDiv16:
		return rewriteValueMIPS_OpDiv16(v, config)
	case OpDiv16u:
		return rewriteValueMIPS_OpDiv16u(v, config)
	case OpDiv32:
		return rewriteValueMIPS_OpDiv32(v, config)
	case OpDiv32F:
		return rewriteValueMIPS_OpDiv32F(v, config)
	case OpDiv32u:
		return rewriteValueMIPS_OpDiv32u(v, config)
	case OpDiv64F:
		return rewriteValueMIPS_OpDiv64F(v, config)
	case OpDiv8:
		return rewriteValueMIPS_OpDiv8(v, config)
	case OpDiv8u:
		return rewriteValueMIPS_OpDiv8u(v, config)
	case OpEq16:
		return rewriteValueMIPS_OpEq16(v, config)
	case OpEq32:
		return rewriteValueMIPS_OpEq32(v, config)
	case OpEq32F:
		return rewriteValueMIPS_OpEq32F(v, config)
	case OpEq64F:
		return rewriteValueMIPS_OpEq64F(v, config)
	case OpEq8:
		return rewriteValueMIPS_OpEq8(v, config)
	case OpEqB:
		return rewriteValueMIPS_OpEqB(v, config)
	case OpEqPtr:
		return rewriteValueMIPS_OpEqPtr(v, config)
	case OpGeq16:
		return rewriteValueMIPS_OpGeq16(v, config)
	case OpGeq16U:
		return rewriteValueMIPS_OpGeq16U(v, config)
	case OpGeq32:
		return rewriteValueMIPS_OpGeq32(v, config)
	case OpGeq32F:
		return rewriteValueMIPS_OpGeq32F(v, config)
	case OpGeq32U:
		return rewriteValueMIPS_OpGeq32U(v, config)
	case OpGeq64F:
		return rewriteValueMIPS_OpGeq64F(v, config)
	case OpGeq8:
		return rewriteValueMIPS_OpGeq8(v, config)
	case OpGeq8U:
		return rewriteValueMIPS_OpGeq8U(v, config)
	case OpGetClosurePtr:
		return rewriteValueMIPS_OpGetClosurePtr(v, config)
	case OpGoCall:
		return rewriteValueMIPS_OpGoCall(v, config)
	case OpGreater16:
		return rewriteValueMIPS_OpGreater16(v, config)
	case OpGreater16U:
		return rewriteValueMIPS_OpGreater16U(v, config)
	case OpGreater32:
		return rewriteValueMIPS_OpGreater32(v, config)
	case OpGreater32F:
		return rewriteValueMIPS_OpGreater32F(v, config)
	case OpGreater32U:
		return rewriteValueMIPS_OpGreater32U(v, config)
	case OpGreater64F:
		return rewriteValueMIPS_OpGreater64F(v, config)
	case OpGreater8:
		return rewriteValueMIPS_OpGreater8(v, config)
	case OpGreater8U:
		return rewriteValueMIPS_OpGreater8U(v, config)
	case OpHmul16:
		return rewriteValueMIPS_OpHmul16(v, config)
	case OpHmul16u:
		return rewriteValueMIPS_OpHmul16u(v, config)
	case OpHmul32:
		return rewriteValueMIPS_OpHmul32(v, config)
	case OpHmul32u:
		return rewriteValueMIPS_OpHmul32u(v, config)
	case OpHmul8:
		return rewriteValueMIPS_OpHmul8(v, config)
	case OpHmul8u:
		return rewriteValueMIPS_OpHmul8u(v, config)
	case OpInterCall:
		return rewriteValueMIPS_OpInterCall(v, config)
	case OpIsInBounds:
		return rewriteValueMIPS_OpIsInBounds(v, config)
	case OpIsNonNil:
		return rewriteValueMIPS_OpIsNonNil(v, config)
	case OpIsSliceInBounds:
		return rewriteValueMIPS_OpIsSliceInBounds(v, config)
	case OpLeq16:
		return rewriteValueMIPS_OpLeq16(v, config)
	case OpLeq16U:
		return rewriteValueMIPS_OpLeq16U(v, config)
	case OpLeq32:
		return rewriteValueMIPS_OpLeq32(v, config)
	case OpLeq32F:
		return rewriteValueMIPS_OpLeq32F(v, config)
	case OpLeq32U:
		return rewriteValueMIPS_OpLeq32U(v, config)
	case OpLeq64F:
		return rewriteValueMIPS_OpLeq64F(v, config)
	case OpLeq8:
		return rewriteValueMIPS_OpLeq8(v, config)
	case OpLeq8U:
		return rewriteValueMIPS_OpLeq8U(v, config)
	case OpLess16:
		return rewriteValueMIPS_OpLess16(v, config)
	case OpLess16U:
		return rewriteValueMIPS_OpLess16U(v, config)
	case OpLess32:
		return rewriteValueMIPS_OpLess32(v, config)
	case OpLess32F:
		return rewriteValueMIPS_OpLess32F(v, config)
	case OpLess32U:
		return rewriteValueMIPS_OpLess32U(v, config)
	case OpLess64F:
		return rewriteValueMIPS_OpLess64F(v, config)
	case OpLess8:
		return rewriteValueMIPS_OpLess8(v, config)
	case OpLess8U:
		return rewriteValueMIPS_OpLess8U(v, config)
	case OpLoad:
		return rewriteValueMIPS_OpLoad(v, config)
	case OpLsh16x16:
		return rewriteValueMIPS_OpLsh16x16(v, config)
	case OpLsh16x32:
		return rewriteValueMIPS_OpLsh16x32(v, config)
	case OpLsh16x64:
		return rewriteValueMIPS_OpLsh16x64(v, config)
	case OpLsh16x8:
		return rewriteValueMIPS_OpLsh16x8(v, config)
	case OpLsh32x16:
		return rewriteValueMIPS_OpLsh32x16(v, config)
	case OpLsh32x32:
		return rewriteValueMIPS_OpLsh32x32(v, config)
	case OpLsh32x64:
		return rewriteValueMIPS_OpLsh32x64(v, config)
	case OpLsh32x8:
		return rewriteValueMIPS_OpLsh32x8(v, config)
	case OpLsh8x16:
		return rewriteValueMIPS_OpLsh8x16(v, config)
	case OpLsh8x32:
		return rewriteValueMIPS_OpLsh8x32(v, config)
	case OpLsh8x64:
		return rewriteValueMIPS_OpLsh8x64(v, config)
	case OpLsh8x8:
		return rewriteValueMIPS_OpLsh8x8(v, config)
	case OpMIPSADD:
		return rewriteValueMIPS_OpMIPSADD(v, config)
	case OpMIPSADDconst:
		return rewriteValueMIPS_OpMIPSADDconst(v, config)
	case OpMIPSAND:
		return rewriteValueMIPS_OpMIPSAND(v, config)
	case OpMIPSANDconst:
		return rewriteValueMIPS_OpMIPSANDconst(v, config)
	case OpMIPSCMOVZ:
		return rewriteValueMIPS_OpMIPSCMOVZ(v, config)
	case OpMIPSCMOVZzero:
		return rewriteValueMIPS_OpMIPSCMOVZzero(v, config)
	case OpMIPSLoweredAtomicAdd:
		return rewriteValueMIPS_OpMIPSLoweredAtomicAdd(v, config)
	case OpMIPSLoweredAtomicStore:
		return rewriteValueMIPS_OpMIPSLoweredAtomicStore(v, config)
	case OpMIPSMOVBUload:
		return rewriteValueMIPS_OpMIPSMOVBUload(v, config)
	case OpMIPSMOVBUreg:
		return rewriteValueMIPS_OpMIPSMOVBUreg(v, config)
	case OpMIPSMOVBload:
		return rewriteValueMIPS_OpMIPSMOVBload(v, config)
	case OpMIPSMOVBreg:
		return rewriteValueMIPS_OpMIPSMOVBreg(v, config)
	case OpMIPSMOVBstore:
		return rewriteValueMIPS_OpMIPSMOVBstore(v, config)
	case OpMIPSMOVBstorezero:
		return rewriteValueMIPS_OpMIPSMOVBstorezero(v, config)
	case OpMIPSMOVDload:
		return rewriteValueMIPS_OpMIPSMOVDload(v, config)
	case OpMIPSMOVDstore:
		return rewriteValueMIPS_OpMIPSMOVDstore(v, config)
	case OpMIPSMOVFload:
		return rewriteValueMIPS_OpMIPSMOVFload(v, config)
	case OpMIPSMOVFstore:
		return rewriteValueMIPS_OpMIPSMOVFstore(v, config)
	case OpMIPSMOVHUload:
		return rewriteValueMIPS_OpMIPSMOVHUload(v, config)
	case OpMIPSMOVHUreg:
		return rewriteValueMIPS_OpMIPSMOVHUreg(v, config)
	case OpMIPSMOVHload:
		return rewriteValueMIPS_OpMIPSMOVHload(v, config)
	case OpMIPSMOVHreg:
		return rewriteValueMIPS_OpMIPSMOVHreg(v, config)
	case OpMIPSMOVHstore:
		return rewriteValueMIPS_OpMIPSMOVHstore(v, config)
	case OpMIPSMOVHstorezero:
		return rewriteValueMIPS_OpMIPSMOVHstorezero(v, config)
	case OpMIPSMOVWload:
		return rewriteValueMIPS_OpMIPSMOVWload(v, config)
	case OpMIPSMOVWreg:
		return rewriteValueMIPS_OpMIPSMOVWreg(v, config)
	case OpMIPSMOVWstore:
		return rewriteValueMIPS_OpMIPSMOVWstore(v, config)
	case OpMIPSMOVWstorezero:
		return rewriteValueMIPS_OpMIPSMOVWstorezero(v, config)
	case OpMIPSMUL:
		return rewriteValueMIPS_OpMIPSMUL(v, config)
	case OpMIPSNEG:
		return rewriteValueMIPS_OpMIPSNEG(v, config)
	case OpMIPSNOR:
		return rewriteValueMIPS_OpMIPSNOR(v, config)
	case OpMIPSNORconst:
		return rewriteValueMIPS_OpMIPSNORconst(v, config)
	case OpMIPSOR:
		return rewriteValueMIPS_OpMIPSOR(v, config)
	case OpMIPSORconst:
		return rewriteValueMIPS_OpMIPSORconst(v, config)
	case OpMIPSSGT:
		return rewriteValueMIPS_OpMIPSSGT(v, config)
	case OpMIPSSGTU:
		return rewriteValueMIPS_OpMIPSSGTU(v, config)
	case OpMIPSSGTUconst:
		return rewriteValueMIPS_OpMIPSSGTUconst(v, config)
	case OpMIPSSGTUzero:
		return rewriteValueMIPS_OpMIPSSGTUzero(v, config)
	case OpMIPSSGTconst:
		return rewriteValueMIPS_OpMIPSSGTconst(v, config)
	case OpMIPSSGTzero:
		return rewriteValueMIPS_OpMIPSSGTzero(v, config)
	case OpMIPSSLL:
		return rewriteValueMIPS_OpMIPSSLL(v, config)
	case OpMIPSSLLconst:
		return rewriteValueMIPS_OpMIPSSLLconst(v, config)
	case OpMIPSSRA:
		return rewriteValueMIPS_OpMIPSSRA(v, config)
	case OpMIPSSRAconst:
		return rewriteValueMIPS_OpMIPSSRAconst(v, config)
	case OpMIPSSRL:
		return rewriteValueMIPS_OpMIPSSRL(v, config)
	case OpMIPSSRLconst:
		return rewriteValueMIPS_OpMIPSSRLconst(v, config)
	case OpMIPSSUB:
		return rewriteValueMIPS_OpMIPSSUB(v, config)
	case OpMIPSSUBconst:
		return rewriteValueMIPS_OpMIPSSUBconst(v, config)
	case OpMIPSXOR:
		return rewriteValueMIPS_OpMIPSXOR(v, config)
	case OpMIPSXORconst:
		return rewriteValueMIPS_OpMIPSXORconst(v, config)
	case OpMod16:
		return rewriteValueMIPS_OpMod16(v, config)
	case OpMod16u:
		return rewriteValueMIPS_OpMod16u(v, config)
	case OpMod32:
		return rewriteValueMIPS_OpMod32(v, config)
	case OpMod32u:
		return rewriteValueMIPS_OpMod32u(v, config)
	case OpMod8:
		return rewriteValueMIPS_OpMod8(v, config)
	case OpMod8u:
		return rewriteValueMIPS_OpMod8u(v, config)
	case OpMove:
		return rewriteValueMIPS_OpMove(v, config)
	case OpMul16:
		return rewriteValueMIPS_OpMul16(v, config)
	case OpMul32:
		return rewriteValueMIPS_OpMul32(v, config)
	case OpMul32F:
		return rewriteValueMIPS_OpMul32F(v, config)
	case OpMul32uhilo:
		return rewriteValueMIPS_OpMul32uhilo(v, config)
	case OpMul64F:
		return rewriteValueMIPS_OpMul64F(v, config)
	case OpMul8:
		return rewriteValueMIPS_OpMul8(v, config)
	case OpNeg16:
		return rewriteValueMIPS_OpNeg16(v, config)
	case OpNeg32:
		return rewriteValueMIPS_OpNeg32(v, config)
	case OpNeg32F:
		return rewriteValueMIPS_OpNeg32F(v, config)
	case OpNeg64F:
		return rewriteValueMIPS_OpNeg64F(v, config)
	case OpNeg8:
		return rewriteValueMIPS_OpNeg8(v, config)
	case OpNeq16:
		return rewriteValueMIPS_OpNeq16(v, config)
	case OpNeq32:
		return rewriteValueMIPS_OpNeq32(v, config)
	case OpNeq32F:
		return rewriteValueMIPS_OpNeq32F(v, config)
	case OpNeq64F:
		return rewriteValueMIPS_OpNeq64F(v, config)
	case OpNeq8:
		return rewriteValueMIPS_OpNeq8(v, config)
	case OpNeqB:
		return rewriteValueMIPS_OpNeqB(v, config)
	case OpNeqPtr:
		return rewriteValueMIPS_OpNeqPtr(v, config)
	case OpNilCheck:
		return rewriteValueMIPS_OpNilCheck(v, config)
	case OpNot:
		return rewriteValueMIPS_OpNot(v, config)
	case OpOffPtr:
		return rewriteValueMIPS_OpOffPtr(v, config)
	case OpOr16:
		return rewriteValueMIPS_OpOr16(v, config)
	case OpOr32:
		return rewriteValueMIPS_OpOr32(v, config)
	case OpOr8:
		return rewriteValueMIPS_OpOr8(v, config)
	case OpOrB:
		return rewriteValueMIPS_OpOrB(v, config)
	case OpRsh16Ux16:
		return rewriteValueMIPS_OpRsh16Ux16(v, config)
	case OpRsh16Ux32:
		return rewriteValueMIPS_OpRsh16Ux32(v, config)
	case OpRsh16Ux64:
		return rewriteValueMIPS_OpRsh16Ux64(v, config)
	case OpRsh16Ux8:
		return rewriteValueMIPS_OpRsh16Ux8(v, config)
	case OpRsh16x16:
		return rewriteValueMIPS_OpRsh16x16(v, config)
	case OpRsh16x32:
		return rewriteValueMIPS_OpRsh16x32(v, config)
	case OpRsh16x64:
		return rewriteValueMIPS_OpRsh16x64(v, config)
	case OpRsh16x8:
		return rewriteValueMIPS_OpRsh16x8(v, config)
	case OpRsh32Ux16:
		return rewriteValueMIPS_OpRsh32Ux16(v, config)
	case OpRsh32Ux32:
		return rewriteValueMIPS_OpRsh32Ux32(v, config)
	case OpRsh32Ux64:
		return rewriteValueMIPS_OpRsh32Ux64(v, config)
	case OpRsh32Ux8:
		return rewriteValueMIPS_OpRsh32Ux8(v, config)
	case OpRsh32x16:
		return rewriteValueMIPS_OpRsh32x16(v, config)
	case OpRsh32x32:
		return rewriteValueMIPS_OpRsh32x32(v, config)
	case OpRsh32x64:
		return rewriteValueMIPS_OpRsh32x64(v, config)
	case OpRsh32x8:
		return rewriteValueMIPS_OpRsh32x8(v, config)
	case OpRsh8Ux16:
		return rewriteValueMIPS_OpRsh8Ux16(v, config)
	case OpRsh8Ux32:
		return rewriteValueMIPS_OpRsh8Ux32(v, config)
	case OpRsh8Ux64:
		return rewriteValueMIPS_OpRsh8Ux64(v, config)
	case OpRsh8Ux8:
		return rewriteValueMIPS_OpRsh8Ux8(v, config)
	case OpRsh8x16:
		return rewriteValueMIPS_OpRsh8x16(v, config)
	case OpRsh8x32:
		return rewriteValueMIPS_OpRsh8x32(v, config)
	case OpRsh8x64:
		return rewriteValueMIPS_OpRsh8x64(v, config)
	case OpRsh8x8:
		return rewriteValueMIPS_OpRsh8x8(v, config)
	case OpSelect0:
		return rewriteValueMIPS_OpSelect0(v, config)
	case OpSelect1:
		return rewriteValueMIPS_OpSelect1(v, config)
	case OpSignExt16to32:
		return rewriteValueMIPS_OpSignExt16to32(v, config)
	case OpSignExt8to16:
		return rewriteValueMIPS_OpSignExt8to16(v, config)
	case OpSignExt8to32:
		return rewriteValueMIPS_OpSignExt8to32(v, config)
	case OpSignmask:
		return rewriteValueMIPS_OpSignmask(v, config)
	case OpSlicemask:
		return rewriteValueMIPS_OpSlicemask(v, config)
	case OpSqrt:
		return rewriteValueMIPS_OpSqrt(v, config)
	case OpStaticCall:
		return rewriteValueMIPS_OpStaticCall(v, config)
	case OpStore:
		return rewriteValueMIPS_OpStore(v, config)
	case OpSub16:
		return rewriteValueMIPS_OpSub16(v, config)
	case OpSub32:
		return rewriteValueMIPS_OpSub32(v, config)
	case OpSub32F:
		return rewriteValueMIPS_OpSub32F(v, config)
	case OpSub32withcarry:
		return rewriteValueMIPS_OpSub32withcarry(v, config)
	case OpSub64F:
		return rewriteValueMIPS_OpSub64F(v, config)
	case OpSub8:
		return rewriteValueMIPS_OpSub8(v, config)
	case OpSubPtr:
		return rewriteValueMIPS_OpSubPtr(v, config)
	case OpTrunc16to8:
		return rewriteValueMIPS_OpTrunc16to8(v, config)
	case OpTrunc32to16:
		return rewriteValueMIPS_OpTrunc32to16(v, config)
	case OpTrunc32to8:
		return rewriteValueMIPS_OpTrunc32to8(v, config)
	case OpXor16:
		return rewriteValueMIPS_OpXor16(v, config)
	case OpXor32:
		return rewriteValueMIPS_OpXor32(v, config)
	case OpXor8:
		return rewriteValueMIPS_OpXor8(v, config)
	case OpZero:
		return rewriteValueMIPS_OpZero(v, config)
	case OpZeroExt16to32:
		return rewriteValueMIPS_OpZeroExt16to32(v, config)
	case OpZeroExt8to16:
		return rewriteValueMIPS_OpZeroExt8to16(v, config)
	case OpZeroExt8to32:
		return rewriteValueMIPS_OpZeroExt8to32(v, config)
	case OpZeromask:
		return rewriteValueMIPS_OpZeromask(v, config)
	}
	// No rule for this opcode: leave v unchanged.
	return false
}
// rewriteValueMIPS_OpAdd16 lowers the generic Add16 op to MIPS ADD.
// 16-bit adds use the full 32-bit ALU add; consumers only rely on the
// low 16 bits of the result.
func rewriteValueMIPS_OpAdd16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Add16 x y)
	// cond:
	// result: (ADD x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSADD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS_OpAdd32 lowers the generic Add32 op to MIPS ADD.
func rewriteValueMIPS_OpAdd32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Add32 x y)
	// cond:
	// result: (ADD x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSADD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS_OpAdd32F lowers the generic Add32F op to the MIPS
// single-precision floating-point add (ADDF).
func rewriteValueMIPS_OpAdd32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Add32F x y)
	// cond:
	// result: (ADDF x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSADDF)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS_OpAdd32withcarry lowers Add32withcarry <t> x y c to
// (ADD c (ADD <t> x y)): the three-input add is expressed as two
// two-input ADDs, with the carry input added last.
func rewriteValueMIPS_OpAdd32withcarry(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Add32withcarry <t> x y c)
	// cond:
	// result: (ADD c (ADD <t> x y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		c := v.Args[2]
		v.reset(OpMIPSADD)
		v.AddArg(c)
		// Inner ADD carries the original result type t.
		v0 := b.NewValue0(v.Pos, OpMIPSADD, t)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS_OpAdd64F lowers the generic Add64F op to the MIPS
// double-precision floating-point add (ADDD).
func rewriteValueMIPS_OpAdd64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Add64F x y)
	// cond:
	// result: (ADDD x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSADDD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS_OpAdd8 lowers the generic Add8 op to MIPS ADD.
// 8-bit adds use the full 32-bit ALU add; consumers only rely on the
// low 8 bits of the result.
func rewriteValueMIPS_OpAdd8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Add8 x y)
	// cond:
	// result: (ADD x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSADD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS_OpAddPtr lowers pointer addition to MIPS ADD
// (pointers are 32-bit integers on this target).
func rewriteValueMIPS_OpAddPtr(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (AddPtr x y)
	// cond:
	// result: (ADD x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSADD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS_OpAddr lowers the generic Addr op to MOVWaddr,
// carrying the symbol reference through the Aux field unchanged.
func rewriteValueMIPS_OpAddr(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Addr {sym} base)
	// cond:
	// result: (MOVWaddr {sym} base)
	for {
		sym := v.Aux
		base := v.Args[0]
		v.reset(OpMIPSMOVWaddr)
		v.Aux = sym
		v.AddArg(base)
		return true
	}
}
// rewriteValueMIPS_OpAnd16 lowers the generic And16 op to MIPS AND
// (bitwise AND is width-agnostic on the 32-bit ALU).
func rewriteValueMIPS_OpAnd16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (And16 x y)
	// cond:
	// result: (AND x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSAND)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS_OpAnd32 lowers the generic And32 op to MIPS AND.
func rewriteValueMIPS_OpAnd32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (And32 x y)
	// cond:
	// result: (AND x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSAND)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS_OpAnd8 lowers the generic And8 op to MIPS AND.
func rewriteValueMIPS_OpAnd8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (And8 x y)
	// cond:
	// result: (AND x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSAND)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS_OpAndB lowers the generic boolean AND (AndB) to
// MIPS AND; booleans are materialized as 0/1 so bitwise AND suffices.
func rewriteValueMIPS_OpAndB(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (AndB x y)
	// cond:
	// result: (AND x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSAND)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS_OpAtomicAdd32 lowers AtomicAdd32 to the target's
// LoweredAtomicAdd pseudo-op, threading ptr, val and the memory arg
// through unchanged.
func rewriteValueMIPS_OpAtomicAdd32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (AtomicAdd32 ptr val mem)
	// cond:
	// result: (LoweredAtomicAdd ptr val mem)
	for {
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpMIPSLoweredAtomicAdd)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS_OpAtomicAnd8 lowers a byte-sized atomic AND into a
// word-sized LoweredAtomicAnd, since MIPS LL/SC operate on 32-bit words.
// The word address is ptr &^ 3; the byte's value is shifted into its lane
// within the word, and all other lanes of the AND mask are forced to 1
// (via NORconst [0], i.e. bitwise NOT of the shifted 0xff) so they are
// left unchanged. The lane shift is (ptr&3)*8 on little-endian and
// ((ptr^3)&3)*8 on big-endian; the two rules below differ only in that.
func rewriteValueMIPS_OpAtomicAnd8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (AtomicAnd8  ptr val mem)
	// cond: !config.BigEndian
	// result: (LoweredAtomicAnd (AND <config.fe.TypeUInt32().PtrTo()> (MOVWconst [^3]) ptr) 		(OR <config.fe.TypeUInt32()> (SLL <config.fe.TypeUInt32()> (ZeroExt8to32 val) 			(SLLconst <config.fe.TypeUInt32()> [3] 				(ANDconst  <config.fe.TypeUInt32()> [3] ptr))) 		(NORconst [0] <config.fe.TypeUInt32()> (SLL <config.fe.TypeUInt32()> 			(MOVWconst [0xff]) (SLLconst <config.fe.TypeUInt32()> [3] 				(ANDconst <config.fe.TypeUInt32()> [3] ptr)))))) mem)
	for {
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(!config.BigEndian) {
			break
		}
		v.reset(OpMIPSLoweredAtomicAnd)
		// v0: word-aligned address ptr &^ 3 (MOVWconst [^3] AND ptr).
		v0 := b.NewValue0(v.Pos, OpMIPSAND, config.fe.TypeUInt32().PtrTo())
		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v1.AuxInt = ^3
		v0.AddArg(v1)
		v0.AddArg(ptr)
		v.AddArg(v0)
		// v2: mask word = (val << shift) | ^(0xff << shift).
		v2 := b.NewValue0(v.Pos, OpMIPSOR, config.fe.TypeUInt32())
		// v3: zero-extended val shifted into its byte lane.
		v3 := b.NewValue0(v.Pos, OpMIPSSLL, config.fe.TypeUInt32())
		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
		v4.AddArg(val)
		v3.AddArg(v4)
		// shift amount = (ptr & 3) << 3, i.e. byte offset in bits.
		v5 := b.NewValue0(v.Pos, OpMIPSSLLconst, config.fe.TypeUInt32())
		v5.AuxInt = 3
		v6 := b.NewValue0(v.Pos, OpMIPSANDconst, config.fe.TypeUInt32())
		v6.AuxInt = 3
		v6.AddArg(ptr)
		v5.AddArg(v6)
		v3.AddArg(v5)
		v2.AddArg(v3)
		// v7: ^(0xff << shift) — ones everywhere except the target lane.
		// NOTE(review): this shift uses (ptr^3)&3 while the value shift
		// above uses ptr&3 — presumably intentional per the generated
		// rule, but the asymmetry is worth confirming against MIPS.rules.
		v7 := b.NewValue0(v.Pos, OpMIPSNORconst, config.fe.TypeUInt32())
		v7.AuxInt = 0
		v8 := b.NewValue0(v.Pos, OpMIPSSLL, config.fe.TypeUInt32())
		v9 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v9.AuxInt = 0xff
		v8.AddArg(v9)
		v10 := b.NewValue0(v.Pos, OpMIPSSLLconst, config.fe.TypeUInt32())
		v10.AuxInt = 3
		v11 := b.NewValue0(v.Pos, OpMIPSANDconst, config.fe.TypeUInt32())
		v11.AuxInt = 3
		v12 := b.NewValue0(v.Pos, OpMIPSXORconst, config.fe.TypeUInt32())
		v12.AuxInt = 3
		v12.AddArg(ptr)
		v11.AddArg(v12)
		v10.AddArg(v11)
		v8.AddArg(v10)
		v7.AddArg(v8)
		v2.AddArg(v7)
		v.AddArg(v2)
		v.AddArg(mem)
		return true
	}
	// match: (AtomicAnd8  ptr val mem)
	// cond: config.BigEndian
	// result: (LoweredAtomicAnd (AND <config.fe.TypeUInt32().PtrTo()> (MOVWconst [^3]) ptr) 		(OR <config.fe.TypeUInt32()> (SLL <config.fe.TypeUInt32()> (ZeroExt8to32 val) 			(SLLconst <config.fe.TypeUInt32()> [3] 				(ANDconst  <config.fe.TypeUInt32()> [3] 					(XORconst <config.fe.TypeUInt32()> [3] ptr)))) 		(NORconst [0] <config.fe.TypeUInt32()> (SLL <config.fe.TypeUInt32()> 			(MOVWconst [0xff]) (SLLconst <config.fe.TypeUInt32()> [3] 				(ANDconst <config.fe.TypeUInt32()> [3] 					(XORconst <config.fe.TypeUInt32()> [3] ptr)))))) mem)
	for {
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(config.BigEndian) {
			break
		}
		v.reset(OpMIPSLoweredAtomicAnd)
		// v0: word-aligned address ptr &^ 3.
		v0 := b.NewValue0(v.Pos, OpMIPSAND, config.fe.TypeUInt32().PtrTo())
		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v1.AuxInt = ^3
		v0.AddArg(v1)
		v0.AddArg(ptr)
		v.AddArg(v0)
		// v2: mask word; big-endian lane index is (ptr ^ 3) & 3 for both
		// the value shift and the 0xff mask shift.
		v2 := b.NewValue0(v.Pos, OpMIPSOR, config.fe.TypeUInt32())
		v3 := b.NewValue0(v.Pos, OpMIPSSLL, config.fe.TypeUInt32())
		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
		v4.AddArg(val)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPSSLLconst, config.fe.TypeUInt32())
		v5.AuxInt = 3
		v6 := b.NewValue0(v.Pos, OpMIPSANDconst, config.fe.TypeUInt32())
		v6.AuxInt = 3
		v7 := b.NewValue0(v.Pos, OpMIPSXORconst, config.fe.TypeUInt32())
		v7.AuxInt = 3
		v7.AddArg(ptr)
		v6.AddArg(v7)
		v5.AddArg(v6)
		v3.AddArg(v5)
		v2.AddArg(v3)
		// v8: ^(0xff << shift) keeps the other three byte lanes intact.
		v8 := b.NewValue0(v.Pos, OpMIPSNORconst, config.fe.TypeUInt32())
		v8.AuxInt = 0
		v9 := b.NewValue0(v.Pos, OpMIPSSLL, config.fe.TypeUInt32())
		v10 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v10.AuxInt = 0xff
		v9.AddArg(v10)
		v11 := b.NewValue0(v.Pos, OpMIPSSLLconst, config.fe.TypeUInt32())
		v11.AuxInt = 3
		v12 := b.NewValue0(v.Pos, OpMIPSANDconst, config.fe.TypeUInt32())
		v12.AuxInt = 3
		v13 := b.NewValue0(v.Pos, OpMIPSXORconst, config.fe.TypeUInt32())
		v13.AuxInt = 3
		v13.AddArg(ptr)
		v12.AddArg(v13)
		v11.AddArg(v12)
		v9.AddArg(v11)
		v8.AddArg(v9)
		v2.AddArg(v8)
		v.AddArg(v2)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS_OpAtomicCompareAndSwap32 lowers AtomicCompareAndSwap32
// to the LoweredAtomicCas pseudo-op with arguments passed through as-is.
func rewriteValueMIPS_OpAtomicCompareAndSwap32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (AtomicCompareAndSwap32 ptr old new_ mem)
	// cond:
	// result: (LoweredAtomicCas ptr old new_ mem)
	for {
		ptr := v.Args[0]
		old := v.Args[1]
		new_ := v.Args[2]
		mem := v.Args[3]
		v.reset(OpMIPSLoweredAtomicCas)
		v.AddArg(ptr)
		v.AddArg(old)
		v.AddArg(new_)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS_OpAtomicExchange32 lowers AtomicExchange32 to the
// LoweredAtomicExchange pseudo-op with arguments passed through as-is.
func rewriteValueMIPS_OpAtomicExchange32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (AtomicExchange32 ptr val mem)
	// cond:
	// result: (LoweredAtomicExchange ptr val mem)
	for {
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpMIPSLoweredAtomicExchange)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}
   858  func rewriteValueMIPS_OpAtomicLoad32(v *Value, config *Config) bool {
   859  	b := v.Block
   860  	_ = b
   861  	// match: (AtomicLoad32  ptr mem)
   862  	// cond:
   863  	// result: (LoweredAtomicLoad ptr mem)
   864  	for {
   865  		ptr := v.Args[0]
   866  		mem := v.Args[1]
   867  		v.reset(OpMIPSLoweredAtomicLoad)
   868  		v.AddArg(ptr)
   869  		v.AddArg(mem)
   870  		return true
   871  	}
   872  }
   873  func rewriteValueMIPS_OpAtomicLoadPtr(v *Value, config *Config) bool {
   874  	b := v.Block
   875  	_ = b
   876  	// match: (AtomicLoadPtr ptr mem)
   877  	// cond:
   878  	// result: (LoweredAtomicLoad  ptr mem)
   879  	for {
   880  		ptr := v.Args[0]
   881  		mem := v.Args[1]
   882  		v.reset(OpMIPSLoweredAtomicLoad)
   883  		v.AddArg(ptr)
   884  		v.AddArg(mem)
   885  		return true
   886  	}
   887  }
   888  func rewriteValueMIPS_OpAtomicOr8(v *Value, config *Config) bool {
   889  	b := v.Block
   890  	_ = b
   891  	// match: (AtomicOr8 ptr val mem)
   892  	// cond: !config.BigEndian
   893  	// result: (LoweredAtomicOr (AND <config.fe.TypeUInt32().PtrTo()> (MOVWconst [^3]) ptr) 		(SLL <config.fe.TypeUInt32()> (ZeroExt8to32 val) 			(SLLconst <config.fe.TypeUInt32()> [3] 				(ANDconst <config.fe.TypeUInt32()> [3] ptr))) mem)
   894  	for {
   895  		ptr := v.Args[0]
   896  		val := v.Args[1]
   897  		mem := v.Args[2]
   898  		if !(!config.BigEndian) {
   899  			break
   900  		}
   901  		v.reset(OpMIPSLoweredAtomicOr)
   902  		v0 := b.NewValue0(v.Pos, OpMIPSAND, config.fe.TypeUInt32().PtrTo())
   903  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
   904  		v1.AuxInt = ^3
   905  		v0.AddArg(v1)
   906  		v0.AddArg(ptr)
   907  		v.AddArg(v0)
   908  		v2 := b.NewValue0(v.Pos, OpMIPSSLL, config.fe.TypeUInt32())
   909  		v3 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
   910  		v3.AddArg(val)
   911  		v2.AddArg(v3)
   912  		v4 := b.NewValue0(v.Pos, OpMIPSSLLconst, config.fe.TypeUInt32())
   913  		v4.AuxInt = 3
   914  		v5 := b.NewValue0(v.Pos, OpMIPSANDconst, config.fe.TypeUInt32())
   915  		v5.AuxInt = 3
   916  		v5.AddArg(ptr)
   917  		v4.AddArg(v5)
   918  		v2.AddArg(v4)
   919  		v.AddArg(v2)
   920  		v.AddArg(mem)
   921  		return true
   922  	}
   923  	// match: (AtomicOr8 ptr val mem)
   924  	// cond: config.BigEndian
   925  	// result: (LoweredAtomicOr (AND <config.fe.TypeUInt32().PtrTo()> (MOVWconst [^3]) ptr) 		(SLL <config.fe.TypeUInt32()> (ZeroExt8to32 val) 			(SLLconst <config.fe.TypeUInt32()> [3] 				(ANDconst <config.fe.TypeUInt32()> [3] 					(XORconst <config.fe.TypeUInt32()> [3] ptr)))) mem)
   926  	for {
   927  		ptr := v.Args[0]
   928  		val := v.Args[1]
   929  		mem := v.Args[2]
   930  		if !(config.BigEndian) {
   931  			break
   932  		}
   933  		v.reset(OpMIPSLoweredAtomicOr)
   934  		v0 := b.NewValue0(v.Pos, OpMIPSAND, config.fe.TypeUInt32().PtrTo())
   935  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
   936  		v1.AuxInt = ^3
   937  		v0.AddArg(v1)
   938  		v0.AddArg(ptr)
   939  		v.AddArg(v0)
   940  		v2 := b.NewValue0(v.Pos, OpMIPSSLL, config.fe.TypeUInt32())
   941  		v3 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
   942  		v3.AddArg(val)
   943  		v2.AddArg(v3)
   944  		v4 := b.NewValue0(v.Pos, OpMIPSSLLconst, config.fe.TypeUInt32())
   945  		v4.AuxInt = 3
   946  		v5 := b.NewValue0(v.Pos, OpMIPSANDconst, config.fe.TypeUInt32())
   947  		v5.AuxInt = 3
   948  		v6 := b.NewValue0(v.Pos, OpMIPSXORconst, config.fe.TypeUInt32())
   949  		v6.AuxInt = 3
   950  		v6.AddArg(ptr)
   951  		v5.AddArg(v6)
   952  		v4.AddArg(v5)
   953  		v2.AddArg(v4)
   954  		v.AddArg(v2)
   955  		v.AddArg(mem)
   956  		return true
   957  	}
   958  	return false
   959  }
   960  func rewriteValueMIPS_OpAtomicStore32(v *Value, config *Config) bool {
   961  	b := v.Block
   962  	_ = b
   963  	// match: (AtomicStore32      ptr val mem)
   964  	// cond:
   965  	// result: (LoweredAtomicStore ptr val mem)
   966  	for {
   967  		ptr := v.Args[0]
   968  		val := v.Args[1]
   969  		mem := v.Args[2]
   970  		v.reset(OpMIPSLoweredAtomicStore)
   971  		v.AddArg(ptr)
   972  		v.AddArg(val)
   973  		v.AddArg(mem)
   974  		return true
   975  	}
   976  }
   977  func rewriteValueMIPS_OpAtomicStorePtrNoWB(v *Value, config *Config) bool {
   978  	b := v.Block
   979  	_ = b
   980  	// match: (AtomicStorePtrNoWB ptr val mem)
   981  	// cond:
   982  	// result: (LoweredAtomicStore  ptr val mem)
   983  	for {
   984  		ptr := v.Args[0]
   985  		val := v.Args[1]
   986  		mem := v.Args[2]
   987  		v.reset(OpMIPSLoweredAtomicStore)
   988  		v.AddArg(ptr)
   989  		v.AddArg(val)
   990  		v.AddArg(mem)
   991  		return true
   992  	}
   993  }
// rewriteValueMIPS_OpClosureCall lowers a generic closure call to the MIPS
// CALLclosure op, preserving the argument width carried in AuxInt.
func rewriteValueMIPS_OpClosureCall(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ClosureCall [argwid] entry closure mem)
	// cond:
	// result: (CALLclosure [argwid] entry closure mem)
	for {
		argwid := v.AuxInt
		entry := v.Args[0]
		closure := v.Args[1]
		mem := v.Args[2]
		v.reset(OpMIPSCALLclosure)
		v.AuxInt = argwid
		v.AddArg(entry)
		v.AddArg(closure)
		v.AddArg(mem)
		return true
	}
}

// rewriteValueMIPS_OpCom16 lowers 16-bit bitwise complement to NORconst [0]
// (NOR with zero is a bitwise NOT on MIPS).
func rewriteValueMIPS_OpCom16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Com16 x)
	// cond:
	// result: (NORconst [0] x)
	for {
		x := v.Args[0]
		v.reset(OpMIPSNORconst)
		v.AuxInt = 0
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS_OpCom32 lowers 32-bit bitwise complement to NORconst [0].
func rewriteValueMIPS_OpCom32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Com32 x)
	// cond:
	// result: (NORconst [0] x)
	for {
		x := v.Args[0]
		v.reset(OpMIPSNORconst)
		v.AuxInt = 0
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS_OpCom8 lowers 8-bit bitwise complement to NORconst [0].
func rewriteValueMIPS_OpCom8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Com8 x)
	// cond:
	// result: (NORconst [0] x)
	for {
		x := v.Args[0]
		v.reset(OpMIPSNORconst)
		v.AuxInt = 0
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS_OpConst16 materializes a 16-bit constant as a 32-bit
// MOVWconst (MIPS has only word-sized constant moves).
func rewriteValueMIPS_OpConst16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Const16 [val])
	// cond:
	// result: (MOVWconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = val
		return true
	}
}

// rewriteValueMIPS_OpConst32 materializes a 32-bit constant as MOVWconst.
func rewriteValueMIPS_OpConst32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Const32 [val])
	// cond:
	// result: (MOVWconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = val
		return true
	}
}

// rewriteValueMIPS_OpConst32F materializes a 32-bit float constant as
// MOVFconst.
func rewriteValueMIPS_OpConst32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Const32F [val])
	// cond:
	// result: (MOVFconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPSMOVFconst)
		v.AuxInt = val
		return true
	}
}

// rewriteValueMIPS_OpConst64F materializes a 64-bit float constant as
// MOVDconst.
func rewriteValueMIPS_OpConst64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Const64F [val])
	// cond:
	// result: (MOVDconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPSMOVDconst)
		v.AuxInt = val
		return true
	}
}

// rewriteValueMIPS_OpConst8 materializes an 8-bit constant as MOVWconst.
func rewriteValueMIPS_OpConst8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Const8 [val])
	// cond:
	// result: (MOVWconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = val
		return true
	}
}

// rewriteValueMIPS_OpConstBool materializes a boolean constant (0 or 1 in
// AuxInt) as MOVWconst.
func rewriteValueMIPS_OpConstBool(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ConstBool [b])
	// cond:
	// result: (MOVWconst [b])
	for {
		b := v.AuxInt
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = b
		return true
	}
}

// rewriteValueMIPS_OpConstNil lowers the nil pointer constant to
// MOVWconst [0].
func rewriteValueMIPS_OpConstNil(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ConstNil)
	// cond:
	// result: (MOVWconst [0])
	for {
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = 0
		return true
	}
}

// rewriteValueMIPS_OpConvert lowers the generic pointer/int Convert op
// (used by the write barrier / GC) to MOVWconvert.
func rewriteValueMIPS_OpConvert(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Convert x mem)
	// cond:
	// result: (MOVWconvert x mem)
	for {
		x := v.Args[0]
		mem := v.Args[1]
		v.reset(OpMIPSMOVWconvert)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS_OpCtz32 lowers count-trailing-zeros via the MIPS CLZ
// (count-leading-zeros) instruction using the identity
// ctz(x) = 32 - clz((x & -x) - 1): x & -x isolates the lowest set bit,
// subtracting 1 turns it into a mask of the trailing zeros.
func rewriteValueMIPS_OpCtz32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Ctz32 <t> x)
	// cond:
	// result: (SUB (MOVWconst [32]) (CLZ <t> (SUBconst <t> [1] (AND <t> x (NEG <t> x)))))
	for {
		t := v.Type
		x := v.Args[0]
		v.reset(OpMIPSSUB)
		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v0.AuxInt = 32
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPSCLZ, t)
		v2 := b.NewValue0(v.Pos, OpMIPSSUBconst, t)
		v2.AuxInt = 1
		v3 := b.NewValue0(v.Pos, OpMIPSAND, t)
		v3.AddArg(x)
		v4 := b.NewValue0(v.Pos, OpMIPSNEG, t)
		v4.AddArg(x)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS_OpCvt32Fto32 lowers float32-to-int32 conversion to
// TRUNCFW.
func rewriteValueMIPS_OpCvt32Fto32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt32Fto32 x)
	// cond:
	// result: (TRUNCFW x)
	for {
		x := v.Args[0]
		v.reset(OpMIPSTRUNCFW)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS_OpCvt32Fto64F lowers float32-to-float64 conversion to
// MOVFD.
func rewriteValueMIPS_OpCvt32Fto64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt32Fto64F x)
	// cond:
	// result: (MOVFD x)
	for {
		x := v.Args[0]
		v.reset(OpMIPSMOVFD)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS_OpCvt32to32F lowers int32-to-float32 conversion to MOVWF.
func rewriteValueMIPS_OpCvt32to32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt32to32F x)
	// cond:
	// result: (MOVWF x)
	for {
		x := v.Args[0]
		v.reset(OpMIPSMOVWF)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS_OpCvt32to64F lowers int32-to-float64 conversion to MOVWD.
func rewriteValueMIPS_OpCvt32to64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt32to64F x)
	// cond:
	// result: (MOVWD x)
	for {
		x := v.Args[0]
		v.reset(OpMIPSMOVWD)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS_OpCvt64Fto32 lowers float64-to-int32 conversion to
// TRUNCDW.
func rewriteValueMIPS_OpCvt64Fto32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt64Fto32 x)
	// cond:
	// result: (TRUNCDW x)
	for {
		x := v.Args[0]
		v.reset(OpMIPSTRUNCDW)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS_OpCvt64Fto32F lowers float64-to-float32 conversion to
// MOVDF.
func rewriteValueMIPS_OpCvt64Fto32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt64Fto32F x)
	// cond:
	// result: (MOVDF x)
	for {
		x := v.Args[0]
		v.reset(OpMIPSMOVDF)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS_OpDeferCall lowers a defer call to CALLdefer,
// preserving the argument width in AuxInt.
func rewriteValueMIPS_OpDeferCall(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (DeferCall [argwid] mem)
	// cond:
	// result: (CALLdefer [argwid] mem)
	for {
		argwid := v.AuxInt
		mem := v.Args[0]
		v.reset(OpMIPSCALLdefer)
		v.AuxInt = argwid
		v.AddArg(mem)
		return true
	}
}

// rewriteValueMIPS_OpDiv16 lowers signed 16-bit division: sign-extend both
// operands to 32 bits, use the word DIV (which produces a (mod, div)
// tuple), and select the quotient with Select1.
func rewriteValueMIPS_OpDiv16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div16 x y)
	// cond:
	// result: (Select1 (DIV (SignExt16to32 x) (SignExt16to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPSDIV, MakeTuple(config.fe.TypeInt32(), config.fe.TypeInt32()))
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, config.fe.TypeInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt16to32, config.fe.TypeInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS_OpDiv16u lowers unsigned 16-bit division: zero-extend
// both operands and take the quotient (Select1) of the word DIVU tuple.
func rewriteValueMIPS_OpDiv16u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div16u x y)
	// cond:
	// result: (Select1 (DIVU (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPSDIVU, MakeTuple(config.fe.TypeUInt32(), config.fe.TypeUInt32()))
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS_OpDiv32 lowers signed 32-bit division to the quotient
// half (Select1) of the DIV tuple.
func rewriteValueMIPS_OpDiv32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div32 x y)
	// cond:
	// result: (Select1 (DIV x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPSDIV, MakeTuple(config.fe.TypeInt32(), config.fe.TypeInt32()))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS_OpDiv32F lowers 32-bit float division to DIVF.
func rewriteValueMIPS_OpDiv32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div32F x y)
	// cond:
	// result: (DIVF x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSDIVF)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueMIPS_OpDiv32u lowers unsigned 32-bit division to the
// quotient half (Select1) of the DIVU tuple.
func rewriteValueMIPS_OpDiv32u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div32u x y)
	// cond:
	// result: (Select1 (DIVU x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPSDIVU, MakeTuple(config.fe.TypeUInt32(), config.fe.TypeUInt32()))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS_OpDiv64F lowers 64-bit float division to DIVD.
func rewriteValueMIPS_OpDiv64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div64F x y)
	// cond:
	// result: (DIVD x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSDIVD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueMIPS_OpDiv8 lowers signed 8-bit division: sign-extend both
// operands and take the quotient (Select1) of the word DIV tuple.
func rewriteValueMIPS_OpDiv8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div8 x y)
	// cond:
	// result: (Select1 (DIV (SignExt8to32 x) (SignExt8to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPSDIV, MakeTuple(config.fe.TypeInt32(), config.fe.TypeInt32()))
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, config.fe.TypeInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt8to32, config.fe.TypeInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS_OpDiv8u lowers unsigned 8-bit division: zero-extend
// both operands and take the quotient (Select1) of the word DIVU tuple.
func rewriteValueMIPS_OpDiv8u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div8u x y)
	// cond:
	// result: (Select1 (DIVU (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPSDIVU, MakeTuple(config.fe.TypeUInt32(), config.fe.TypeUInt32()))
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS_OpEq16 lowers 16-bit equality as 1 >u (x^y) after
// zero-extension: the XOR is zero iff the operands are equal, and
// SGTUconst [1] tests "1 unsigned-greater-than arg", i.e. arg == 0.
func rewriteValueMIPS_OpEq16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Eq16 x y)
	// cond:
	// result: (SGTUconst [1] (XOR (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSSGTUconst)
		v.AuxInt = 1
		v0 := b.NewValue0(v.Pos, OpMIPSXOR, config.fe.TypeUInt32())
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS_OpEq32 lowers 32-bit equality as 1 >u (x^y)
// (SGTUconst [1] of the XOR, which is zero iff equal).
func rewriteValueMIPS_OpEq32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Eq32 x y)
	// cond:
	// result: (SGTUconst [1] (XOR x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSSGTUconst)
		v.AuxInt = 1
		v0 := b.NewValue0(v.Pos, OpMIPSXOR, config.fe.TypeUInt32())
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS_OpEq32F lowers float32 equality to the FP compare
// CMPEQF followed by reading the FP condition flag with FPFlagTrue.
func rewriteValueMIPS_OpEq32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Eq32F x y)
	// cond:
	// result: (FPFlagTrue (CMPEQF x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSFPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPSCMPEQF, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS_OpEq64F lowers float64 equality to CMPEQD + FPFlagTrue.
func rewriteValueMIPS_OpEq64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Eq64F x y)
	// cond:
	// result: (FPFlagTrue (CMPEQD x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSFPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPSCMPEQD, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS_OpEq8 lowers 8-bit equality as 1 >u (x^y) after
// zero-extension, same scheme as Eq16.
func rewriteValueMIPS_OpEq8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Eq8 x y)
	// cond:
	// result: (SGTUconst [1] (XOR (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSSGTUconst)
		v.AuxInt = 1
		v0 := b.NewValue0(v.Pos, OpMIPSXOR, config.fe.TypeUInt32())
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS_OpEqB lowers boolean equality as (x^y)^1: XOR of two
// 0/1 values is their inequality, and XORconst [1] negates it.
func rewriteValueMIPS_OpEqB(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (EqB x y)
	// cond:
	// result: (XORconst [1] (XOR <config.fe.TypeBool()> x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSXORconst)
		v.AuxInt = 1
		v0 := b.NewValue0(v.Pos, OpMIPSXOR, config.fe.TypeBool())
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS_OpEqPtr lowers pointer equality exactly like Eq32
// (pointers are 32-bit words on MIPS): 1 >u (x^y).
func rewriteValueMIPS_OpEqPtr(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (EqPtr x y)
	// cond:
	// result: (SGTUconst [1] (XOR x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSSGTUconst)
		v.AuxInt = 1
		v0 := b.NewValue0(v.Pos, OpMIPSXOR, config.fe.TypeUInt32())
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS_OpGeq16 lowers signed 16-bit >= as !(y > x): operands
// are sign-extended, compared with SGT in swapped order, and the result is
// inverted with XORconst [1]. MIPS has no "set on >=" instruction.
func rewriteValueMIPS_OpGeq16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq16 x y)
	// cond:
	// result: (XORconst [1] (SGT (SignExt16to32 y) (SignExt16to32 x)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSXORconst)
		v.AuxInt = 1
		v0 := b.NewValue0(v.Pos, OpMIPSSGT, config.fe.TypeBool())
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, config.fe.TypeInt32())
		v1.AddArg(y)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt16to32, config.fe.TypeInt32())
		v2.AddArg(x)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS_OpGeq16U lowers unsigned 16-bit >= as !(y >u x) using
// zero-extension, swapped SGTU, and XORconst [1] to negate.
func rewriteValueMIPS_OpGeq16U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq16U x y)
	// cond:
	// result: (XORconst [1] (SGTU (ZeroExt16to32 y) (ZeroExt16to32 x)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSXORconst)
		v.AuxInt = 1
		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, config.fe.TypeBool())
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32())
		v1.AddArg(y)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32())
		v2.AddArg(x)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS_OpGeq32 lowers signed 32-bit >= as !(y > x).
func rewriteValueMIPS_OpGeq32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq32 x y)
	// cond:
	// result: (XORconst [1] (SGT y x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSXORconst)
		v.AuxInt = 1
		v0 := b.NewValue0(v.Pos, OpMIPSSGT, config.fe.TypeBool())
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS_OpGeq32F lowers float32 >= directly to the FP compare
// CMPGEF + FPFlagTrue (no inversion needed; the FPU has a >= compare).
func rewriteValueMIPS_OpGeq32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq32F x y)
	// cond:
	// result: (FPFlagTrue (CMPGEF x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSFPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPSCMPGEF, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS_OpGeq32U lowers unsigned 32-bit >= as !(y >u x).
func rewriteValueMIPS_OpGeq32U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq32U x y)
	// cond:
	// result: (XORconst [1] (SGTU y x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSXORconst)
		v.AuxInt = 1
		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, config.fe.TypeBool())
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS_OpGeq64F lowers float64 >= to CMPGED + FPFlagTrue.
func rewriteValueMIPS_OpGeq64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq64F x y)
	// cond:
	// result: (FPFlagTrue (CMPGED x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSFPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPSCMPGED, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS_OpGeq8 lowers signed 8-bit >= as !(y > x) after
// sign-extension, same scheme as Geq16.
func rewriteValueMIPS_OpGeq8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq8 x y)
	// cond:
	// result: (XORconst [1] (SGT (SignExt8to32 y) (SignExt8to32 x)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSXORconst)
		v.AuxInt = 1
		v0 := b.NewValue0(v.Pos, OpMIPSSGT, config.fe.TypeBool())
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, config.fe.TypeInt32())
		v1.AddArg(y)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt8to32, config.fe.TypeInt32())
		v2.AddArg(x)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS_OpGeq8U lowers unsigned 8-bit >= as !(y >u x) after
// zero-extension, same scheme as Geq16U.
func rewriteValueMIPS_OpGeq8U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq8U x y)
	// cond:
	// result: (XORconst [1] (SGTU (ZeroExt8to32 y) (ZeroExt8to32 x)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSXORconst)
		v.AuxInt = 1
		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, config.fe.TypeBool())
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
		v1.AddArg(y)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
		v2.AddArg(x)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS_OpGetClosurePtr lowers GetClosurePtr to the MIPS
// LoweredGetClosurePtr pseudo-op (no operands).
func rewriteValueMIPS_OpGetClosurePtr(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (GetClosurePtr)
	// cond:
	// result: (LoweredGetClosurePtr)
	for {
		v.reset(OpMIPSLoweredGetClosurePtr)
		return true
	}
}

// rewriteValueMIPS_OpGoCall lowers a go-statement call to CALLgo,
// preserving the argument width in AuxInt.
func rewriteValueMIPS_OpGoCall(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (GoCall [argwid] mem)
	// cond:
	// result: (CALLgo [argwid] mem)
	for {
		argwid := v.AuxInt
		mem := v.Args[0]
		v.reset(OpMIPSCALLgo)
		v.AuxInt = argwid
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS_OpGreater16 lowers signed 16-bit > by sign-extending
// both operands and using SGT (set-on-greater-than, signed) directly.
func rewriteValueMIPS_OpGreater16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater16 x y)
	// cond:
	// result: (SGT (SignExt16to32 x) (SignExt16to32 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSSGT)
		v0 := b.NewValue0(v.Pos, OpSignExt16to32, config.fe.TypeInt32())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, config.fe.TypeInt32())
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS_OpGreater16U lowers unsigned 16-bit > by zero-extending
// both operands and using SGTU (unsigned set-on-greater-than).
func rewriteValueMIPS_OpGreater16U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater16U x y)
	// cond:
	// result: (SGTU (ZeroExt16to32 x) (ZeroExt16to32 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSSGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32())
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS_OpGreater32 lowers signed 32-bit > directly to SGT.
func rewriteValueMIPS_OpGreater32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater32 x y)
	// cond:
	// result: (SGT x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSSGT)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueMIPS_OpGreater32F lowers float32 > to CMPGTF + FPFlagTrue.
func rewriteValueMIPS_OpGreater32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater32F x y)
	// cond:
	// result: (FPFlagTrue (CMPGTF x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSFPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPSCMPGTF, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS_OpGreater32U lowers unsigned 32-bit > directly to SGTU.
func rewriteValueMIPS_OpGreater32U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater32U x y)
	// cond:
	// result: (SGTU x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSSGTU)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueMIPS_OpGreater64F lowers float64 > to CMPGTD + FPFlagTrue.
func rewriteValueMIPS_OpGreater64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater64F x y)
	// cond:
	// result: (FPFlagTrue (CMPGTD x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSFPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPSCMPGTD, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS_OpGreater8 lowers signed 8-bit > by sign-extending both
// operands and using SGT, same scheme as Greater16.
func rewriteValueMIPS_OpGreater8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater8 x y)
	// cond:
	// result: (SGT (SignExt8to32 x) (SignExt8to32 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSSGT)
		v0 := b.NewValue0(v.Pos, OpSignExt8to32, config.fe.TypeInt32())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, config.fe.TypeInt32())
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS_OpGreater8U lowers unsigned 8-bit > by zero-extending
// both operands and using SGTU, same scheme as Greater16U.
func rewriteValueMIPS_OpGreater8U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater8U x y)
	// cond:
	// result: (SGTU (ZeroExt8to32 x) (ZeroExt8to32 y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSSGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
  1884  func rewriteValueMIPS_OpHmul16(v *Value, config *Config) bool {
  1885  	b := v.Block
  1886  	_ = b
  1887  	// match: (Hmul16 x y)
  1888  	// cond:
  1889  	// result: (SRAconst (MUL <config.fe.TypeInt32()> (SignExt16to32 x) (SignExt16to32 y)) [16])
  1890  	for {
  1891  		x := v.Args[0]
  1892  		y := v.Args[1]
  1893  		v.reset(OpMIPSSRAconst)
  1894  		v.AuxInt = 16
  1895  		v0 := b.NewValue0(v.Pos, OpMIPSMUL, config.fe.TypeInt32())
  1896  		v1 := b.NewValue0(v.Pos, OpSignExt16to32, config.fe.TypeInt32())
  1897  		v1.AddArg(x)
  1898  		v0.AddArg(v1)
  1899  		v2 := b.NewValue0(v.Pos, OpSignExt16to32, config.fe.TypeInt32())
  1900  		v2.AddArg(y)
  1901  		v0.AddArg(v2)
  1902  		v.AddArg(v0)
  1903  		return true
  1904  	}
  1905  }
// rewriteValueMIPS_OpHmul16u lowers the unsigned 16-bit high-multiply:
// zero-extend both operands, 32-bit MUL, then logical shift right by 16.
// Always fires.
func rewriteValueMIPS_OpHmul16u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Hmul16u x y)
	// cond:
	// result: (SRLconst (MUL <config.fe.TypeUInt32()> (ZeroExt16to32 x) (ZeroExt16to32 y)) [16])
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSSRLconst)
		v.AuxInt = 16
		v0 := b.NewValue0(v.Pos, OpMIPSMUL, config.fe.TypeUInt32())
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS_OpHmul32 lowers the signed 32-bit high-multiply to the high
// half (Select0) of the tuple-producing MULT instruction. Always fires.
func rewriteValueMIPS_OpHmul32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Hmul32 x y)
	// cond:
	// result: (Select0 (MULT x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPSMULT, MakeTuple(config.fe.TypeInt32(), config.fe.TypeInt32()))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS_OpHmul32u lowers the unsigned 32-bit high-multiply to the
// high half (Select0) of the tuple-producing MULTU instruction. Always fires.
func rewriteValueMIPS_OpHmul32u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Hmul32u x y)
	// cond:
	// result: (Select0 (MULTU x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPSMULTU, MakeTuple(config.fe.TypeUInt32(), config.fe.TypeUInt32()))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS_OpHmul8 lowers the signed 8-bit high-multiply: sign-extend
// both operands, 32-bit MUL, then arithmetic shift right by 8. Always fires.
func rewriteValueMIPS_OpHmul8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Hmul8 x y)
	// cond:
	// result: (SRAconst  (MUL <config.fe.TypeInt32()> (SignExt8to32 x) (SignExt8to32 y)) [8])
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSSRAconst)
		v.AuxInt = 8
		v0 := b.NewValue0(v.Pos, OpMIPSMUL, config.fe.TypeInt32())
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, config.fe.TypeInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt8to32, config.fe.TypeInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS_OpHmul8u lowers the unsigned 8-bit high-multiply:
// zero-extend both operands, 32-bit MUL, then logical shift right by 8.
// Always fires.
func rewriteValueMIPS_OpHmul8u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Hmul8u x y)
	// cond:
	// result: (SRLconst (MUL <config.fe.TypeUInt32()> (ZeroExt8to32 x) (ZeroExt8to32 y)) [8])
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSSRLconst)
		v.AuxInt = 8
		v0 := b.NewValue0(v.Pos, OpMIPSMUL, config.fe.TypeUInt32())
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS_OpInterCall lowers an interface call to the MIPS CALLinter
// op, carrying the argument width through AuxInt unchanged. Always fires.
func rewriteValueMIPS_OpInterCall(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (InterCall [argwid] entry mem)
	// cond:
	// result: (CALLinter [argwid] entry mem)
	for {
		argwid := v.AuxInt
		entry := v.Args[0]
		mem := v.Args[1]
		v.reset(OpMIPSCALLinter)
		v.AuxInt = argwid
		v.AddArg(entry)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS_OpIsInBounds lowers the bounds check idx < len to the
// unsigned comparison (SGTU len idx); note the swapped operand order.
// Always fires.
func rewriteValueMIPS_OpIsInBounds(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (IsInBounds idx len)
	// cond:
	// result: (SGTU len idx)
	for {
		idx := v.Args[0]
		len := v.Args[1]
		v.reset(OpMIPSSGTU)
		v.AddArg(len)
		v.AddArg(idx)
		return true
	}
}
// rewriteValueMIPS_OpIsNonNil lowers the nil check to an unsigned
// ptr > 0 comparison against a zero constant. Always fires.
func rewriteValueMIPS_OpIsNonNil(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (IsNonNil ptr)
	// cond:
	// result: (SGTU ptr (MOVWconst [0]))
	for {
		ptr := v.Args[0]
		v.reset(OpMIPSSGTU)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v0.AuxInt = 0
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS_OpIsSliceInBounds lowers the slice bounds check idx <= len
// as !(idx > len): an unsigned SGTU whose boolean result is inverted by
// XORing with 1. Always fires.
func rewriteValueMIPS_OpIsSliceInBounds(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (IsSliceInBounds idx len)
	// cond:
	// result: (XORconst [1] (SGTU idx len))
	for {
		idx := v.Args[0]
		len := v.Args[1]
		v.reset(OpMIPSXORconst)
		v.AuxInt = 1
		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, config.fe.TypeBool())
		v0.AddArg(idx)
		v0.AddArg(len)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS_OpLeq16 lowers signed 16-bit <= as !(x > y): sign-extend
// both operands, SGT, then invert the boolean with XORconst [1]. Always fires.
func rewriteValueMIPS_OpLeq16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq16 x y)
	// cond:
	// result: (XORconst [1] (SGT (SignExt16to32 x) (SignExt16to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSXORconst)
		v.AuxInt = 1
		v0 := b.NewValue0(v.Pos, OpMIPSSGT, config.fe.TypeBool())
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, config.fe.TypeInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt16to32, config.fe.TypeInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS_OpLeq16U lowers unsigned 16-bit <= as !(x > y): zero-extend
// both operands, SGTU, then invert the boolean with XORconst [1]. Always fires.
func rewriteValueMIPS_OpLeq16U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq16U x y)
	// cond:
	// result: (XORconst [1] (SGTU (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSXORconst)
		v.AuxInt = 1
		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, config.fe.TypeBool())
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS_OpLeq32 lowers signed 32-bit <= as !(x > y) using SGT
// followed by a boolean inversion via XORconst [1]. Always fires.
func rewriteValueMIPS_OpLeq32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq32 x y)
	// cond:
	// result: (XORconst [1] (SGT x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSXORconst)
		v.AuxInt = 1
		v0 := b.NewValue0(v.Pos, OpMIPSSGT, config.fe.TypeBool())
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS_OpLeq32F lowers float32 x <= y as (FPFlagTrue (CMPGEF y x)):
// the FP compare takes the operands swapped so y >= x. Always fires.
func rewriteValueMIPS_OpLeq32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq32F x y)
	// cond:
	// result: (FPFlagTrue (CMPGEF y x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSFPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPSCMPGEF, TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS_OpLeq32U lowers unsigned 32-bit <= as !(x > y) using SGTU
// followed by a boolean inversion via XORconst [1]. Always fires.
func rewriteValueMIPS_OpLeq32U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq32U x y)
	// cond:
	// result: (XORconst [1] (SGTU x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSXORconst)
		v.AuxInt = 1
		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, config.fe.TypeBool())
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS_OpLeq64F lowers float64 x <= y as (FPFlagTrue (CMPGED y x)):
// the FP compare takes the operands swapped so y >= x. Always fires.
func rewriteValueMIPS_OpLeq64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq64F x y)
	// cond:
	// result: (FPFlagTrue (CMPGED y x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSFPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPSCMPGED, TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS_OpLeq8 lowers signed 8-bit <= as !(x > y): sign-extend both
// operands, SGT, then invert the boolean with XORconst [1]. Always fires.
func rewriteValueMIPS_OpLeq8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq8 x y)
	// cond:
	// result: (XORconst [1] (SGT (SignExt8to32 x) (SignExt8to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSXORconst)
		v.AuxInt = 1
		v0 := b.NewValue0(v.Pos, OpMIPSSGT, config.fe.TypeBool())
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, config.fe.TypeInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt8to32, config.fe.TypeInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS_OpLeq8U lowers unsigned 8-bit <= as !(x > y): zero-extend
// both operands, SGTU, then invert the boolean with XORconst [1]. Always fires.
func rewriteValueMIPS_OpLeq8U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq8U x y)
	// cond:
	// result: (XORconst [1] (SGTU (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSXORconst)
		v.AuxInt = 1
		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, config.fe.TypeBool())
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS_OpLess16 lowers signed 16-bit x < y as y > x: sign-extend
// both operands and emit SGT with the operands swapped. Always fires.
func rewriteValueMIPS_OpLess16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less16 x y)
	// cond:
	// result: (SGT (SignExt16to32 y) (SignExt16to32 x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSSGT)
		v0 := b.NewValue0(v.Pos, OpSignExt16to32, config.fe.TypeInt32())
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, config.fe.TypeInt32())
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS_OpLess16U lowers unsigned 16-bit x < y as y > x: zero-extend
// both operands and emit SGTU with the operands swapped. Always fires.
func rewriteValueMIPS_OpLess16U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less16U x y)
	// cond:
	// result: (SGTU (ZeroExt16to32 y) (ZeroExt16to32 x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSSGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32())
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS_OpLess32 lowers signed 32-bit x < y as (SGT y x).
// Always fires.
func rewriteValueMIPS_OpLess32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less32 x y)
	// cond:
	// result: (SGT y x)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSSGT)
		v.AddArg(y)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS_OpLess32F lowers float32 x < y as (FPFlagTrue (CMPGTF y x)):
// the FP compare takes the operands swapped so y > x. Always fires.
func rewriteValueMIPS_OpLess32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less32F x y)
	// cond:
	// result: (FPFlagTrue (CMPGTF y x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSFPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPSCMPGTF, TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS_OpLess32U lowers unsigned 32-bit x < y as (SGTU y x).
// Always fires.
func rewriteValueMIPS_OpLess32U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less32U x y)
	// cond:
	// result: (SGTU y x)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSSGTU)
		v.AddArg(y)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS_OpLess64F lowers float64 x < y as (FPFlagTrue (CMPGTD y x)):
// the FP compare takes the operands swapped so y > x. Always fires.
func rewriteValueMIPS_OpLess64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less64F x y)
	// cond:
	// result: (FPFlagTrue (CMPGTD y x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSFPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPSCMPGTD, TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS_OpLess8 lowers signed 8-bit x < y as y > x: sign-extend
// both operands and emit SGT with the operands swapped. Always fires.
func rewriteValueMIPS_OpLess8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less8 x y)
	// cond:
	// result: (SGT (SignExt8to32 y) (SignExt8to32 x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSSGT)
		v0 := b.NewValue0(v.Pos, OpSignExt8to32, config.fe.TypeInt32())
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, config.fe.TypeInt32())
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS_OpLess8U lowers unsigned 8-bit x < y as y > x: zero-extend
// both operands and emit SGTU with the operands swapped. Always fires.
func rewriteValueMIPS_OpLess8U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less8U x y)
	// cond:
	// result: (SGTU (ZeroExt8to32 y) (ZeroExt8to32 x))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSSGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS_OpLoad lowers a generic Load to the MIPS load instruction
// matching the loaded type: byte/half/word, signed or unsigned (zero-extending)
// variants for integers, and MOVF/MOVD for 32/64-bit floats. Exactly one of the
// mutually exclusive type conditions below applies; returns false if the type
// matches none of them.
func rewriteValueMIPS_OpLoad(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Load <t> ptr mem)
	// cond: t.IsBoolean()
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.IsBoolean()) {
			break
		}
		v.reset(OpMIPSMOVBUload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is8BitInt(t) && isSigned(t))
	// result: (MOVBload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is8BitInt(t) && isSigned(t)) {
			break
		}
		v.reset(OpMIPSMOVBload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is8BitInt(t) && !isSigned(t))
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is8BitInt(t) && !isSigned(t)) {
			break
		}
		v.reset(OpMIPSMOVBUload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && isSigned(t))
	// result: (MOVHload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is16BitInt(t) && isSigned(t)) {
			break
		}
		v.reset(OpMIPSMOVHload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && !isSigned(t))
	// result: (MOVHUload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is16BitInt(t) && !isSigned(t)) {
			break
		}
		v.reset(OpMIPSMOVHUload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) || isPtr(t))
	// result: (MOVWload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is32BitInt(t) || isPtr(t)) {
			break
		}
		v.reset(OpMIPSMOVWload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is32BitFloat(t)
	// result: (MOVFload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is32BitFloat(t)) {
			break
		}
		v.reset(OpMIPSMOVFload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is64BitFloat(t)
	// result: (MOVDload ptr mem)
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is64BitFloat(t)) {
			break
		}
		v.reset(OpMIPSMOVDload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS_OpLsh16x16 lowers Lsh16x16 with a Go-semantics guard:
// CMOVZ selects the SLL result when the zero-extended shift count is < 32
// (SGTUconst [32] count is nonzero), and the constant 0 otherwise, so shifts
// of 32 or more produce 0 rather than the hardware's count mod 32. Always fires.
func rewriteValueMIPS_OpLsh16x16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh16x16 <t> x y)
	// cond:
	// result: (CMOVZ (SLL <t> x (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSCMOVZ)
		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32())
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, config.fe.TypeBool())
		v3.AuxInt = 32
		v4 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32())
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS_OpLsh16x32 lowers Lsh16x32 with a Go-semantics guard:
// CMOVZ selects the SLL result when the 32-bit shift count is < 32 and the
// constant 0 otherwise (no extension needed for a 32-bit count). Always fires.
func rewriteValueMIPS_OpLsh16x32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh16x32 <t> x y)
	// cond:
	// result: (CMOVZ (SLL <t> x y) (MOVWconst [0]) (SGTUconst [32] y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSCMOVZ)
		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v1.AuxInt = 0
		v.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, config.fe.TypeBool())
		v2.AuxInt = 32
		v2.AddArg(y)
		v.AddArg(v2)
		return true
	}
}
// rewriteValueMIPS_OpLsh16x64 handles Lsh16x64 only when the shift count is a
// 64-bit constant: counts < 16 become an SLLconst, counts >= 16 fold to the
// constant 0. Non-constant counts are left for other rules (returns false).
func rewriteValueMIPS_OpLsh16x64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh16x64 x (Const64 [c]))
	// cond: uint32(c) < 16
	// result: (SLLconst x [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(uint32(c) < 16) {
			break
		}
		v.reset(OpMIPSSLLconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (Lsh16x64 _ (Const64 [c]))
	// cond: uint32(c) >= 16
	// result: (MOVWconst [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(uint32(c) >= 16) {
			break
		}
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueMIPS_OpLsh16x8 lowers Lsh16x8 with a Go-semantics guard: CMOVZ
// selects the SLL result when the zero-extended 8-bit shift count is < 32 and
// the constant 0 otherwise. Always fires.
func rewriteValueMIPS_OpLsh16x8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh16x8 <t> x y)
	// cond:
	// result: (CMOVZ (SLL <t> x (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSCMOVZ)
		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, config.fe.TypeBool())
		v3.AuxInt = 32
		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS_OpLsh32x16 lowers Lsh32x16 with a Go-semantics guard: CMOVZ
// selects the SLL result when the zero-extended 16-bit shift count is < 32 and
// the constant 0 otherwise. Always fires.
func rewriteValueMIPS_OpLsh32x16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh32x16 <t> x y)
	// cond:
	// result: (CMOVZ (SLL <t> x (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSCMOVZ)
		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32())
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, config.fe.TypeBool())
		v3.AuxInt = 32
		v4 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32())
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS_OpLsh32x32 lowers Lsh32x32 with a Go-semantics guard: CMOVZ
// selects the SLL result when the 32-bit shift count is < 32 and the constant 0
// otherwise. Always fires.
func rewriteValueMIPS_OpLsh32x32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh32x32 <t> x y)
	// cond:
	// result: (CMOVZ (SLL <t> x y) (MOVWconst [0]) (SGTUconst [32] y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSCMOVZ)
		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v1.AuxInt = 0
		v.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, config.fe.TypeBool())
		v2.AuxInt = 32
		v2.AddArg(y)
		v.AddArg(v2)
		return true
	}
}
// rewriteValueMIPS_OpLsh32x64 handles Lsh32x64 only when the shift count is a
// 64-bit constant: counts < 32 become an SLLconst, counts >= 32 fold to the
// constant 0. Non-constant counts are left for other rules (returns false).
func rewriteValueMIPS_OpLsh32x64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh32x64 x (Const64 [c]))
	// cond: uint32(c) < 32
	// result: (SLLconst x [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(uint32(c) < 32) {
			break
		}
		v.reset(OpMIPSSLLconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (Lsh32x64 _ (Const64 [c]))
	// cond: uint32(c) >= 32
	// result: (MOVWconst [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(uint32(c) >= 32) {
			break
		}
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueMIPS_OpLsh32x8 lowers Lsh32x8 with a Go-semantics guard: CMOVZ
// selects the SLL result when the zero-extended 8-bit shift count is < 32 and
// the constant 0 otherwise. Always fires.
func rewriteValueMIPS_OpLsh32x8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh32x8 <t> x y)
	// cond:
	// result: (CMOVZ (SLL <t> x (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSCMOVZ)
		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, config.fe.TypeBool())
		v3.AuxInt = 32
		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS_OpLsh8x16 lowers Lsh8x16 with a Go-semantics guard: CMOVZ
// selects the SLL result when the zero-extended 16-bit shift count is < 32 and
// the constant 0 otherwise. Always fires.
func rewriteValueMIPS_OpLsh8x16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh8x16 <t> x y)
	// cond:
	// result: (CMOVZ (SLL <t> x (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSCMOVZ)
		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32())
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, config.fe.TypeBool())
		v3.AuxInt = 32
		v4 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32())
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS_OpLsh8x32 lowers Lsh8x32 with a Go-semantics guard: CMOVZ
// selects the SLL result when the 32-bit shift count is < 32 and the constant 0
// otherwise. Always fires.
func rewriteValueMIPS_OpLsh8x32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh8x32 <t> x y)
	// cond:
	// result: (CMOVZ (SLL <t> x y) (MOVWconst [0]) (SGTUconst [32] y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSCMOVZ)
		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v1.AuxInt = 0
		v.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, config.fe.TypeBool())
		v2.AuxInt = 32
		v2.AddArg(y)
		v.AddArg(v2)
		return true
	}
}
// rewriteValueMIPS_OpLsh8x64 handles Lsh8x64 only when the shift count is a
// 64-bit constant: counts < 8 become an SLLconst, counts >= 8 fold to the
// constant 0. Non-constant counts are left for other rules (returns false).
func rewriteValueMIPS_OpLsh8x64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh8x64 x (Const64 [c]))
	// cond: uint32(c) < 8
	// result: (SLLconst x [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(uint32(c) < 8) {
			break
		}
		v.reset(OpMIPSSLLconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (Lsh8x64 _ (Const64 [c]))
	// cond: uint32(c) >= 8
	// result: (MOVWconst [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(uint32(c) >= 8) {
			break
		}
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueMIPS_OpLsh8x8 lowers Lsh8x8 with a Go-semantics guard: CMOVZ
// selects the SLL result when the zero-extended 8-bit shift count is < 32 and
// the constant 0 otherwise. Always fires.
func rewriteValueMIPS_OpLsh8x8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh8x8 <t> x y)
	// cond:
	// result: (CMOVZ (SLL <t> x (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSCMOVZ)
		v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, config.fe.TypeBool())
		v3.AuxInt = 32
		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS_OpMIPSADD optimizes the machine ADD op: an add with a
// constant operand (either side — ADD is commutative, hence the mirrored
// rules) becomes ADDconst, and an add of a negation (either side) becomes SUB.
// Returns false when no rule matches.
func rewriteValueMIPS_OpMIPSADD(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ADD (MOVWconst [c]) x)
	// cond:
	// result: (ADDconst [c] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpMIPSADDconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (ADD x (MOVWconst [c]))
	// cond:
	// result: (ADDconst [c] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSMOVWconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpMIPSADDconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (ADD x (NEG y))
	// cond:
	// result: (SUB x y)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSNEG {
			break
		}
		y := v_1.Args[0]
		v.reset(OpMIPSSUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD (NEG y) x)
	// cond:
	// result: (SUB x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSNEG {
			break
		}
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpMIPSSUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueMIPS_OpMIPSADDconst optimizes the machine ADDconst op: fold the
// offset into a MOVWaddr, drop an add of 0, fold into a constant, and collapse
// chained ADDconst/SUBconst into one ADDconst. Constant arithmetic is wrapped
// in int64(int32(...)) to keep 32-bit wraparound semantics in the 64-bit
// AuxInt. Returns false when no rule matches.
func rewriteValueMIPS_OpMIPSADDconst(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ADDconst [off1] (MOVWaddr [off2] {sym} ptr))
	// cond:
	// result: (MOVWaddr [off1+off2] {sym} ptr)
	for {
		off1 := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWaddr {
			break
		}
		off2 := v_0.AuxInt
		sym := v_0.Aux
		ptr := v_0.Args[0]
		v.reset(OpMIPSMOVWaddr)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		return true
	}
	// match: (ADDconst [0]  x)
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (ADDconst [c] (MOVWconst [d]))
	// cond:
	// result: (MOVWconst [int64(int32(c+d))])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = int64(int32(c + d))
		return true
	}
	// match: (ADDconst [c] (ADDconst [d] x))
	// cond:
	// result: (ADDconst [int64(int32(c+d))] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSADDconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpMIPSADDconst)
		v.AuxInt = int64(int32(c + d))
		v.AddArg(x)
		return true
	}
	// match: (ADDconst [c] (SUBconst [d] x))
	// cond:
	// result: (ADDconst [int64(int32(c-d))] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSSUBconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpMIPSADDconst)
		v.AuxInt = int64(int32(c - d))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS_OpMIPSAND applies the machine-generated rewrite rules for
// AND values: folding a constant operand (on either side) into ANDconst,
// collapsing (AND x x) to x, and combining two boolean-producing SGTUconst
// comparisons. Rules are attempted in source order; the first match rewrites
// v in place and returns true. Returns false if no rule matched.
func rewriteValueMIPS_OpMIPSAND(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (AND (MOVWconst [c]) x)
	// cond:
	// result: (ANDconst [c] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpMIPSANDconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (AND x (MOVWconst [c]))
	// cond:
	// result: (ANDconst [c] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSMOVWconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpMIPSANDconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (AND x x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (AND (SGTUconst [1] x) (SGTUconst [1] y))
	// cond:
	// result: (SGTUconst [1] (OR <x.Type> x y))
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSSGTUconst {
			break
		}
		if v_0.AuxInt != 1 {
			break
		}
		x := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSSGTUconst {
			break
		}
		if v_1.AuxInt != 1 {
			break
		}
		y := v_1.Args[0]
		v.reset(OpMIPSSGTUconst)
		v.AuxInt = 1
		// New OR value is created in v's own block with x's type.
		v0 := b.NewValue0(v.Pos, OpMIPSOR, x.Type)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueMIPS_OpMIPSANDconst applies the machine-generated rewrite rules
// for ANDconst values: masking with 0 or -1, and folding constants or nested
// ANDconst masks. Rules are attempted in source order; the first match
// rewrites v in place and returns true. Returns false if no rule matched.
func rewriteValueMIPS_OpMIPSANDconst(v *Value, config *Config) bool {
	b := v.Block
	_ = b // declared for generator uniformity; unused here
	// match: (ANDconst [0]  _)
	// cond:
	// result: (MOVWconst [0])
	for {
		if v.AuxInt != 0 {
			break
		}
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = 0
		return true
	}
	// match: (ANDconst [-1] x)
	// cond:
	// result: x
	for {
		if v.AuxInt != -1 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (ANDconst [c] (MOVWconst [d]))
	// cond:
	// result: (MOVWconst [c&d])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = c & d
		return true
	}
	// match: (ANDconst [c] (ANDconst [d] x))
	// cond:
	// result: (ANDconst [c&d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSANDconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpMIPSANDconst)
		v.AuxInt = c & d
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS_OpMIPSCMOVZ applies the machine-generated rewrite rules
// for CMOVZ (conditional move if zero) values when one operand is a known
// constant. Rules are attempted in source order; the first match rewrites v
// in place and returns true. Returns false if no rule matched.
func rewriteValueMIPS_OpMIPSCMOVZ(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (CMOVZ _ b (MOVWconst [0]))
	// cond:
	// result: b
	for {
		// NOTE: this rule-local `b` (the second CMOVZ operand, named in
		// the rule) shadows the enclosing block variable `b` above.
		b := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpMIPSMOVWconst {
			break
		}
		if v_2.AuxInt != 0 {
			break
		}
		v.reset(OpCopy)
		v.Type = b.Type
		v.AddArg(b)
		return true
	}
	// match: (CMOVZ a _ (MOVWconst [c]))
	// cond: c!=0
	// result: a
	for {
		a := v.Args[0]
		v_2 := v.Args[2]
		if v_2.Op != OpMIPSMOVWconst {
			break
		}
		c := v_2.AuxInt
		if !(c != 0) {
			break
		}
		v.reset(OpCopy)
		v.Type = a.Type
		v.AddArg(a)
		return true
	}
	// match: (CMOVZ a (MOVWconst [0]) c)
	// cond:
	// result: (CMOVZzero a c)
	for {
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSMOVWconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		c := v.Args[2]
		v.reset(OpMIPSCMOVZzero)
		v.AddArg(a)
		v.AddArg(c)
		return true
	}
	return false
}
  3201  func rewriteValueMIPS_OpMIPSCMOVZzero(v *Value, config *Config) bool {
  3202  	b := v.Block
  3203  	_ = b
  3204  	// match: (CMOVZzero _ (MOVWconst [0]))
  3205  	// cond:
  3206  	// result: (MOVWconst [0])
  3207  	for {
  3208  		v_1 := v.Args[1]
  3209  		if v_1.Op != OpMIPSMOVWconst {
  3210  			break
  3211  		}
  3212  		if v_1.AuxInt != 0 {
  3213  			break
  3214  		}
  3215  		v.reset(OpMIPSMOVWconst)
  3216  		v.AuxInt = 0
  3217  		return true
  3218  	}
  3219  	// match: (CMOVZzero a (MOVWconst [c]))
  3220  	// cond: c!=0
  3221  	// result: a
  3222  	for {
  3223  		a := v.Args[0]
  3224  		v_1 := v.Args[1]
  3225  		if v_1.Op != OpMIPSMOVWconst {
  3226  			break
  3227  		}
  3228  		c := v_1.AuxInt
  3229  		if !(c != 0) {
  3230  			break
  3231  		}
  3232  		v.reset(OpCopy)
  3233  		v.Type = a.Type
  3234  		v.AddArg(a)
  3235  		return true
  3236  	}
  3237  	return false
  3238  }
  3239  func rewriteValueMIPS_OpMIPSLoweredAtomicAdd(v *Value, config *Config) bool {
  3240  	b := v.Block
  3241  	_ = b
  3242  	// match: (LoweredAtomicAdd ptr (MOVWconst [c]) mem)
  3243  	// cond: is16Bit(c)
  3244  	// result: (LoweredAtomicAddconst [c] ptr mem)
  3245  	for {
  3246  		ptr := v.Args[0]
  3247  		v_1 := v.Args[1]
  3248  		if v_1.Op != OpMIPSMOVWconst {
  3249  			break
  3250  		}
  3251  		c := v_1.AuxInt
  3252  		mem := v.Args[2]
  3253  		if !(is16Bit(c)) {
  3254  			break
  3255  		}
  3256  		v.reset(OpMIPSLoweredAtomicAddconst)
  3257  		v.AuxInt = c
  3258  		v.AddArg(ptr)
  3259  		v.AddArg(mem)
  3260  		return true
  3261  	}
  3262  	return false
  3263  }
  3264  func rewriteValueMIPS_OpMIPSLoweredAtomicStore(v *Value, config *Config) bool {
  3265  	b := v.Block
  3266  	_ = b
  3267  	// match: (LoweredAtomicStore ptr (MOVWconst [0]) mem)
  3268  	// cond:
  3269  	// result: (LoweredAtomicStorezero ptr mem)
  3270  	for {
  3271  		ptr := v.Args[0]
  3272  		v_1 := v.Args[1]
  3273  		if v_1.Op != OpMIPSMOVWconst {
  3274  			break
  3275  		}
  3276  		if v_1.AuxInt != 0 {
  3277  			break
  3278  		}
  3279  		mem := v.Args[2]
  3280  		v.reset(OpMIPSLoweredAtomicStorezero)
  3281  		v.AddArg(ptr)
  3282  		v.AddArg(mem)
  3283  		return true
  3284  	}
  3285  	return false
  3286  }
// rewriteValueMIPS_OpMIPSMOVBUload applies the machine-generated rewrite
// rules for MOVBUload (unsigned byte load): folding an ADDconst or MOVWaddr
// address computation into the load's offset/symbol, and forwarding a value
// just stored to the same address. Rules are attempted in source order; the
// first match rewrites v in place and returns true. Returns false if no rule
// matched.
func rewriteValueMIPS_OpMIPSMOVBUload(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVBUload [off1] {sym} x:(ADDconst [off2] ptr) mem)
	// cond: (is16Bit(off1+off2) || x.Uses == 1)
	// result: (MOVBUload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		x := v.Args[0]
		if x.Op != OpMIPSADDconst {
			break
		}
		off2 := x.AuxInt
		ptr := x.Args[0]
		mem := v.Args[1]
		if !(is16Bit(off1+off2) || x.Uses == 1) {
			break
		}
		v.reset(OpMIPSMOVBUload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBUload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2)
	// result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpMIPSMOVBUload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBUload [off] {sym} ptr (MOVBstore [off2] {sym2} ptr2 x _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) && !isSigned(x.Type)
	// result: x
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSMOVBstore {
			break
		}
		off2 := v_1.AuxInt
		sym2 := v_1.Aux
		ptr2 := v_1.Args[0]
		x := v_1.Args[1]
		// Store-to-load forwarding requires an unsigned stored value so
		// no re-extension is needed.
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) && !isSigned(x.Type)) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS_OpMIPSMOVBUreg applies the machine-generated rewrite rules
// for MOVBUreg (zero-extend byte to word): dropping redundant extensions
// after unsigned loads/extensions, converting a signed byte load feeding a
// zero-extension into an unsigned load, and folding into ANDconst/MOVWconst.
// Rules are attempted in source order; the first match rewrites v in place
// and returns true. Returns false if no rule matched.
func rewriteValueMIPS_OpMIPSMOVBUreg(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVBUreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVWreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPSMOVBUload {
			break
		}
		v.reset(OpMIPSMOVWreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(MOVBUreg _))
	// cond:
	// result: (MOVWreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPSMOVBUreg {
			break
		}
		v.reset(OpMIPSMOVWreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg <t> x:(MOVBload [off] {sym} ptr mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: @x.Block (MOVBUload <t> [off] {sym} ptr mem)
	for {
		t := v.Type
		x := v.Args[0]
		if x.Op != OpMIPSMOVBload {
			break
		}
		off := x.AuxInt
		sym := x.Aux
		ptr := x.Args[0]
		mem := x.Args[1]
		if !(x.Uses == 1 && clobber(x)) {
			break
		}
		// The replacement load is created in x's block (the @x.Block
		// form), and v becomes a copy of it.
		b = x.Block
		v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, t)
		v.reset(OpCopy)
		v.AddArg(v0)
		v0.AuxInt = off
		v0.Aux = sym
		v0.AddArg(ptr)
		v0.AddArg(mem)
		return true
	}
	// match: (MOVBUreg (ANDconst [c] x))
	// cond:
	// result: (ANDconst [c&0xff] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSANDconst {
			break
		}
		c := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpMIPSANDconst)
		v.AuxInt = c & 0xff
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg (MOVWconst [c]))
	// cond:
	// result: (MOVWconst [int64(uint8(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPSMOVWconst)
		// Zero-extend the low byte of the constant.
		v.AuxInt = int64(uint8(c))
		return true
	}
	return false
}
// rewriteValueMIPS_OpMIPSMOVBload applies the machine-generated rewrite rules
// for MOVBload (signed byte load): folding an ADDconst or MOVWaddr address
// computation into the load's offset/symbol, and forwarding a value just
// stored to the same address. Rules are attempted in source order; the first
// match rewrites v in place and returns true. Returns false if no rule
// matched.
func rewriteValueMIPS_OpMIPSMOVBload(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVBload  [off1] {sym} x:(ADDconst [off2] ptr) mem)
	// cond: (is16Bit(off1+off2) || x.Uses == 1)
	// result: (MOVBload  [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		x := v.Args[0]
		if x.Op != OpMIPSADDconst {
			break
		}
		off2 := x.AuxInt
		ptr := x.Args[0]
		mem := v.Args[1]
		if !(is16Bit(off1+off2) || x.Uses == 1) {
			break
		}
		v.reset(OpMIPSMOVBload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2)
	// result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpMIPSMOVBload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBload [off] {sym} ptr (MOVBstore [off2] {sym2} ptr2 x _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) && isSigned(x.Type)
	// result: x
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSMOVBstore {
			break
		}
		off2 := v_1.AuxInt
		sym2 := v_1.Aux
		ptr2 := v_1.Args[0]
		x := v_1.Args[1]
		// Store-to-load forwarding requires a signed stored value so no
		// re-extension is needed (contrast with the MOVBUload rule).
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) && isSigned(x.Type)) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS_OpMIPSMOVBreg applies the machine-generated rewrite rules
// for MOVBreg (sign-extend byte to word): dropping redundant extensions after
// signed loads/extensions, converting an unsigned byte load feeding a
// sign-extension into a signed load, and folding into ANDconst/MOVWconst.
// Rules are attempted in source order; the first match rewrites v in place
// and returns true. Returns false if no rule matched.
func rewriteValueMIPS_OpMIPSMOVBreg(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVBreg x:(MOVBload _ _))
	// cond:
	// result: (MOVWreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPSMOVBload {
			break
		}
		v.reset(OpMIPSMOVWreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg x:(MOVBreg _))
	// cond:
	// result: (MOVWreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPSMOVBreg {
			break
		}
		v.reset(OpMIPSMOVWreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg <t> x:(MOVBUload [off] {sym} ptr mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: @x.Block (MOVBload <t> [off] {sym} ptr mem)
	for {
		t := v.Type
		x := v.Args[0]
		if x.Op != OpMIPSMOVBUload {
			break
		}
		off := x.AuxInt
		sym := x.Aux
		ptr := x.Args[0]
		mem := x.Args[1]
		if !(x.Uses == 1 && clobber(x)) {
			break
		}
		// The replacement load is created in x's block (the @x.Block
		// form), and v becomes a copy of it.
		b = x.Block
		v0 := b.NewValue0(v.Pos, OpMIPSMOVBload, t)
		v.reset(OpCopy)
		v.AddArg(v0)
		v0.AuxInt = off
		v0.Aux = sym
		v0.AddArg(ptr)
		v0.AddArg(mem)
		return true
	}
	// match: (MOVBreg (ANDconst [c] x))
	// cond: c & 0x80 == 0
	// result: (ANDconst [c&0x7f] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSANDconst {
			break
		}
		c := v_0.AuxInt
		x := v_0.Args[0]
		// The mask must clear the sign bit of the byte, so the
		// sign-extension is a no-op.
		if !(c&0x80 == 0) {
			break
		}
		v.reset(OpMIPSANDconst)
		v.AuxInt = c & 0x7f
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg  (MOVWconst [c]))
	// cond:
	// result: (MOVWconst [int64(int8(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPSMOVWconst)
		// Sign-extend the low byte of the constant.
		v.AuxInt = int64(int8(c))
		return true
	}
	return false
}
// rewriteValueMIPS_OpMIPSMOVBstore applies the machine-generated rewrite
// rules for MOVBstore (byte store): folding an ADDconst or MOVWaddr address
// computation into the store's offset/symbol, converting a constant-zero
// store into MOVBstorezero, and dropping redundant sign/zero extensions of
// the stored value (a byte store only writes the low 8 bits, so any MOV*reg
// extension of the value is irrelevant). Rules are attempted in source order;
// the first match rewrites v in place and returns true. Returns false if no
// rule matched.
func rewriteValueMIPS_OpMIPSMOVBstore(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVBstore [off1] {sym} x:(ADDconst [off2] ptr) val mem)
	// cond: (is16Bit(off1+off2) || x.Uses == 1)
	// result: (MOVBstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		x := v.Args[0]
		if x.Op != OpMIPSADDconst {
			break
		}
		off2 := x.AuxInt
		ptr := x.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is16Bit(off1+off2) || x.Uses == 1) {
			break
		}
		v.reset(OpMIPSMOVBstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2)
	// result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpMIPSMOVBstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWconst [0]) mem)
	// cond:
	// result: (MOVBstorezero [off] {sym} ptr mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSMOVWconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		mem := v.Args[2]
		v.reset(OpMIPSMOVBstorezero)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSMOVBreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPSMOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSMOVBUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPSMOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSMOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPSMOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSMOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPSMOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSMOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPSMOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS_OpMIPSMOVBstorezero applies the machine-generated rewrite
// rules for MOVBstorezero (store of a zero byte): folding an ADDconst or
// MOVWaddr address computation into the store's offset/symbol. Rules are
// attempted in source order; the first match rewrites v in place and returns
// true. Returns false if no rule matched.
func rewriteValueMIPS_OpMIPSMOVBstorezero(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVBstorezero [off1] {sym} x:(ADDconst [off2] ptr) mem)
	// cond: (is16Bit(off1+off2) || x.Uses == 1)
	// result: (MOVBstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		x := v.Args[0]
		if x.Op != OpMIPSADDconst {
			break
		}
		off2 := x.AuxInt
		ptr := x.Args[0]
		mem := v.Args[1]
		if !(is16Bit(off1+off2) || x.Uses == 1) {
			break
		}
		v.reset(OpMIPSMOVBstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstorezero [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2)
	// result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpMIPSMOVBstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS_OpMIPSMOVDload applies the machine-generated rewrite rules
// for MOVDload (double-precision float load): folding an ADDconst or MOVWaddr
// address computation into the load's offset/symbol, and forwarding a value
// just stored to the same address. Rules are attempted in source order; the
// first match rewrites v in place and returns true. Returns false if no rule
// matched.
func rewriteValueMIPS_OpMIPSMOVDload(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVDload  [off1] {sym} x:(ADDconst [off2] ptr) mem)
	// cond: (is16Bit(off1+off2) || x.Uses == 1)
	// result: (MOVDload  [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		x := v.Args[0]
		if x.Op != OpMIPSADDconst {
			break
		}
		off2 := x.AuxInt
		ptr := x.Args[0]
		mem := v.Args[1]
		if !(is16Bit(off1+off2) || x.Uses == 1) {
			break
		}
		v.reset(OpMIPSMOVDload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2)
	// result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpMIPSMOVDload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDload [off] {sym} ptr (MOVDstore [off2] {sym2} ptr2 x _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: x
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSMOVDstore {
			break
		}
		off2 := v_1.AuxInt
		sym2 := v_1.Aux
		ptr2 := v_1.Args[0]
		x := v_1.Args[1]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS_OpMIPSMOVDstore applies the machine-generated rewrite
// rules for MOVDstore (double-precision float store): folding an ADDconst or
// MOVWaddr address computation into the store's offset/symbol. Rules are
// attempted in source order; the first match rewrites v in place and returns
// true. Returns false if no rule matched.
func rewriteValueMIPS_OpMIPSMOVDstore(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVDstore [off1] {sym} x:(ADDconst [off2] ptr) val mem)
	// cond: (is16Bit(off1+off2) || x.Uses == 1)
	// result: (MOVDstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		x := v.Args[0]
		if x.Op != OpMIPSADDconst {
			break
		}
		off2 := x.AuxInt
		ptr := x.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is16Bit(off1+off2) || x.Uses == 1) {
			break
		}
		v.reset(OpMIPSMOVDstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2)
	// result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpMIPSMOVDstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS_OpMIPSMOVFload applies the machine-generated rewrite rules
// for MOVFload (single-precision float load): folding an ADDconst or MOVWaddr
// address computation into the load's offset/symbol, and forwarding a value
// just stored to the same address. Rules are attempted in source order; the
// first match rewrites v in place and returns true. Returns false if no rule
// matched.
func rewriteValueMIPS_OpMIPSMOVFload(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVFload  [off1] {sym} x:(ADDconst [off2] ptr) mem)
	// cond: (is16Bit(off1+off2) || x.Uses == 1)
	// result: (MOVFload  [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		x := v.Args[0]
		if x.Op != OpMIPSADDconst {
			break
		}
		off2 := x.AuxInt
		ptr := x.Args[0]
		mem := v.Args[1]
		if !(is16Bit(off1+off2) || x.Uses == 1) {
			break
		}
		v.reset(OpMIPSMOVFload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVFload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2)
	// result: (MOVFload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpMIPSMOVFload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVFload [off] {sym} ptr (MOVFstore [off2] {sym2} ptr2 x _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: x
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSMOVFstore {
			break
		}
		off2 := v_1.AuxInt
		sym2 := v_1.Aux
		ptr2 := v_1.Args[0]
		x := v_1.Args[1]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS_OpMIPSMOVFstore applies the machine-generated rewrite
// rules for MOVFstore (single-precision float store): folding an ADDconst or
// MOVWaddr address computation into the store's offset/symbol. Rules are
// attempted in source order; the first match rewrites v in place and returns
// true. Returns false if no rule matched.
func rewriteValueMIPS_OpMIPSMOVFstore(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVFstore [off1] {sym} x:(ADDconst [off2] ptr) val mem)
	// cond: (is16Bit(off1+off2) || x.Uses == 1)
	// result: (MOVFstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		x := v.Args[0]
		if x.Op != OpMIPSADDconst {
			break
		}
		off2 := x.AuxInt
		ptr := x.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is16Bit(off1+off2) || x.Uses == 1) {
			break
		}
		v.reset(OpMIPSMOVFstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVFstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2)
	// result: (MOVFstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpMIPSMOVFstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS_OpMIPSMOVHUload applies the machine-generated rewrite
// rules for MOVHUload (unsigned halfword load): folding an ADDconst or
// MOVWaddr address computation into the load's offset/symbol, and forwarding
// a value just stored to the same address. Rules are attempted in source
// order; the first match rewrites v in place and returns true. Returns false
// if no rule matched.
func rewriteValueMIPS_OpMIPSMOVHUload(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVHUload [off1] {sym} x:(ADDconst [off2] ptr) mem)
	// cond: (is16Bit(off1+off2) || x.Uses == 1)
	// result: (MOVHUload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		x := v.Args[0]
		if x.Op != OpMIPSADDconst {
			break
		}
		off2 := x.AuxInt
		ptr := x.Args[0]
		mem := v.Args[1]
		if !(is16Bit(off1+off2) || x.Uses == 1) {
			break
		}
		v.reset(OpMIPSMOVHUload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHUload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2)
	// result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpMIPSMOVHUload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHUload [off] {sym} ptr (MOVHstore [off2] {sym2} ptr2 x _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) && !isSigned(x.Type)
	// result: x
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSMOVHstore {
			break
		}
		off2 := v_1.AuxInt
		sym2 := v_1.Aux
		ptr2 := v_1.Args[0]
		x := v_1.Args[1]
		// Store-to-load forwarding requires an unsigned stored value so
		// no re-extension is needed.
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) && !isSigned(x.Type)) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS_OpMIPSMOVHUreg applies the machine-generated rewrite
// rules for MOVHUreg (see gen/MIPS.rules; do not hand-edit this code).
// Rules are tried in order; the first match rewrites v and returns true.
// The first four rules drop the extension when the operand is already
// produced in extended form, leaving only a plain MOVWreg move.
func rewriteValueMIPS_OpMIPSMOVHUreg(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVHUreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVWreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPSMOVBUload {
			break
		}
		v.reset(OpMIPSMOVWreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUload _ _))
	// cond:
	// result: (MOVWreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPSMOVHUload {
			break
		}
		v.reset(OpMIPSMOVWreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUreg _))
	// cond:
	// result: (MOVWreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPSMOVBUreg {
			break
		}
		v.reset(OpMIPSMOVWreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUreg _))
	// cond:
	// result: (MOVWreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPSMOVHUreg {
			break
		}
		v.reset(OpMIPSMOVWreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg <t> x:(MOVHload [off] {sym} ptr mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: @x.Block (MOVHUload <t> [off] {sym} ptr mem)
	// Replace a signed load + unsigned extension with a single unsigned
	// load, emitted in the load's original block (the @x.Block form).
	for {
		t := v.Type
		x := v.Args[0]
		if x.Op != OpMIPSMOVHload {
			break
		}
		off := x.AuxInt
		sym := x.Aux
		ptr := x.Args[0]
		mem := x.Args[1]
		if !(x.Uses == 1 && clobber(x)) {
			break
		}
		b = x.Block
		v0 := b.NewValue0(v.Pos, OpMIPSMOVHUload, t)
		v.reset(OpCopy)
		v.AddArg(v0)
		v0.AuxInt = off
		v0.Aux = sym
		v0.AddArg(ptr)
		v0.AddArg(mem)
		return true
	}
	// match: (MOVHUreg (ANDconst [c] x))
	// cond:
	// result: (ANDconst [c&0xffff] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSANDconst {
			break
		}
		c := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpMIPSANDconst)
		// Masking with c&0xffff subsumes the zero-extension.
		v.AuxInt = c & 0xffff
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg (MOVWconst [c]))
	// cond:
	// result: (MOVWconst [int64(uint16(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPSMOVWconst)
		// Constant-fold: keep only the low 16 bits, zero-extended.
		v.AuxInt = int64(uint16(c))
		return true
	}
	return false
}
// rewriteValueMIPS_OpMIPSMOVHload applies the machine-generated rewrite
// rules for MOVHload (see gen/MIPS.rules; do not hand-edit this code).
// Rules are tried in order; the first match rewrites v and returns true.
func rewriteValueMIPS_OpMIPSMOVHload(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVHload  [off1] {sym} x:(ADDconst [off2] ptr) mem)
	// cond: (is16Bit(off1+off2) || x.Uses == 1)
	// result: (MOVHload  [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		x := v.Args[0]
		if x.Op != OpMIPSADDconst {
			break
		}
		off2 := x.AuxInt
		ptr := x.Args[0]
		mem := v.Args[1]
		// Fold the ADDconst into the load offset when the sum still fits
		// is16Bit, or when the ADDconst has no other uses anyway.
		if !(is16Bit(off1+off2) || x.Uses == 1) {
			break
		}
		v.reset(OpMIPSMOVHload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2)
	// result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpMIPSMOVHload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHload [off] {sym} ptr (MOVHstore [off2] {sym2} ptr2 x _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) && isSigned(x.Type)
	// result: x
	// Store-to-load forwarding: reuse the value just stored to the same
	// address (only when x's type is signed, matching this signed load).
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSMOVHstore {
			break
		}
		off2 := v_1.AuxInt
		sym2 := v_1.Aux
		ptr2 := v_1.Args[0]
		x := v_1.Args[1]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) && isSigned(x.Type)) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS_OpMIPSMOVHreg applies the machine-generated rewrite
// rules for MOVHreg (see gen/MIPS.rules; do not hand-edit this code).
// Rules are tried in order; the first match rewrites v and returns true.
// The first six rules drop the extension when the operand is already
// produced in extended form, leaving only a plain MOVWreg move.
func rewriteValueMIPS_OpMIPSMOVHreg(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVHreg x:(MOVBload _ _))
	// cond:
	// result: (MOVWreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPSMOVBload {
			break
		}
		v.reset(OpMIPSMOVWreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVWreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPSMOVBUload {
			break
		}
		v.reset(OpMIPSMOVWreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHload _ _))
	// cond:
	// result: (MOVWreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPSMOVHload {
			break
		}
		v.reset(OpMIPSMOVWreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBreg _))
	// cond:
	// result: (MOVWreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPSMOVBreg {
			break
		}
		v.reset(OpMIPSMOVWreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUreg _))
	// cond:
	// result: (MOVWreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPSMOVBUreg {
			break
		}
		v.reset(OpMIPSMOVWreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHreg _))
	// cond:
	// result: (MOVWreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPSMOVHreg {
			break
		}
		v.reset(OpMIPSMOVWreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg <t> x:(MOVHUload [off] {sym} ptr mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: @x.Block (MOVHload <t> [off] {sym} ptr mem)
	// Replace an unsigned load + signed extension with a single signed
	// load, emitted in the load's original block (the @x.Block form).
	for {
		t := v.Type
		x := v.Args[0]
		if x.Op != OpMIPSMOVHUload {
			break
		}
		off := x.AuxInt
		sym := x.Aux
		ptr := x.Args[0]
		mem := x.Args[1]
		if !(x.Uses == 1 && clobber(x)) {
			break
		}
		b = x.Block
		v0 := b.NewValue0(v.Pos, OpMIPSMOVHload, t)
		v.reset(OpCopy)
		v.AddArg(v0)
		v0.AuxInt = off
		v0.Aux = sym
		v0.AddArg(ptr)
		v0.AddArg(mem)
		return true
	}
	// match: (MOVHreg (ANDconst [c] x))
	// cond: c & 0x8000 == 0
	// result: (ANDconst [c&0x7fff] x)
	// Only valid when the mask clears the sign bit (bit 15), so the
	// masked value can never be negative and sign-extension is a no-op.
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSANDconst {
			break
		}
		c := v_0.AuxInt
		x := v_0.Args[0]
		if !(c&0x8000 == 0) {
			break
		}
		v.reset(OpMIPSANDconst)
		v.AuxInt = c & 0x7fff
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg  (MOVWconst [c]))
	// cond:
	// result: (MOVWconst [int64(int16(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPSMOVWconst)
		// Constant-fold: sign-extend the low 16 bits.
		v.AuxInt = int64(int16(c))
		return true
	}
	return false
}
// rewriteValueMIPS_OpMIPSMOVHstore applies the machine-generated rewrite
// rules for MOVHstore (see gen/MIPS.rules; do not hand-edit this code).
// Rules are tried in order; the first match rewrites v and returns true.
func rewriteValueMIPS_OpMIPSMOVHstore(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVHstore [off1] {sym} x:(ADDconst [off2] ptr) val mem)
	// cond: (is16Bit(off1+off2) || x.Uses == 1)
	// result: (MOVHstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		x := v.Args[0]
		if x.Op != OpMIPSADDconst {
			break
		}
		off2 := x.AuxInt
		ptr := x.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		// Fold the ADDconst into the store offset when the sum still fits
		// is16Bit, or when the ADDconst has no other uses anyway.
		if !(is16Bit(off1+off2) || x.Uses == 1) {
			break
		}
		v.reset(OpMIPSMOVHstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2)
	// result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpMIPSMOVHstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWconst [0]) mem)
	// cond:
	// result: (MOVHstorezero [off] {sym} ptr mem)
	// Storing constant zero uses the dedicated zero-store op.
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSMOVWconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		mem := v.Args[2]
		v.reset(OpMIPSMOVHstorezero)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
	// cond:
	// result: (MOVHstore [off] {sym} ptr x mem)
	// The next three rules drop a register extension feeding the store:
	// only the low half is written, so the extension is irrelevant.
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSMOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPSMOVHstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
	// cond:
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSMOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPSMOVHstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
	// cond:
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSMOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPSMOVHstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS_OpMIPSMOVHstorezero applies the machine-generated
// rewrite rules for MOVHstorezero (see gen/MIPS.rules; do not hand-edit
// this code). Both rules are address folding: absorb an ADDconst offset
// or merge a MOVWaddr symbol into the store itself.
func rewriteValueMIPS_OpMIPSMOVHstorezero(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVHstorezero [off1] {sym} x:(ADDconst [off2] ptr) mem)
	// cond: (is16Bit(off1+off2) || x.Uses == 1)
	// result: (MOVHstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		x := v.Args[0]
		if x.Op != OpMIPSADDconst {
			break
		}
		off2 := x.AuxInt
		ptr := x.Args[0]
		mem := v.Args[1]
		if !(is16Bit(off1+off2) || x.Uses == 1) {
			break
		}
		v.reset(OpMIPSMOVHstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstorezero [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2)
	// result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpMIPSMOVHstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS_OpMIPSMOVWload applies the machine-generated rewrite
// rules for MOVWload (see gen/MIPS.rules; do not hand-edit this code).
// Rules are tried in order; the first match rewrites v and returns true.
func rewriteValueMIPS_OpMIPSMOVWload(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVWload  [off1] {sym} x:(ADDconst [off2] ptr) mem)
	// cond: (is16Bit(off1+off2) || x.Uses == 1)
	// result: (MOVWload  [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		x := v.Args[0]
		if x.Op != OpMIPSADDconst {
			break
		}
		off2 := x.AuxInt
		ptr := x.Args[0]
		mem := v.Args[1]
		// Fold the ADDconst into the load offset when the sum still fits
		// is16Bit, or when the ADDconst has no other uses anyway.
		if !(is16Bit(off1+off2) || x.Uses == 1) {
			break
		}
		v.reset(OpMIPSMOVWload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2)
	// result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpMIPSMOVWload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWload [off] {sym} ptr (MOVWstore [off2] {sym2} ptr2 x _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: x
	// Store-to-load forwarding: reuse the full word just stored to the
	// same address instead of reloading it (no extension concerns here).
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSMOVWstore {
			break
		}
		off2 := v_1.AuxInt
		sym2 := v_1.Aux
		ptr2 := v_1.Args[0]
		x := v_1.Args[1]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS_OpMIPSMOVWreg applies the machine-generated rewrite
// rules for MOVWreg (see gen/MIPS.rules; do not hand-edit this code).
// Rules are tried in order; the first match rewrites v and returns true.
func rewriteValueMIPS_OpMIPSMOVWreg(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVWreg x)
	// cond: x.Uses == 1
	// result: (MOVWnop x)
	// A word-to-word move whose source has no other users can become a
	// no-op marker instead of a real move.
	for {
		x := v.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpMIPSMOVWnop)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg  (MOVWconst [c]))
	// cond:
	// result: (MOVWconst [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = c
		return true
	}
	return false
}
// rewriteValueMIPS_OpMIPSMOVWstore applies the machine-generated rewrite
// rules for MOVWstore (see gen/MIPS.rules; do not hand-edit this code).
// Rules are tried in order; the first match rewrites v and returns true.
func rewriteValueMIPS_OpMIPSMOVWstore(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVWstore [off1] {sym} x:(ADDconst [off2] ptr) val mem)
	// cond: (is16Bit(off1+off2) || x.Uses == 1)
	// result: (MOVWstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		x := v.Args[0]
		if x.Op != OpMIPSADDconst {
			break
		}
		off2 := x.AuxInt
		ptr := x.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		// Fold the ADDconst into the store offset when the sum still fits
		// is16Bit, or when the ADDconst has no other uses anyway.
		if !(is16Bit(off1+off2) || x.Uses == 1) {
			break
		}
		v.reset(OpMIPSMOVWstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2)
	// result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpMIPSMOVWstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWconst [0]) mem)
	// cond:
	// result: (MOVWstorezero [off] {sym} ptr mem)
	// Storing constant zero uses the dedicated zero-store op.
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSMOVWconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		mem := v.Args[2]
		v.reset(OpMIPSMOVWstorezero)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
	// cond:
	// result: (MOVWstore [off] {sym} ptr x mem)
	// Drop a redundant word-to-word register move feeding the store.
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSMOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpMIPSMOVWstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS_OpMIPSMOVWstorezero applies the machine-generated
// rewrite rules for MOVWstorezero (see gen/MIPS.rules; do not hand-edit
// this code). Both rules are address folding: absorb an ADDconst offset
// or merge a MOVWaddr symbol into the store itself.
func rewriteValueMIPS_OpMIPSMOVWstorezero(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVWstorezero [off1] {sym} x:(ADDconst [off2] ptr) mem)
	// cond: (is16Bit(off1+off2) || x.Uses == 1)
	// result: (MOVWstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		x := v.Args[0]
		if x.Op != OpMIPSADDconst {
			break
		}
		off2 := x.AuxInt
		ptr := x.Args[0]
		mem := v.Args[1]
		if !(is16Bit(off1+off2) || x.Uses == 1) {
			break
		}
		v.reset(OpMIPSMOVWstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstorezero [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2)
	// result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpMIPSMOVWstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS_OpMIPSMUL applies the machine-generated strength-
// reduction and constant-folding rules for MUL (see gen/MIPS.rules; do
// not hand-edit this code). Rules are tried in order; the first match
// rewrites v and returns true. Note all rules match the constant in the
// first operand position only — the generator relies on a separate
// canonicalization/commutation pass for the mirrored forms.
func rewriteValueMIPS_OpMIPSMUL(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MUL (MOVWconst [0]) _ )
	// cond:
	// result: (MOVWconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWconst {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = 0
		return true
	}
	// match: (MUL (MOVWconst [1]) x )
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWconst {
			break
		}
		if v_0.AuxInt != 1 {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (MUL (MOVWconst [-1]) x )
	// cond:
	// result: (NEG x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWconst {
			break
		}
		if v_0.AuxInt != -1 {
			break
		}
		x := v.Args[1]
		v.reset(OpMIPSNEG)
		v.AddArg(x)
		return true
	}
	// match: (MUL (MOVWconst [c]) x )
	// cond: isPowerOfTwo(int64(uint32(c)))
	// result: (SLLconst [log2(int64(uint32(c)))] x)
	// Multiplication by a power of two becomes a left shift. The uint32
	// conversion evaluates c as an unsigned 32-bit value first.
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(isPowerOfTwo(int64(uint32(c)))) {
			break
		}
		v.reset(OpMIPSSLLconst)
		v.AuxInt = log2(int64(uint32(c)))
		v.AddArg(x)
		return true
	}
	// match: (MUL (MOVWconst [c]) (MOVWconst [d]))
	// cond:
	// result: (MOVWconst [int64(int32(c)*int32(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWconst {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSMOVWconst {
			break
		}
		d := v_1.AuxInt
		v.reset(OpMIPSMOVWconst)
		// Fold with 32-bit wraparound, then widen back to AuxInt's int64.
		v.AuxInt = int64(int32(c) * int32(d))
		return true
	}
	return false
}
// rewriteValueMIPS_OpMIPSNEG applies the machine-generated rewrite rule
// for NEG (see gen/MIPS.rules; do not hand-edit this code): negation of
// a constant folds to a constant, negated with 32-bit wraparound.
func rewriteValueMIPS_OpMIPSNEG(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (NEG (MOVWconst [c]))
	// cond:
	// result: (MOVWconst [int64(int32(-c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = int64(int32(-c))
		return true
	}
	return false
}
// rewriteValueMIPS_OpMIPSNOR applies the machine-generated rewrite rules
// for NOR (see gen/MIPS.rules; do not hand-edit this code). Both rules
// move a constant operand into the immediate form NORconst; the two
// loops cover the constant in either operand position.
func rewriteValueMIPS_OpMIPSNOR(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (NOR (MOVWconst [c]) x)
	// cond:
	// result: (NORconst [c] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpMIPSNORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (NOR x (MOVWconst [c]))
	// cond:
	// result: (NORconst [c] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSMOVWconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpMIPSNORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS_OpMIPSNORconst applies the machine-generated rewrite
// rule for NORconst (see gen/MIPS.rules; do not hand-edit this code):
// NOR of two constants folds to the constant ^(c|d).
func rewriteValueMIPS_OpMIPSNORconst(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (NORconst [c] (MOVWconst [d]))
	// cond:
	// result: (MOVWconst [^(c|d)])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = ^(c | d)
		return true
	}
	return false
}
// rewriteValueMIPS_OpMIPSOR applies the machine-generated rewrite rules
// for OR (see gen/MIPS.rules; do not hand-edit this code). Rules are
// tried in order; the first match rewrites v and returns true.
func rewriteValueMIPS_OpMIPSOR(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (OR  (MOVWconst [c]) x)
	// cond:
	// result: (ORconst  [c] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpMIPSORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (OR  x (MOVWconst [c]))
	// cond:
	// result: (ORconst  [c] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSMOVWconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpMIPSORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (OR  x x)
	// cond:
	// result: x
	// x|x == x: collapse to the operand itself.
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (OR (SGTUzero x) (SGTUzero y))
	// cond:
	// result: (SGTUzero (OR <x.Type> x y))
	// Combine two zero-comparisons into one: OR the inputs first, then
	// test the combined value against zero once.
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSSGTUzero {
			break
		}
		x := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSSGTUzero {
			break
		}
		y := v_1.Args[0]
		v.reset(OpMIPSSGTUzero)
		v0 := b.NewValue0(v.Pos, OpMIPSOR, x.Type)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueMIPS_OpMIPSORconst applies the machine-generated rewrite
// rules for ORconst (see gen/MIPS.rules; do not hand-edit this code).
// Rules are tried in order; the first match rewrites v and returns true.
func rewriteValueMIPS_OpMIPSORconst(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ORconst  [0]  x)
	// cond:
	// result: x
	// x|0 == x: the OR is a no-op.
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (ORconst  [-1] _)
	// cond:
	// result: (MOVWconst [-1])
	// x|-1 == -1 (all bits set) regardless of x.
	for {
		if v.AuxInt != -1 {
			break
		}
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = -1
		return true
	}
	// match: (ORconst [c] (MOVWconst [d]))
	// cond:
	// result: (MOVWconst [c|d])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = c | d
		return true
	}
	// match: (ORconst [c] (ORconst [d] x))
	// cond:
	// result: (ORconst [c|d] x)
	// Merge nested constant ORs into a single immediate.
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSORconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpMIPSORconst)
		v.AuxInt = c | d
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS_OpMIPSSGT applies the machine-generated rewrite rules
// for SGT (see gen/MIPS.rules; do not hand-edit this code): a constant
// first operand becomes the immediate form SGTconst, and comparison
// against constant zero becomes the dedicated SGTzero form.
func rewriteValueMIPS_OpMIPSSGT(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (SGT  (MOVWconst [c]) x)
	// cond:
	// result: (SGTconst  [c] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpMIPSSGTconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (SGT x (MOVWconst [0]))
	// cond:
	// result: (SGTzero x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSMOVWconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpMIPSSGTzero)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS_OpMIPSSGTU applies the machine-generated rewrite rules
// for SGTU (see gen/MIPS.rules; do not hand-edit this code): a constant
// first operand becomes the immediate form SGTUconst, and comparison
// against constant zero becomes the dedicated SGTUzero form.
func rewriteValueMIPS_OpMIPSSGTU(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (SGTU (MOVWconst [c]) x)
	// cond:
	// result: (SGTUconst [c] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpMIPSSGTUconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (SGTU x (MOVWconst [0]))
	// cond:
	// result: (SGTUzero x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSMOVWconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpMIPSSGTUzero)
		v.AddArg(x)
		return true
	}
	return false
}
  5304  func rewriteValueMIPS_OpMIPSSGTUconst(v *Value, config *Config) bool {
  5305  	b := v.Block
  5306  	_ = b
  5307  	// match: (SGTUconst [c] (MOVWconst [d]))
  5308  	// cond: uint32(c)>uint32(d)
  5309  	// result: (MOVWconst [1])
  5310  	for {
  5311  		c := v.AuxInt
  5312  		v_0 := v.Args[0]
  5313  		if v_0.Op != OpMIPSMOVWconst {
  5314  			break
  5315  		}
  5316  		d := v_0.AuxInt
  5317  		if !(uint32(c) > uint32(d)) {
  5318  			break
  5319  		}
  5320  		v.reset(OpMIPSMOVWconst)
  5321  		v.AuxInt = 1
  5322  		return true
  5323  	}
  5324  	// match: (SGTUconst [c] (MOVWconst [d]))
  5325  	// cond: uint32(c)<=uint32(d)
  5326  	// result: (MOVWconst [0])
  5327  	for {
  5328  		c := v.AuxInt
  5329  		v_0 := v.Args[0]
  5330  		if v_0.Op != OpMIPSMOVWconst {
  5331  			break
  5332  		}
  5333  		d := v_0.AuxInt
  5334  		if !(uint32(c) <= uint32(d)) {
  5335  			break
  5336  		}
  5337  		v.reset(OpMIPSMOVWconst)
  5338  		v.AuxInt = 0
  5339  		return true
  5340  	}
  5341  	// match: (SGTUconst [c] (MOVBUreg _))
  5342  	// cond: 0xff < uint32(c)
  5343  	// result: (MOVWconst [1])
  5344  	for {
  5345  		c := v.AuxInt
  5346  		v_0 := v.Args[0]
  5347  		if v_0.Op != OpMIPSMOVBUreg {
  5348  			break
  5349  		}
  5350  		if !(0xff < uint32(c)) {
  5351  			break
  5352  		}
  5353  		v.reset(OpMIPSMOVWconst)
  5354  		v.AuxInt = 1
  5355  		return true
  5356  	}
  5357  	// match: (SGTUconst [c] (MOVHUreg _))
  5358  	// cond: 0xffff < uint32(c)
  5359  	// result: (MOVWconst [1])
  5360  	for {
  5361  		c := v.AuxInt
  5362  		v_0 := v.Args[0]
  5363  		if v_0.Op != OpMIPSMOVHUreg {
  5364  			break
  5365  		}
  5366  		if !(0xffff < uint32(c)) {
  5367  			break
  5368  		}
  5369  		v.reset(OpMIPSMOVWconst)
  5370  		v.AuxInt = 1
  5371  		return true
  5372  	}
  5373  	// match: (SGTUconst [c] (ANDconst [m] _))
  5374  	// cond: uint32(m) < uint32(c)
  5375  	// result: (MOVWconst [1])
  5376  	for {
  5377  		c := v.AuxInt
  5378  		v_0 := v.Args[0]
  5379  		if v_0.Op != OpMIPSANDconst {
  5380  			break
  5381  		}
  5382  		m := v_0.AuxInt
  5383  		if !(uint32(m) < uint32(c)) {
  5384  			break
  5385  		}
  5386  		v.reset(OpMIPSMOVWconst)
  5387  		v.AuxInt = 1
  5388  		return true
  5389  	}
  5390  	// match: (SGTUconst [c] (SRLconst _ [d]))
  5391  	// cond: uint32(d) <= 31 && 1<<(32-uint32(d)) <= uint32(c)
  5392  	// result: (MOVWconst [1])
  5393  	for {
  5394  		c := v.AuxInt
  5395  		v_0 := v.Args[0]
  5396  		if v_0.Op != OpMIPSSRLconst {
  5397  			break
  5398  		}
  5399  		d := v_0.AuxInt
  5400  		if !(uint32(d) <= 31 && 1<<(32-uint32(d)) <= uint32(c)) {
  5401  			break
  5402  		}
  5403  		v.reset(OpMIPSMOVWconst)
  5404  		v.AuxInt = 1
  5405  		return true
  5406  	}
  5407  	return false
  5408  }
// rewriteValueMIPS_OpMIPSSGTUzero constant-folds MIPS SGTUzero (operand
// unsigned-greater-than zero) when the operand is a known constant:
// any nonzero value yields 1, zero yields 0. Reports whether v was
// rewritten.
func rewriteValueMIPS_OpMIPSSGTUzero(v *Value, config *Config) bool {
	b := v.Block
	_ = b // kept for uniformity with generated rewrite functions that need b
	// match: (SGTUzero (MOVWconst [d]))
	// cond: uint32(d) != 0
	// result: (MOVWconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWconst {
			break
		}
		d := v_0.AuxInt
		if !(uint32(d) != 0) {
			break
		}
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTUzero (MOVWconst [d]))
	// cond: uint32(d) == 0
	// result: (MOVWconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWconst {
			break
		}
		d := v_0.AuxInt
		if !(uint32(d) == 0) {
			break
		}
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = 0
		return true
	}
	return false
}
  5446  func rewriteValueMIPS_OpMIPSSGTconst(v *Value, config *Config) bool {
  5447  	b := v.Block
  5448  	_ = b
  5449  	// match: (SGTconst [c] (MOVWconst [d]))
  5450  	// cond: int32(c) > int32(d)
  5451  	// result: (MOVWconst [1])
  5452  	for {
  5453  		c := v.AuxInt
  5454  		v_0 := v.Args[0]
  5455  		if v_0.Op != OpMIPSMOVWconst {
  5456  			break
  5457  		}
  5458  		d := v_0.AuxInt
  5459  		if !(int32(c) > int32(d)) {
  5460  			break
  5461  		}
  5462  		v.reset(OpMIPSMOVWconst)
  5463  		v.AuxInt = 1
  5464  		return true
  5465  	}
  5466  	// match: (SGTconst [c] (MOVWconst [d]))
  5467  	// cond: int32(c) <= int32(d)
  5468  	// result: (MOVWconst [0])
  5469  	for {
  5470  		c := v.AuxInt
  5471  		v_0 := v.Args[0]
  5472  		if v_0.Op != OpMIPSMOVWconst {
  5473  			break
  5474  		}
  5475  		d := v_0.AuxInt
  5476  		if !(int32(c) <= int32(d)) {
  5477  			break
  5478  		}
  5479  		v.reset(OpMIPSMOVWconst)
  5480  		v.AuxInt = 0
  5481  		return true
  5482  	}
  5483  	// match: (SGTconst [c] (MOVBreg _))
  5484  	// cond: 0x7f < int32(c)
  5485  	// result: (MOVWconst [1])
  5486  	for {
  5487  		c := v.AuxInt
  5488  		v_0 := v.Args[0]
  5489  		if v_0.Op != OpMIPSMOVBreg {
  5490  			break
  5491  		}
  5492  		if !(0x7f < int32(c)) {
  5493  			break
  5494  		}
  5495  		v.reset(OpMIPSMOVWconst)
  5496  		v.AuxInt = 1
  5497  		return true
  5498  	}
  5499  	// match: (SGTconst [c] (MOVBreg _))
  5500  	// cond: int32(c) <= -0x80
  5501  	// result: (MOVWconst [0])
  5502  	for {
  5503  		c := v.AuxInt
  5504  		v_0 := v.Args[0]
  5505  		if v_0.Op != OpMIPSMOVBreg {
  5506  			break
  5507  		}
  5508  		if !(int32(c) <= -0x80) {
  5509  			break
  5510  		}
  5511  		v.reset(OpMIPSMOVWconst)
  5512  		v.AuxInt = 0
  5513  		return true
  5514  	}
  5515  	// match: (SGTconst [c] (MOVBUreg _))
  5516  	// cond: 0xff < int32(c)
  5517  	// result: (MOVWconst [1])
  5518  	for {
  5519  		c := v.AuxInt
  5520  		v_0 := v.Args[0]
  5521  		if v_0.Op != OpMIPSMOVBUreg {
  5522  			break
  5523  		}
  5524  		if !(0xff < int32(c)) {
  5525  			break
  5526  		}
  5527  		v.reset(OpMIPSMOVWconst)
  5528  		v.AuxInt = 1
  5529  		return true
  5530  	}
  5531  	// match: (SGTconst [c] (MOVBUreg _))
  5532  	// cond: int32(c) < 0
  5533  	// result: (MOVWconst [0])
  5534  	for {
  5535  		c := v.AuxInt
  5536  		v_0 := v.Args[0]
  5537  		if v_0.Op != OpMIPSMOVBUreg {
  5538  			break
  5539  		}
  5540  		if !(int32(c) < 0) {
  5541  			break
  5542  		}
  5543  		v.reset(OpMIPSMOVWconst)
  5544  		v.AuxInt = 0
  5545  		return true
  5546  	}
  5547  	// match: (SGTconst [c] (MOVHreg _))
  5548  	// cond: 0x7fff < int32(c)
  5549  	// result: (MOVWconst [1])
  5550  	for {
  5551  		c := v.AuxInt
  5552  		v_0 := v.Args[0]
  5553  		if v_0.Op != OpMIPSMOVHreg {
  5554  			break
  5555  		}
  5556  		if !(0x7fff < int32(c)) {
  5557  			break
  5558  		}
  5559  		v.reset(OpMIPSMOVWconst)
  5560  		v.AuxInt = 1
  5561  		return true
  5562  	}
  5563  	// match: (SGTconst [c] (MOVHreg _))
  5564  	// cond: int32(c) <= -0x8000
  5565  	// result: (MOVWconst [0])
  5566  	for {
  5567  		c := v.AuxInt
  5568  		v_0 := v.Args[0]
  5569  		if v_0.Op != OpMIPSMOVHreg {
  5570  			break
  5571  		}
  5572  		if !(int32(c) <= -0x8000) {
  5573  			break
  5574  		}
  5575  		v.reset(OpMIPSMOVWconst)
  5576  		v.AuxInt = 0
  5577  		return true
  5578  	}
  5579  	// match: (SGTconst [c] (MOVHUreg _))
  5580  	// cond: 0xffff < int32(c)
  5581  	// result: (MOVWconst [1])
  5582  	for {
  5583  		c := v.AuxInt
  5584  		v_0 := v.Args[0]
  5585  		if v_0.Op != OpMIPSMOVHUreg {
  5586  			break
  5587  		}
  5588  		if !(0xffff < int32(c)) {
  5589  			break
  5590  		}
  5591  		v.reset(OpMIPSMOVWconst)
  5592  		v.AuxInt = 1
  5593  		return true
  5594  	}
  5595  	// match: (SGTconst [c] (MOVHUreg _))
  5596  	// cond: int32(c) < 0
  5597  	// result: (MOVWconst [0])
  5598  	for {
  5599  		c := v.AuxInt
  5600  		v_0 := v.Args[0]
  5601  		if v_0.Op != OpMIPSMOVHUreg {
  5602  			break
  5603  		}
  5604  		if !(int32(c) < 0) {
  5605  			break
  5606  		}
  5607  		v.reset(OpMIPSMOVWconst)
  5608  		v.AuxInt = 0
  5609  		return true
  5610  	}
  5611  	// match: (SGTconst [c] (ANDconst [m] _))
  5612  	// cond: 0 <= int32(m) && int32(m) < int32(c)
  5613  	// result: (MOVWconst [1])
  5614  	for {
  5615  		c := v.AuxInt
  5616  		v_0 := v.Args[0]
  5617  		if v_0.Op != OpMIPSANDconst {
  5618  			break
  5619  		}
  5620  		m := v_0.AuxInt
  5621  		if !(0 <= int32(m) && int32(m) < int32(c)) {
  5622  			break
  5623  		}
  5624  		v.reset(OpMIPSMOVWconst)
  5625  		v.AuxInt = 1
  5626  		return true
  5627  	}
  5628  	// match: (SGTconst [c] (SRLconst _ [d]))
  5629  	// cond: 0 <= int32(c) && uint32(d) <= 31 && 1<<(32-uint32(d)) <= int32(c)
  5630  	// result: (MOVWconst [1])
  5631  	for {
  5632  		c := v.AuxInt
  5633  		v_0 := v.Args[0]
  5634  		if v_0.Op != OpMIPSSRLconst {
  5635  			break
  5636  		}
  5637  		d := v_0.AuxInt
  5638  		if !(0 <= int32(c) && uint32(d) <= 31 && 1<<(32-uint32(d)) <= int32(c)) {
  5639  			break
  5640  		}
  5641  		v.reset(OpMIPSMOVWconst)
  5642  		v.AuxInt = 1
  5643  		return true
  5644  	}
  5645  	return false
  5646  }
// rewriteValueMIPS_OpMIPSSGTzero constant-folds MIPS SGTzero (operand
// signed-greater-than zero) when the operand is a known constant:
// a positive value yields 1, zero or negative yields 0. Reports whether
// v was rewritten.
func rewriteValueMIPS_OpMIPSSGTzero(v *Value, config *Config) bool {
	b := v.Block
	_ = b // kept for uniformity with generated rewrite functions that need b
	// match: (SGTzero (MOVWconst [d]))
	// cond: int32(d) > 0
	// result: (MOVWconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWconst {
			break
		}
		d := v_0.AuxInt
		if !(int32(d) > 0) {
			break
		}
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTzero (MOVWconst [d]))
	// cond: int32(d) <= 0
	// result: (MOVWconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWconst {
			break
		}
		d := v_0.AuxInt
		if !(int32(d) <= 0) {
			break
		}
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueMIPS_OpMIPSSLL rewrites MIPS SLL (shift left logical) with a
// constant shift amount. The over-shift rule (count >= 32 yields 0) must be
// tried before the general SLLconst rule so SLLconst only ever sees counts
// in [0, 31]. Reports whether v was rewritten.
func rewriteValueMIPS_OpMIPSSLL(v *Value, config *Config) bool {
	b := v.Block
	_ = b // kept for uniformity with generated rewrite functions that need b
	// match: (SLL _ (MOVWconst [c]))
	// cond: uint32(c)>=32
	// result: (MOVWconst [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSMOVWconst {
			break
		}
		c := v_1.AuxInt
		if !(uint32(c) >= 32) {
			break
		}
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = 0
		return true
	}
	// match: (SLL x (MOVWconst [c]))
	// cond:
	// result: (SLLconst x [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSMOVWconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpMIPSSLLconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS_OpMIPSSLLconst constant-folds MIPS SLLconst applied to a
// constant operand, computing the 32-bit left shift and sign-extending the
// result into the 64-bit AuxInt. Reports whether v was rewritten.
func rewriteValueMIPS_OpMIPSSLLconst(v *Value, config *Config) bool {
	b := v.Block
	_ = b // kept for uniformity with generated rewrite functions that need b
	// match: (SLLconst [c] (MOVWconst [d]))
	// cond:
	// result: (MOVWconst [int64(int32(uint32(d)<<uint32(c)))])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPSMOVWconst)
		// Shift in uint32 to get 32-bit wraparound, then sign-extend.
		v.AuxInt = int64(int32(uint32(d) << uint32(c)))
		return true
	}
	return false
}
// rewriteValueMIPS_OpMIPSSRA rewrites MIPS SRA (shift right arithmetic)
// with a constant shift amount. The over-shift rule (count >= 32 clamps to
// 31, replicating the sign bit) must be tried before the general SRAconst
// rule so SRAconst only ever sees counts in [0, 31]. Reports whether v was
// rewritten.
func rewriteValueMIPS_OpMIPSSRA(v *Value, config *Config) bool {
	b := v.Block
	_ = b // kept for uniformity with generated rewrite functions that need b
	// match: (SRA x (MOVWconst [c]))
	// cond: uint32(c)>=32
	// result: (SRAconst x [31])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSMOVWconst {
			break
		}
		c := v_1.AuxInt
		if !(uint32(c) >= 32) {
			break
		}
		v.reset(OpMIPSSRAconst)
		v.AuxInt = 31
		v.AddArg(x)
		return true
	}
	// match: (SRA x (MOVWconst [c]))
	// cond:
	// result: (SRAconst x [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSMOVWconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpMIPSSRAconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS_OpMIPSSRAconst constant-folds MIPS SRAconst applied to a
// constant operand, performing an arithmetic (sign-propagating) right shift
// in int32. Reports whether v was rewritten.
func rewriteValueMIPS_OpMIPSSRAconst(v *Value, config *Config) bool {
	b := v.Block
	_ = b // kept for uniformity with generated rewrite functions that need b
	// match: (SRAconst [c] (MOVWconst [d]))
	// cond:
	// result: (MOVWconst [int64(int32(d)>>uint32(c))])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPSMOVWconst)
		// Signed shift of int32 gives the arithmetic-shift semantics.
		v.AuxInt = int64(int32(d) >> uint32(c))
		return true
	}
	return false
}
// rewriteValueMIPS_OpMIPSSRL rewrites MIPS SRL (shift right logical) with a
// constant shift amount. The over-shift rule (count >= 32 yields 0) must be
// tried before the general SRLconst rule so SRLconst only ever sees counts
// in [0, 31]. Reports whether v was rewritten.
func rewriteValueMIPS_OpMIPSSRL(v *Value, config *Config) bool {
	b := v.Block
	_ = b // kept for uniformity with generated rewrite functions that need b
	// match: (SRL _ (MOVWconst [c]))
	// cond: uint32(c)>=32
	// result: (MOVWconst [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSMOVWconst {
			break
		}
		c := v_1.AuxInt
		if !(uint32(c) >= 32) {
			break
		}
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = 0
		return true
	}
	// match: (SRL x (MOVWconst [c]))
	// cond:
	// result: (SRLconst x [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSMOVWconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpMIPSSRLconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS_OpMIPSSRLconst constant-folds MIPS SRLconst applied to a
// constant operand, performing a logical (zero-filling) right shift in
// uint32. Reports whether v was rewritten.
func rewriteValueMIPS_OpMIPSSRLconst(v *Value, config *Config) bool {
	b := v.Block
	_ = b // kept for uniformity with generated rewrite functions that need b
	// match: (SRLconst [c] (MOVWconst [d]))
	// cond:
	// result: (MOVWconst [int64(uint32(d)>>uint32(c))])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPSMOVWconst)
		// Unsigned shift of uint32 gives the logical-shift semantics.
		v.AuxInt = int64(uint32(d) >> uint32(c))
		return true
	}
	return false
}
// rewriteValueMIPS_OpMIPSSUB simplifies MIPS SUB: a constant subtrahend
// becomes SUBconst, x-x folds to 0, and 0-x becomes NEG. Reports whether
// v was rewritten.
func rewriteValueMIPS_OpMIPSSUB(v *Value, config *Config) bool {
	b := v.Block
	_ = b // kept for uniformity with generated rewrite functions that need b
	// match: (SUB x (MOVWconst [c]))
	// cond:
	// result: (SUBconst [c] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSMOVWconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpMIPSSUBconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (SUB x x)
	// cond:
	// result: (MOVWconst [0])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = 0
		return true
	}
	// match: (SUB (MOVWconst [0]) x)
	// cond:
	// result: (NEG x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWconst {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		x := v.Args[1]
		v.reset(OpMIPSNEG)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS_OpMIPSSUBconst simplifies MIPS SUBconst: subtracting 0
// is the identity, a constant operand is folded, and chained SUBconst /
// ADDconst are merged into a single ADDconst (all arithmetic wraps at 32
// bits via the int32 conversion). Reports whether v was rewritten.
func rewriteValueMIPS_OpMIPSSUBconst(v *Value, config *Config) bool {
	b := v.Block
	_ = b // kept for uniformity with generated rewrite functions that need b
	// match: (SUBconst [0]  x)
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (SUBconst [c] (MOVWconst [d]))
	// cond:
	// result: (MOVWconst [int64(int32(d-c))])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = int64(int32(d - c))
		return true
	}
	// match: (SUBconst [c] (SUBconst [d] x))
	// cond:
	// result: (ADDconst [int64(int32(-c-d))] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSSUBconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpMIPSADDconst)
		// (x-d)-c == x + (-c-d)
		v.AuxInt = int64(int32(-c - d))
		v.AddArg(x)
		return true
	}
	// match: (SUBconst [c] (ADDconst [d] x))
	// cond:
	// result: (ADDconst [int64(int32(-c+d))] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSADDconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpMIPSADDconst)
		// (x+d)-c == x + (d-c)
		v.AuxInt = int64(int32(-c + d))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS_OpMIPSXOR simplifies MIPS XOR: a constant operand on
// either side becomes XORconst (XOR is commutative, so both argument
// orders are matched), and x^x folds to 0. Reports whether v was
// rewritten.
func rewriteValueMIPS_OpMIPSXOR(v *Value, config *Config) bool {
	b := v.Block
	_ = b // kept for uniformity with generated rewrite functions that need b
	// match: (XOR (MOVWconst [c]) x)
	// cond:
	// result: (XORconst [c] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpMIPSXORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (XOR x (MOVWconst [c]))
	// cond:
	// result: (XORconst [c] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSMOVWconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpMIPSXORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (XOR x x)
	// cond:
	// result: (MOVWconst [0])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueMIPS_OpMIPSXORconst simplifies MIPS XORconst: XOR with 0 is
// the identity, XOR with -1 is bitwise NOT (lowered to NORconst [0]),
// constant operands are folded, and chained XORconsts are merged.
// Reports whether v was rewritten.
func rewriteValueMIPS_OpMIPSXORconst(v *Value, config *Config) bool {
	b := v.Block
	_ = b // kept for uniformity with generated rewrite functions that need b
	// match: (XORconst [0]  x)
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (XORconst [-1] x)
	// cond:
	// result: (NORconst [0] x)
	for {
		if v.AuxInt != -1 {
			break
		}
		x := v.Args[0]
		// x ^ -1 == ^x == NOR(x, 0) on MIPS.
		v.reset(OpMIPSNORconst)
		v.AuxInt = 0
		v.AddArg(x)
		return true
	}
	// match: (XORconst [c] (MOVWconst [d]))
	// cond:
	// result: (MOVWconst [c^d])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = c ^ d
		return true
	}
	// match: (XORconst [c] (XORconst [d] x))
	// cond:
	// result: (XORconst [c^d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSXORconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpMIPSXORconst)
		v.AuxInt = c ^ d
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS_OpMod16 lowers the generic Mod16 op: both operands are
// sign-extended to 32 bits and divided with the MIPS DIV instruction,
// whose tuple result carries the remainder in Select0. Always rewrites.
func rewriteValueMIPS_OpMod16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mod16 x y)
	// cond:
	// result: (Select0 (DIV (SignExt16to32 x) (SignExt16to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPSDIV, MakeTuple(config.fe.TypeInt32(), config.fe.TypeInt32()))
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, config.fe.TypeInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt16to32, config.fe.TypeInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS_OpMod16u lowers the generic Mod16u op: both operands are
// zero-extended to 32 bits and divided with the MIPS DIVU instruction,
// whose tuple result carries the remainder in Select0. Always rewrites.
func rewriteValueMIPS_OpMod16u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mod16u x y)
	// cond:
	// result: (Select0 (DIVU (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPSDIVU, MakeTuple(config.fe.TypeUInt32(), config.fe.TypeUInt32()))
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS_OpMod32 lowers the generic Mod32 op to the MIPS DIV
// instruction, whose tuple result carries the remainder in Select0.
// Always rewrites.
func rewriteValueMIPS_OpMod32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mod32 x y)
	// cond:
	// result: (Select0 (DIV x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPSDIV, MakeTuple(config.fe.TypeInt32(), config.fe.TypeInt32()))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS_OpMod32u lowers the generic Mod32u op to the MIPS DIVU
// instruction, whose tuple result carries the remainder in Select0.
// Always rewrites.
func rewriteValueMIPS_OpMod32u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mod32u x y)
	// cond:
	// result: (Select0 (DIVU x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPSDIVU, MakeTuple(config.fe.TypeUInt32(), config.fe.TypeUInt32()))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS_OpMod8 lowers the generic Mod8 op: both operands are
// sign-extended to 32 bits and divided with the MIPS DIV instruction,
// whose tuple result carries the remainder in Select0. Always rewrites.
func rewriteValueMIPS_OpMod8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mod8 x y)
	// cond:
	// result: (Select0 (DIV (SignExt8to32 x) (SignExt8to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPSDIV, MakeTuple(config.fe.TypeInt32(), config.fe.TypeInt32()))
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, config.fe.TypeInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt8to32, config.fe.TypeInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS_OpMod8u lowers the generic Mod8u op: both operands are
// zero-extended to 32 bits and divided with the MIPS DIVU instruction,
// whose tuple result carries the remainder in Select0. Always rewrites.
func rewriteValueMIPS_OpMod8u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mod8u x y)
	// cond:
	// result: (Select0 (DIVU (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPSDIVU, MakeTuple(config.fe.TypeUInt32(), config.fe.TypeUInt32()))
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
  6189  func rewriteValueMIPS_OpMove(v *Value, config *Config) bool {
  6190  	b := v.Block
  6191  	_ = b
  6192  	// match: (Move [s] _ _ mem)
  6193  	// cond: SizeAndAlign(s).Size() == 0
  6194  	// result: mem
  6195  	for {
  6196  		s := v.AuxInt
  6197  		mem := v.Args[2]
  6198  		if !(SizeAndAlign(s).Size() == 0) {
  6199  			break
  6200  		}
  6201  		v.reset(OpCopy)
  6202  		v.Type = mem.Type
  6203  		v.AddArg(mem)
  6204  		return true
  6205  	}
  6206  	// match: (Move [s] dst src mem)
  6207  	// cond: SizeAndAlign(s).Size() == 1
  6208  	// result: (MOVBstore dst (MOVBUload src mem) mem)
  6209  	for {
  6210  		s := v.AuxInt
  6211  		dst := v.Args[0]
  6212  		src := v.Args[1]
  6213  		mem := v.Args[2]
  6214  		if !(SizeAndAlign(s).Size() == 1) {
  6215  			break
  6216  		}
  6217  		v.reset(OpMIPSMOVBstore)
  6218  		v.AddArg(dst)
  6219  		v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, config.fe.TypeUInt8())
  6220  		v0.AddArg(src)
  6221  		v0.AddArg(mem)
  6222  		v.AddArg(v0)
  6223  		v.AddArg(mem)
  6224  		return true
  6225  	}
  6226  	// match: (Move [s] dst src mem)
  6227  	// cond: SizeAndAlign(s).Size() == 2 && SizeAndAlign(s).Align()%2 == 0
  6228  	// result: (MOVHstore dst (MOVHUload src mem) mem)
  6229  	for {
  6230  		s := v.AuxInt
  6231  		dst := v.Args[0]
  6232  		src := v.Args[1]
  6233  		mem := v.Args[2]
  6234  		if !(SizeAndAlign(s).Size() == 2 && SizeAndAlign(s).Align()%2 == 0) {
  6235  			break
  6236  		}
  6237  		v.reset(OpMIPSMOVHstore)
  6238  		v.AddArg(dst)
  6239  		v0 := b.NewValue0(v.Pos, OpMIPSMOVHUload, config.fe.TypeUInt16())
  6240  		v0.AddArg(src)
  6241  		v0.AddArg(mem)
  6242  		v.AddArg(v0)
  6243  		v.AddArg(mem)
  6244  		return true
  6245  	}
  6246  	// match: (Move [s] dst src mem)
  6247  	// cond: SizeAndAlign(s).Size() == 2
  6248  	// result: (MOVBstore [1] dst (MOVBUload [1] src mem) 		(MOVBstore dst (MOVBUload src mem) mem))
  6249  	for {
  6250  		s := v.AuxInt
  6251  		dst := v.Args[0]
  6252  		src := v.Args[1]
  6253  		mem := v.Args[2]
  6254  		if !(SizeAndAlign(s).Size() == 2) {
  6255  			break
  6256  		}
  6257  		v.reset(OpMIPSMOVBstore)
  6258  		v.AuxInt = 1
  6259  		v.AddArg(dst)
  6260  		v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, config.fe.TypeUInt8())
  6261  		v0.AuxInt = 1
  6262  		v0.AddArg(src)
  6263  		v0.AddArg(mem)
  6264  		v.AddArg(v0)
  6265  		v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, TypeMem)
  6266  		v1.AddArg(dst)
  6267  		v2 := b.NewValue0(v.Pos, OpMIPSMOVBUload, config.fe.TypeUInt8())
  6268  		v2.AddArg(src)
  6269  		v2.AddArg(mem)
  6270  		v1.AddArg(v2)
  6271  		v1.AddArg(mem)
  6272  		v.AddArg(v1)
  6273  		return true
  6274  	}
  6275  	// match: (Move [s] dst src mem)
  6276  	// cond: SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%4 == 0
  6277  	// result: (MOVWstore dst (MOVWload src mem) mem)
  6278  	for {
  6279  		s := v.AuxInt
  6280  		dst := v.Args[0]
  6281  		src := v.Args[1]
  6282  		mem := v.Args[2]
  6283  		if !(SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%4 == 0) {
  6284  			break
  6285  		}
  6286  		v.reset(OpMIPSMOVWstore)
  6287  		v.AddArg(dst)
  6288  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWload, config.fe.TypeUInt32())
  6289  		v0.AddArg(src)
  6290  		v0.AddArg(mem)
  6291  		v.AddArg(v0)
  6292  		v.AddArg(mem)
  6293  		return true
  6294  	}
  6295  	// match: (Move [s] dst src mem)
  6296  	// cond: SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%2 == 0
  6297  	// result: (MOVHstore [2] dst (MOVHUload [2] src mem) 		(MOVHstore dst (MOVHUload src mem) mem))
  6298  	for {
  6299  		s := v.AuxInt
  6300  		dst := v.Args[0]
  6301  		src := v.Args[1]
  6302  		mem := v.Args[2]
  6303  		if !(SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%2 == 0) {
  6304  			break
  6305  		}
  6306  		v.reset(OpMIPSMOVHstore)
  6307  		v.AuxInt = 2
  6308  		v.AddArg(dst)
  6309  		v0 := b.NewValue0(v.Pos, OpMIPSMOVHUload, config.fe.TypeUInt16())
  6310  		v0.AuxInt = 2
  6311  		v0.AddArg(src)
  6312  		v0.AddArg(mem)
  6313  		v.AddArg(v0)
  6314  		v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, TypeMem)
  6315  		v1.AddArg(dst)
  6316  		v2 := b.NewValue0(v.Pos, OpMIPSMOVHUload, config.fe.TypeUInt16())
  6317  		v2.AddArg(src)
  6318  		v2.AddArg(mem)
  6319  		v1.AddArg(v2)
  6320  		v1.AddArg(mem)
  6321  		v.AddArg(v1)
  6322  		return true
  6323  	}
  6324  	// match: (Move [s] dst src mem)
  6325  	// cond: SizeAndAlign(s).Size() == 4
  6326  	// result: (MOVBstore [3] dst (MOVBUload [3] src mem) 		(MOVBstore [2] dst (MOVBUload [2] src mem) 			(MOVBstore [1] dst (MOVBUload [1] src mem) 				(MOVBstore dst (MOVBUload src mem) mem))))
  6327  	for {
  6328  		s := v.AuxInt
  6329  		dst := v.Args[0]
  6330  		src := v.Args[1]
  6331  		mem := v.Args[2]
  6332  		if !(SizeAndAlign(s).Size() == 4) {
  6333  			break
  6334  		}
  6335  		v.reset(OpMIPSMOVBstore)
  6336  		v.AuxInt = 3
  6337  		v.AddArg(dst)
  6338  		v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, config.fe.TypeUInt8())
  6339  		v0.AuxInt = 3
  6340  		v0.AddArg(src)
  6341  		v0.AddArg(mem)
  6342  		v.AddArg(v0)
  6343  		v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, TypeMem)
  6344  		v1.AuxInt = 2
  6345  		v1.AddArg(dst)
  6346  		v2 := b.NewValue0(v.Pos, OpMIPSMOVBUload, config.fe.TypeUInt8())
  6347  		v2.AuxInt = 2
  6348  		v2.AddArg(src)
  6349  		v2.AddArg(mem)
  6350  		v1.AddArg(v2)
  6351  		v3 := b.NewValue0(v.Pos, OpMIPSMOVBstore, TypeMem)
  6352  		v3.AuxInt = 1
  6353  		v3.AddArg(dst)
  6354  		v4 := b.NewValue0(v.Pos, OpMIPSMOVBUload, config.fe.TypeUInt8())
  6355  		v4.AuxInt = 1
  6356  		v4.AddArg(src)
  6357  		v4.AddArg(mem)
  6358  		v3.AddArg(v4)
  6359  		v5 := b.NewValue0(v.Pos, OpMIPSMOVBstore, TypeMem)
  6360  		v5.AddArg(dst)
  6361  		v6 := b.NewValue0(v.Pos, OpMIPSMOVBUload, config.fe.TypeUInt8())
  6362  		v6.AddArg(src)
  6363  		v6.AddArg(mem)
  6364  		v5.AddArg(v6)
  6365  		v5.AddArg(mem)
  6366  		v3.AddArg(v5)
  6367  		v1.AddArg(v3)
  6368  		v.AddArg(v1)
  6369  		return true
  6370  	}
  6371  	// match: (Move [s] dst src mem)
  6372  	// cond: SizeAndAlign(s).Size() == 3
  6373  	// result: (MOVBstore [2] dst (MOVBUload [2] src mem) 		(MOVBstore [1] dst (MOVBUload [1] src mem) 			(MOVBstore dst (MOVBUload src mem) mem)))
  6374  	for {
  6375  		s := v.AuxInt
  6376  		dst := v.Args[0]
  6377  		src := v.Args[1]
  6378  		mem := v.Args[2]
  6379  		if !(SizeAndAlign(s).Size() == 3) {
  6380  			break
  6381  		}
  6382  		v.reset(OpMIPSMOVBstore)
  6383  		v.AuxInt = 2
  6384  		v.AddArg(dst)
  6385  		v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, config.fe.TypeUInt8())
  6386  		v0.AuxInt = 2
  6387  		v0.AddArg(src)
  6388  		v0.AddArg(mem)
  6389  		v.AddArg(v0)
  6390  		v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, TypeMem)
  6391  		v1.AuxInt = 1
  6392  		v1.AddArg(dst)
  6393  		v2 := b.NewValue0(v.Pos, OpMIPSMOVBUload, config.fe.TypeUInt8())
  6394  		v2.AuxInt = 1
  6395  		v2.AddArg(src)
  6396  		v2.AddArg(mem)
  6397  		v1.AddArg(v2)
  6398  		v3 := b.NewValue0(v.Pos, OpMIPSMOVBstore, TypeMem)
  6399  		v3.AddArg(dst)
  6400  		v4 := b.NewValue0(v.Pos, OpMIPSMOVBUload, config.fe.TypeUInt8())
  6401  		v4.AddArg(src)
  6402  		v4.AddArg(mem)
  6403  		v3.AddArg(v4)
  6404  		v3.AddArg(mem)
  6405  		v1.AddArg(v3)
  6406  		v.AddArg(v1)
  6407  		return true
  6408  	}
  6409  	// match: (Move [s] dst src mem)
  6410  	// cond: SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%4 == 0
  6411  	// result: (MOVWstore [4] dst (MOVWload [4] src mem) 		(MOVWstore dst (MOVWload src mem) mem))
  6412  	for {
  6413  		s := v.AuxInt
  6414  		dst := v.Args[0]
  6415  		src := v.Args[1]
  6416  		mem := v.Args[2]
  6417  		if !(SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%4 == 0) {
  6418  			break
  6419  		}
  6420  		v.reset(OpMIPSMOVWstore)
  6421  		v.AuxInt = 4
  6422  		v.AddArg(dst)
  6423  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWload, config.fe.TypeUInt32())
  6424  		v0.AuxInt = 4
  6425  		v0.AddArg(src)
  6426  		v0.AddArg(mem)
  6427  		v.AddArg(v0)
  6428  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, TypeMem)
  6429  		v1.AddArg(dst)
  6430  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWload, config.fe.TypeUInt32())
  6431  		v2.AddArg(src)
  6432  		v2.AddArg(mem)
  6433  		v1.AddArg(v2)
  6434  		v1.AddArg(mem)
  6435  		v.AddArg(v1)
  6436  		return true
  6437  	}
  6438  	// match: (Move [s] dst src mem)
  6439  	// cond: SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%2 == 0
  6440  	// result: (MOVHstore [6] dst (MOVHload [6] src mem) 		(MOVHstore [4] dst (MOVHload [4] src mem) 			(MOVHstore [2] dst (MOVHload [2] src mem) 				(MOVHstore dst (MOVHload src mem) mem))))
  6441  	for {
  6442  		s := v.AuxInt
  6443  		dst := v.Args[0]
  6444  		src := v.Args[1]
  6445  		mem := v.Args[2]
  6446  		if !(SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%2 == 0) {
  6447  			break
  6448  		}
  6449  		v.reset(OpMIPSMOVHstore)
  6450  		v.AuxInt = 6
  6451  		v.AddArg(dst)
  6452  		v0 := b.NewValue0(v.Pos, OpMIPSMOVHload, config.fe.TypeInt16())
  6453  		v0.AuxInt = 6
  6454  		v0.AddArg(src)
  6455  		v0.AddArg(mem)
  6456  		v.AddArg(v0)
  6457  		v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, TypeMem)
  6458  		v1.AuxInt = 4
  6459  		v1.AddArg(dst)
  6460  		v2 := b.NewValue0(v.Pos, OpMIPSMOVHload, config.fe.TypeInt16())
  6461  		v2.AuxInt = 4
  6462  		v2.AddArg(src)
  6463  		v2.AddArg(mem)
  6464  		v1.AddArg(v2)
  6465  		v3 := b.NewValue0(v.Pos, OpMIPSMOVHstore, TypeMem)
  6466  		v3.AuxInt = 2
  6467  		v3.AddArg(dst)
  6468  		v4 := b.NewValue0(v.Pos, OpMIPSMOVHload, config.fe.TypeInt16())
  6469  		v4.AuxInt = 2
  6470  		v4.AddArg(src)
  6471  		v4.AddArg(mem)
  6472  		v3.AddArg(v4)
  6473  		v5 := b.NewValue0(v.Pos, OpMIPSMOVHstore, TypeMem)
  6474  		v5.AddArg(dst)
  6475  		v6 := b.NewValue0(v.Pos, OpMIPSMOVHload, config.fe.TypeInt16())
  6476  		v6.AddArg(src)
  6477  		v6.AddArg(mem)
  6478  		v5.AddArg(v6)
  6479  		v5.AddArg(mem)
  6480  		v3.AddArg(v5)
  6481  		v1.AddArg(v3)
  6482  		v.AddArg(v1)
  6483  		return true
  6484  	}
  6485  	// match: (Move [s] dst src mem)
  6486  	// cond: SizeAndAlign(s).Size() == 6 && SizeAndAlign(s).Align()%2 == 0
  6487  	// result: (MOVHstore [4] dst (MOVHload [4] src mem) 		(MOVHstore [2] dst (MOVHload [2] src mem) 			(MOVHstore dst (MOVHload src mem) mem)))
  6488  	for {
  6489  		s := v.AuxInt
  6490  		dst := v.Args[0]
  6491  		src := v.Args[1]
  6492  		mem := v.Args[2]
  6493  		if !(SizeAndAlign(s).Size() == 6 && SizeAndAlign(s).Align()%2 == 0) {
  6494  			break
  6495  		}
  6496  		v.reset(OpMIPSMOVHstore)
  6497  		v.AuxInt = 4
  6498  		v.AddArg(dst)
  6499  		v0 := b.NewValue0(v.Pos, OpMIPSMOVHload, config.fe.TypeInt16())
  6500  		v0.AuxInt = 4
  6501  		v0.AddArg(src)
  6502  		v0.AddArg(mem)
  6503  		v.AddArg(v0)
  6504  		v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, TypeMem)
  6505  		v1.AuxInt = 2
  6506  		v1.AddArg(dst)
  6507  		v2 := b.NewValue0(v.Pos, OpMIPSMOVHload, config.fe.TypeInt16())
  6508  		v2.AuxInt = 2
  6509  		v2.AddArg(src)
  6510  		v2.AddArg(mem)
  6511  		v1.AddArg(v2)
  6512  		v3 := b.NewValue0(v.Pos, OpMIPSMOVHstore, TypeMem)
  6513  		v3.AddArg(dst)
  6514  		v4 := b.NewValue0(v.Pos, OpMIPSMOVHload, config.fe.TypeInt16())
  6515  		v4.AddArg(src)
  6516  		v4.AddArg(mem)
  6517  		v3.AddArg(v4)
  6518  		v3.AddArg(mem)
  6519  		v1.AddArg(v3)
  6520  		v.AddArg(v1)
  6521  		return true
  6522  	}
  6523  	// match: (Move [s] dst src mem)
  6524  	// cond: SizeAndAlign(s).Size() == 12 && SizeAndAlign(s).Align()%4 == 0
  6525  	// result: (MOVWstore [8] dst (MOVWload [8] src mem) 		(MOVWstore [4] dst (MOVWload [4] src mem) 			(MOVWstore dst (MOVWload src mem) mem)))
  6526  	for {
  6527  		s := v.AuxInt
  6528  		dst := v.Args[0]
  6529  		src := v.Args[1]
  6530  		mem := v.Args[2]
  6531  		if !(SizeAndAlign(s).Size() == 12 && SizeAndAlign(s).Align()%4 == 0) {
  6532  			break
  6533  		}
  6534  		v.reset(OpMIPSMOVWstore)
  6535  		v.AuxInt = 8
  6536  		v.AddArg(dst)
  6537  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWload, config.fe.TypeUInt32())
  6538  		v0.AuxInt = 8
  6539  		v0.AddArg(src)
  6540  		v0.AddArg(mem)
  6541  		v.AddArg(v0)
  6542  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, TypeMem)
  6543  		v1.AuxInt = 4
  6544  		v1.AddArg(dst)
  6545  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWload, config.fe.TypeUInt32())
  6546  		v2.AuxInt = 4
  6547  		v2.AddArg(src)
  6548  		v2.AddArg(mem)
  6549  		v1.AddArg(v2)
  6550  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWstore, TypeMem)
  6551  		v3.AddArg(dst)
  6552  		v4 := b.NewValue0(v.Pos, OpMIPSMOVWload, config.fe.TypeUInt32())
  6553  		v4.AddArg(src)
  6554  		v4.AddArg(mem)
  6555  		v3.AddArg(v4)
  6556  		v3.AddArg(mem)
  6557  		v1.AddArg(v3)
  6558  		v.AddArg(v1)
  6559  		return true
  6560  	}
  6561  	// match: (Move [s] dst src mem)
  6562  	// cond: SizeAndAlign(s).Size() == 16 && SizeAndAlign(s).Align()%4 == 0
  6563  	// result: (MOVWstore [12] dst (MOVWload [12] src mem) 		(MOVWstore [8] dst (MOVWload [8] src mem) 			(MOVWstore [4] dst (MOVWload [4] src mem) 				(MOVWstore dst (MOVWload src mem) mem))))
  6564  	for {
  6565  		s := v.AuxInt
  6566  		dst := v.Args[0]
  6567  		src := v.Args[1]
  6568  		mem := v.Args[2]
  6569  		if !(SizeAndAlign(s).Size() == 16 && SizeAndAlign(s).Align()%4 == 0) {
  6570  			break
  6571  		}
  6572  		v.reset(OpMIPSMOVWstore)
  6573  		v.AuxInt = 12
  6574  		v.AddArg(dst)
  6575  		v0 := b.NewValue0(v.Pos, OpMIPSMOVWload, config.fe.TypeUInt32())
  6576  		v0.AuxInt = 12
  6577  		v0.AddArg(src)
  6578  		v0.AddArg(mem)
  6579  		v.AddArg(v0)
  6580  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, TypeMem)
  6581  		v1.AuxInt = 8
  6582  		v1.AddArg(dst)
  6583  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWload, config.fe.TypeUInt32())
  6584  		v2.AuxInt = 8
  6585  		v2.AddArg(src)
  6586  		v2.AddArg(mem)
  6587  		v1.AddArg(v2)
  6588  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWstore, TypeMem)
  6589  		v3.AuxInt = 4
  6590  		v3.AddArg(dst)
  6591  		v4 := b.NewValue0(v.Pos, OpMIPSMOVWload, config.fe.TypeUInt32())
  6592  		v4.AuxInt = 4
  6593  		v4.AddArg(src)
  6594  		v4.AddArg(mem)
  6595  		v3.AddArg(v4)
  6596  		v5 := b.NewValue0(v.Pos, OpMIPSMOVWstore, TypeMem)
  6597  		v5.AddArg(dst)
  6598  		v6 := b.NewValue0(v.Pos, OpMIPSMOVWload, config.fe.TypeUInt32())
  6599  		v6.AddArg(src)
  6600  		v6.AddArg(mem)
  6601  		v5.AddArg(v6)
  6602  		v5.AddArg(mem)
  6603  		v3.AddArg(v5)
  6604  		v1.AddArg(v3)
  6605  		v.AddArg(v1)
  6606  		return true
  6607  	}
  6608  	// match: (Move [s] dst src mem)
  6609  	// cond: (SizeAndAlign(s).Size() > 16 || SizeAndAlign(s).Align()%4 != 0)
  6610  	// result: (LoweredMove [SizeAndAlign(s).Align()] 		dst 		src 		(ADDconst <src.Type> src [SizeAndAlign(s).Size()-moveSize(SizeAndAlign(s).Align(), config)]) 		mem)
  6611  	for {
  6612  		s := v.AuxInt
  6613  		dst := v.Args[0]
  6614  		src := v.Args[1]
  6615  		mem := v.Args[2]
  6616  		if !(SizeAndAlign(s).Size() > 16 || SizeAndAlign(s).Align()%4 != 0) {
  6617  			break
  6618  		}
  6619  		v.reset(OpMIPSLoweredMove)
  6620  		v.AuxInt = SizeAndAlign(s).Align()
  6621  		v.AddArg(dst)
  6622  		v.AddArg(src)
  6623  		v0 := b.NewValue0(v.Pos, OpMIPSADDconst, src.Type)
  6624  		v0.AuxInt = SizeAndAlign(s).Size() - moveSize(SizeAndAlign(s).Align(), config)
  6625  		v0.AddArg(src)
  6626  		v.AddArg(v0)
  6627  		v.AddArg(mem)
  6628  		return true
  6629  	}
  6630  	return false
  6631  }
// rewriteValueMIPS_OpMul16 lowers the generic Mul16 op to the MIPS MUL op.
// Autogenerated from gen/MIPS.rules; do not edit by hand.
func rewriteValueMIPS_OpMul16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mul16 x y)
	// cond:
	// result: (MUL x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSMUL)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS_OpMul32 lowers the generic Mul32 op to the MIPS MUL op.
// Autogenerated from gen/MIPS.rules; do not edit by hand.
func rewriteValueMIPS_OpMul32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mul32 x y)
	// cond:
	// result: (MUL x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSMUL)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS_OpMul32F lowers Mul32F to the MIPS single-precision MULF op.
// Autogenerated from gen/MIPS.rules; do not edit by hand.
func rewriteValueMIPS_OpMul32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mul32F x y)
	// cond:
	// result: (MULF x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSMULF)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS_OpMul32uhilo lowers Mul32uhilo to the MIPS unsigned MULTU op.
// Autogenerated from gen/MIPS.rules; do not edit by hand.
func rewriteValueMIPS_OpMul32uhilo(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mul32uhilo x y)
	// cond:
	// result: (MULTU x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSMULTU)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS_OpMul64F lowers Mul64F to the MIPS double-precision MULD op.
// Autogenerated from gen/MIPS.rules; do not edit by hand.
func rewriteValueMIPS_OpMul64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mul64F x y)
	// cond:
	// result: (MULD x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSMULD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS_OpMul8 lowers the generic Mul8 op to the MIPS MUL op.
// Autogenerated from gen/MIPS.rules; do not edit by hand.
func rewriteValueMIPS_OpMul8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Mul8 x y)
	// cond:
	// result: (MUL x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSMUL)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS_OpNeg16 lowers Neg16 to the MIPS NEG op.
// Autogenerated from gen/MIPS.rules; do not edit by hand.
func rewriteValueMIPS_OpNeg16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Neg16 x)
	// cond:
	// result: (NEG x)
	for {
		x := v.Args[0]
		v.reset(OpMIPSNEG)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS_OpNeg32 lowers Neg32 to the MIPS NEG op.
// Autogenerated from gen/MIPS.rules; do not edit by hand.
func rewriteValueMIPS_OpNeg32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Neg32 x)
	// cond:
	// result: (NEG x)
	for {
		x := v.Args[0]
		v.reset(OpMIPSNEG)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS_OpNeg32F lowers Neg32F to the MIPS single-precision NEGF op.
// Autogenerated from gen/MIPS.rules; do not edit by hand.
func rewriteValueMIPS_OpNeg32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Neg32F x)
	// cond:
	// result: (NEGF x)
	for {
		x := v.Args[0]
		v.reset(OpMIPSNEGF)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS_OpNeg64F lowers Neg64F to the MIPS double-precision NEGD op.
// Autogenerated from gen/MIPS.rules; do not edit by hand.
func rewriteValueMIPS_OpNeg64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Neg64F x)
	// cond:
	// result: (NEGD x)
	for {
		x := v.Args[0]
		v.reset(OpMIPSNEGD)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS_OpNeg8 lowers Neg8 to the MIPS NEG op.
// Autogenerated from gen/MIPS.rules; do not edit by hand.
func rewriteValueMIPS_OpNeg8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Neg8 x)
	// cond:
	// result: (NEG x)
	for {
		x := v.Args[0]
		v.reset(OpMIPSNEG)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS_OpNeq16 lowers Neq16: both operands are zero-extended to 32
// bits, XORed, and the result is compared unsigned-greater-than against the
// constant 0 (SGTU), yielding 1 iff the operands differ.
// Autogenerated from gen/MIPS.rules; do not edit by hand.
func rewriteValueMIPS_OpNeq16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Neq16 x y)
	// cond:
	// result: (SGTU (XOR (ZeroExt16to32 x) (ZeroExt16to32 y)) (MOVWconst [0]))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSSGTU)
		v0 := b.NewValue0(v.Pos, OpMIPSXOR, config.fe.TypeUInt32())
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v3.AuxInt = 0
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS_OpNeq32 lowers Neq32 as (SGTU (XOR x y) (MOVWconst [0])):
// the XOR is nonzero exactly when x != y.
// Autogenerated from gen/MIPS.rules; do not edit by hand.
func rewriteValueMIPS_OpNeq32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Neq32 x y)
	// cond:
	// result: (SGTU (XOR x y) (MOVWconst [0]))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSSGTU)
		v0 := b.NewValue0(v.Pos, OpMIPSXOR, config.fe.TypeUInt32())
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v1.AuxInt = 0
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS_OpNeq32F lowers Neq32F: compare with CMPEQF and take the
// negation of the FP condition flag (FPFlagFalse).
// Autogenerated from gen/MIPS.rules; do not edit by hand.
func rewriteValueMIPS_OpNeq32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Neq32F x y)
	// cond:
	// result: (FPFlagFalse (CMPEQF x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSFPFlagFalse)
		v0 := b.NewValue0(v.Pos, OpMIPSCMPEQF, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS_OpNeq64F lowers Neq64F: compare with CMPEQD and take the
// negation of the FP condition flag (FPFlagFalse).
// Autogenerated from gen/MIPS.rules; do not edit by hand.
func rewriteValueMIPS_OpNeq64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Neq64F x y)
	// cond:
	// result: (FPFlagFalse (CMPEQD x y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSFPFlagFalse)
		v0 := b.NewValue0(v.Pos, OpMIPSCMPEQD, TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS_OpNeq8 lowers Neq8: both operands are zero-extended to 32
// bits, XORed, and the result is compared unsigned-greater-than against the
// constant 0 (SGTU), yielding 1 iff the operands differ.
// Autogenerated from gen/MIPS.rules; do not edit by hand.
func rewriteValueMIPS_OpNeq8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Neq8 x y)
	// cond:
	// result: (SGTU (XOR (ZeroExt8to32 x) (ZeroExt8to32 y)) (MOVWconst [0]))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSSGTU)
		v0 := b.NewValue0(v.Pos, OpMIPSXOR, config.fe.TypeUInt32())
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v3.AuxInt = 0
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS_OpNeqB lowers NeqB to XOR (booleans differ iff their XOR
// is nonzero).
// Autogenerated from gen/MIPS.rules; do not edit by hand.
func rewriteValueMIPS_OpNeqB(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (NeqB x y)
	// cond:
	// result: (XOR x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSXOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS_OpNeqPtr lowers NeqPtr the same way as Neq32:
// (SGTU (XOR x y) (MOVWconst [0])) — pointers are 32-bit words on this target.
// Autogenerated from gen/MIPS.rules; do not edit by hand.
func rewriteValueMIPS_OpNeqPtr(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (NeqPtr x y)
	// cond:
	// result: (SGTU (XOR x y) (MOVWconst [0]))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSSGTU)
		v0 := b.NewValue0(v.Pos, OpMIPSXOR, config.fe.TypeUInt32())
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v1.AuxInt = 0
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS_OpNilCheck lowers NilCheck to the MIPS LoweredNilCheck op.
// Autogenerated from gen/MIPS.rules; do not edit by hand.
func rewriteValueMIPS_OpNilCheck(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (NilCheck ptr mem)
	// cond:
	// result: (LoweredNilCheck ptr mem)
	for {
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpMIPSLoweredNilCheck)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS_OpNot lowers boolean Not to (XORconst [1] x), flipping the
// low bit.
// Autogenerated from gen/MIPS.rules; do not edit by hand.
func rewriteValueMIPS_OpNot(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Not x)
	// cond:
	// result: (XORconst [1] x)
	for {
		x := v.Args[0]
		v.reset(OpMIPSXORconst)
		v.AuxInt = 1
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS_OpOffPtr lowers OffPtr: an offset from the stack pointer
// (SP) becomes (MOVWaddr [off] ptr); any other base pointer becomes
// (ADDconst [off] ptr). The SP case is tried first.
// Autogenerated from gen/MIPS.rules; do not edit by hand.
func rewriteValueMIPS_OpOffPtr(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (OffPtr [off] ptr:(SP))
	// cond:
	// result: (MOVWaddr [off] ptr)
	for {
		off := v.AuxInt
		ptr := v.Args[0]
		if ptr.Op != OpSP {
			break
		}
		v.reset(OpMIPSMOVWaddr)
		v.AuxInt = off
		v.AddArg(ptr)
		return true
	}
	// match: (OffPtr [off] ptr)
	// cond:
	// result: (ADDconst [off] ptr)
	for {
		off := v.AuxInt
		ptr := v.Args[0]
		v.reset(OpMIPSADDconst)
		v.AuxInt = off
		v.AddArg(ptr)
		return true
	}
}
// rewriteValueMIPS_OpOr16 lowers Or16 to the MIPS OR op.
// Autogenerated from gen/MIPS.rules; do not edit by hand.
func rewriteValueMIPS_OpOr16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Or16 x y)
	// cond:
	// result: (OR x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS_OpOr32 lowers Or32 to the MIPS OR op.
// Autogenerated from gen/MIPS.rules; do not edit by hand.
func rewriteValueMIPS_OpOr32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Or32 x y)
	// cond:
	// result: (OR x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS_OpOr8 lowers Or8 to the MIPS OR op.
// Autogenerated from gen/MIPS.rules; do not edit by hand.
func rewriteValueMIPS_OpOr8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Or8 x y)
	// cond:
	// result: (OR x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS_OpOrB lowers boolean OrB to the MIPS OR op.
// Autogenerated from gen/MIPS.rules; do not edit by hand.
func rewriteValueMIPS_OpOrB(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (OrB x y)
	// cond:
	// result: (OR x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS_OpRsh16Ux16 lowers Rsh16Ux16: an SRL of the zero-extended
// operands, wrapped in CMOVZ with guard (SGTUconst [32] (ZeroExt16to32 y)).
// When the guard is zero (shift amount not below 32) the CMOVZ selects the
// constant 0 instead of the SRL result.
// Autogenerated from gen/MIPS.rules; do not edit by hand.
func rewriteValueMIPS_OpRsh16Ux16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh16Ux16 <t> x y)
	// cond:
	// result: (CMOVZ (SRL <t> (ZeroExt16to32 x) (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSCMOVZ)
		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v3.AuxInt = 0
		v.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, config.fe.TypeBool())
		v4.AuxInt = 32
		v5 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32())
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS_OpRsh16Ux32 lowers Rsh16Ux32: an SRL of the zero-extended x
// by y, wrapped in CMOVZ with guard (SGTUconst [32] y) so the constant 0 is
// selected when the guard is zero (shift amount not below 32).
// Autogenerated from gen/MIPS.rules; do not edit by hand.
func rewriteValueMIPS_OpRsh16Ux32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh16Ux32 <t> x y)
	// cond:
	// result: (CMOVZ (SRL <t> (ZeroExt16to32 x) y) (MOVWconst [0]) (SGTUconst [32] y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSCMOVZ)
		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v0.AddArg(y)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, config.fe.TypeBool())
		v3.AuxInt = 32
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS_OpRsh16Ux64 folds Rsh16Ux64 by a 64-bit constant c:
// for c < 16 it shifts x left by 16 (SLLconst) and then logically right by
// c+16 (SRLconst), which discards the upper garbage bits; for c >= 16 the
// whole 16-bit value is shifted out and the result is the constant 0.
// Autogenerated from gen/MIPS.rules; do not edit by hand.
func rewriteValueMIPS_OpRsh16Ux64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh16Ux64 x (Const64 [c]))
	// cond: uint32(c) < 16
	// result: (SRLconst (SLLconst <config.fe.TypeUInt32()> x [16]) [c+16])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(uint32(c) < 16) {
			break
		}
		v.reset(OpMIPSSRLconst)
		v.AuxInt = c + 16
		v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, config.fe.TypeUInt32())
		v0.AuxInt = 16
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Rsh16Ux64 _ (Const64 [c]))
	// cond: uint32(c) >= 16
	// result: (MOVWconst [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(uint32(c) >= 16) {
			break
		}
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueMIPS_OpRsh16Ux8 lowers Rsh16Ux8: an SRL of the zero-extended
// operands, wrapped in CMOVZ with guard (SGTUconst [32] (ZeroExt8to32 y)) so
// the constant 0 is selected when the guard is zero (shift amount not below 32).
// Autogenerated from gen/MIPS.rules; do not edit by hand.
func rewriteValueMIPS_OpRsh16Ux8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh16Ux8 <t> x y)
	// cond:
	// result: (CMOVZ (SRL <t> (ZeroExt16to32 x) (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSCMOVZ)
		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v3.AuxInt = 0
		v.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, config.fe.TypeBool())
		v4.AuxInt = 32
		v5 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS_OpRsh16x16 lowers the signed shift Rsh16x16: an SRA of the
// sign-extended x by a CMOVZ-selected amount — the zero-extended y, or the
// constant -1 when the guard (SGTUconst [32] (ZeroExt16to32 y)) is zero
// (shift amount not below 32).
// Autogenerated from gen/MIPS.rules; do not edit by hand.
func rewriteValueMIPS_OpRsh16x16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh16x16 x y)
	// cond:
	// result: (SRA (SignExt16to32 x) ( CMOVZ <config.fe.TypeUInt32()> (ZeroExt16to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt16to32 y))))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSSRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to32, config.fe.TypeInt32())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, config.fe.TypeUInt32())
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v3.AuxInt = -1
		v1.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, config.fe.TypeBool())
		v4.AuxInt = 32
		v5 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32())
		v5.AddArg(y)
		v4.AddArg(v5)
		v1.AddArg(v4)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS_OpRsh16x32 lowers the signed shift Rsh16x32: an SRA of the
// sign-extended x by a CMOVZ-selected amount — y itself, or the constant -1
// when the guard (SGTUconst [32] y) is zero (shift amount not below 32).
// Autogenerated from gen/MIPS.rules; do not edit by hand.
func rewriteValueMIPS_OpRsh16x32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh16x32 x y)
	// cond:
	// result: (SRA (SignExt16to32 x) ( CMOVZ <config.fe.TypeUInt32()> y (MOVWconst [-1]) (SGTUconst [32] y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSSRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to32, config.fe.TypeInt32())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, config.fe.TypeUInt32())
		v1.AddArg(y)
		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v2.AuxInt = -1
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, config.fe.TypeBool())
		v3.AuxInt = 32
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS_OpRsh16x64 folds the signed shift Rsh16x64 by a 64-bit
// constant c: for c < 16 it shifts x left by 16 (SLLconst) and arithmetically
// right by c+16 (SRAconst); for c >= 16 it shifts left by 16 and then
// arithmetically right by 31, leaving the sign-fill value.
// Autogenerated from gen/MIPS.rules; do not edit by hand.
func rewriteValueMIPS_OpRsh16x64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh16x64 x (Const64 [c]))
	// cond: uint32(c) < 16
	// result: (SRAconst (SLLconst <config.fe.TypeUInt32()> x [16]) [c+16])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(uint32(c) < 16) {
			break
		}
		v.reset(OpMIPSSRAconst)
		v.AuxInt = c + 16
		v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, config.fe.TypeUInt32())
		v0.AuxInt = 16
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Rsh16x64 x (Const64 [c]))
	// cond: uint32(c) >= 16
	// result: (SRAconst (SLLconst <config.fe.TypeUInt32()> x [16]) [31])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(uint32(c) >= 16) {
			break
		}
		v.reset(OpMIPSSRAconst)
		v.AuxInt = 31
		v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, config.fe.TypeUInt32())
		v0.AuxInt = 16
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueMIPS_OpRsh16x8 lowers the signed shift Rsh16x8: an SRA of the
// sign-extended x by a CMOVZ-selected amount — the zero-extended y, or the
// constant -1 when the guard (SGTUconst [32] (ZeroExt8to32 y)) is zero
// (shift amount not below 32).
// Autogenerated from gen/MIPS.rules; do not edit by hand.
func rewriteValueMIPS_OpRsh16x8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh16x8 x y)
	// cond:
	// result: (SRA (SignExt16to32 x) ( CMOVZ <config.fe.TypeUInt32()> (ZeroExt8to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt8to32 y))))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSSRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to32, config.fe.TypeInt32())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, config.fe.TypeUInt32())
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v3.AuxInt = -1
		v1.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, config.fe.TypeBool())
		v4.AuxInt = 32
		v5 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
		v5.AddArg(y)
		v4.AddArg(v5)
		v1.AddArg(v4)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS_OpRsh32Ux16 lowers Rsh32Ux16: an SRL of x by the
// zero-extended y, wrapped in CMOVZ with guard
// (SGTUconst [32] (ZeroExt16to32 y)) so the constant 0 is selected when the
// guard is zero (shift amount not below 32).
// Autogenerated from gen/MIPS.rules; do not edit by hand.
func rewriteValueMIPS_OpRsh32Ux16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh32Ux16 <t> x y)
	// cond:
	// result: (CMOVZ (SRL <t> x (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSCMOVZ)
		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32())
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, config.fe.TypeBool())
		v3.AuxInt = 32
		v4 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32())
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS_OpRsh32Ux32 lowers Rsh32Ux32: an SRL of x by y, wrapped in
// CMOVZ with guard (SGTUconst [32] y) so the constant 0 is selected when the
// guard is zero (shift amount not below 32).
// Autogenerated from gen/MIPS.rules; do not edit by hand.
func rewriteValueMIPS_OpRsh32Ux32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh32Ux32 <t> x y)
	// cond:
	// result: (CMOVZ (SRL <t> x y) (MOVWconst [0]) (SGTUconst [32] y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSCMOVZ)
		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v1.AuxInt = 0
		v.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, config.fe.TypeBool())
		v2.AuxInt = 32
		v2.AddArg(y)
		v.AddArg(v2)
		return true
	}
}
// rewriteValueMIPS_OpRsh32Ux64 folds Rsh32Ux64 by a 64-bit constant c:
// for c < 32 it becomes a single (SRLconst x [c]); for c >= 32 every bit is
// shifted out and the result is the constant 0.
// Autogenerated from gen/MIPS.rules; do not edit by hand.
func rewriteValueMIPS_OpRsh32Ux64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh32Ux64 x (Const64 [c]))
	// cond: uint32(c) < 32
	// result: (SRLconst x [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(uint32(c) < 32) {
			break
		}
		v.reset(OpMIPSSRLconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (Rsh32Ux64 _ (Const64 [c]))
	// cond: uint32(c) >= 32
	// result: (MOVWconst [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(uint32(c) >= 32) {
			break
		}
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueMIPS_OpRsh32Ux8 lowers Rsh32Ux8: an SRL of x by the
// zero-extended y, wrapped in CMOVZ with guard
// (SGTUconst [32] (ZeroExt8to32 y)) so the constant 0 is selected when the
// guard is zero (shift amount not below 32).
// Autogenerated from gen/MIPS.rules; do not edit by hand.
func rewriteValueMIPS_OpRsh32Ux8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh32Ux8 <t> x y)
	// cond:
	// result: (CMOVZ (SRL <t> x (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSCMOVZ)
		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, config.fe.TypeBool())
		v3.AuxInt = 32
		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS_OpRsh32x16 lowers the signed shift Rsh32x16: an SRA of x by
// a CMOVZ-selected amount — the zero-extended y, or the constant -1 when the
// guard (SGTUconst [32] (ZeroExt16to32 y)) is zero (shift amount not below 32).
// Autogenerated from gen/MIPS.rules; do not edit by hand.
func rewriteValueMIPS_OpRsh32x16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh32x16 x y)
	// cond:
	// result: (SRA x ( CMOVZ <config.fe.TypeUInt32()> (ZeroExt16to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt16to32 y))))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSSRA)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpMIPSCMOVZ, config.fe.TypeUInt32())
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32())
		v1.AddArg(y)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v2.AuxInt = -1
		v0.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, config.fe.TypeBool())
		v3.AuxInt = 32
		v4 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32())
		v4.AddArg(y)
		v3.AddArg(v4)
		v0.AddArg(v3)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS_OpRsh32x32 lowers the generic signed 32>>32 shift.
// Same scheme as Rsh32x16, but the count is already 32 bits wide so no
// zero-extension of y is needed.
func rewriteValueMIPS_OpRsh32x32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh32x32 x y)
	// cond:
	// result: (SRA x ( CMOVZ <config.fe.TypeUInt32()> y (MOVWconst [-1]) (SGTUconst [32] y)))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSSRA)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpMIPSCMOVZ, config.fe.TypeUInt32())
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v1.AuxInt = -1
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, config.fe.TypeBool())
		v2.AuxInt = 32
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS_OpRsh32x64 folds a signed 32-bit shift by a 64-bit
// constant.  In-range counts become SRAconst; out-of-range counts saturate
// to SRAconst [31], which fills the result with the sign bit.
func rewriteValueMIPS_OpRsh32x64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh32x64 x (Const64 [c]))
	// cond: uint32(c) < 32
	// result: (SRAconst x [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(uint32(c) < 32) {
			break
		}
		v.reset(OpMIPSSRAconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (Rsh32x64 x (Const64 [c]))
	// cond: uint32(c) >= 32
	// result: (SRAconst x [31])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(uint32(c) >= 32) {
			break
		}
		v.reset(OpMIPSSRAconst)
		// Shift by 31 duplicates the sign bit across the whole word.
		v.AuxInt = 31
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS_OpRsh32x8 lowers the generic signed 32>>8 shift.
// Same scheme as Rsh32x16 with an 8-bit count: counts >= 32 are replaced
// by -1 via CMOVZ so SRA saturates to the sign bit.
func rewriteValueMIPS_OpRsh32x8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh32x8 x y)
	// cond:
	// result: (SRA x ( CMOVZ <config.fe.TypeUInt32()> (ZeroExt8to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt8to32 y))))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSSRA)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpMIPSCMOVZ, config.fe.TypeUInt32())
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
		v1.AddArg(y)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v2.AuxInt = -1
		v0.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, config.fe.TypeBool())
		v3.AuxInt = 32
		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
		v4.AddArg(y)
		v3.AddArg(v4)
		v0.AddArg(v3)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS_OpRsh8Ux16 lowers the generic unsigned 8>>16 shift.
// The 8-bit operand is zero-extended first (its upper bits are undefined),
// then the usual CMOVZ/SGTUconst guard forces 0 for counts >= 32.
func rewriteValueMIPS_OpRsh8Ux16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8Ux16 <t> x y)
	// cond:
	// result: (CMOVZ (SRL <t> (ZeroExt8to32 x) (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSCMOVZ)
		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v3.AuxInt = 0
		v.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, config.fe.TypeBool())
		v4.AuxInt = 32
		v5 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32())
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS_OpRsh8Ux32 lowers the generic unsigned 8>>32 shift.
// Same scheme as Rsh8Ux16 but the count is already 32 bits wide.
func rewriteValueMIPS_OpRsh8Ux32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8Ux32 <t> x y)
	// cond:
	// result: (CMOVZ (SRL <t> (ZeroExt8to32 x) y) (MOVWconst [0]) (SGTUconst [32] y))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSCMOVZ)
		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v0.AddArg(y)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, config.fe.TypeBool())
		v3.AuxInt = 32
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS_OpRsh8Ux64 folds an unsigned 8-bit shift by a 64-bit
// constant.  The operand's upper 24 bits are undefined, so it is shifted
// left by 24 first and then right by c+24, which zero-fills correctly.
// Counts >= 8 always produce 0.
func rewriteValueMIPS_OpRsh8Ux64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8Ux64 x (Const64 [c]))
	// cond: uint32(c) < 8
	// result: (SRLconst (SLLconst <config.fe.TypeUInt32()> x [24]) [c+24])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(uint32(c) < 8) {
			break
		}
		v.reset(OpMIPSSRLconst)
		v.AuxInt = c + 24
		v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, config.fe.TypeUInt32())
		v0.AuxInt = 24
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Rsh8Ux64 _ (Const64 [c]))
	// cond: uint32(c) >= 8
	// result: (MOVWconst [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(uint32(c) >= 8) {
			break
		}
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueMIPS_OpRsh8Ux8 lowers the generic unsigned 8>>8 shift.
// Both operand and count are zero-extended; the CMOVZ/SGTUconst guard
// forces 0 for counts >= 32.
func rewriteValueMIPS_OpRsh8Ux8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8Ux8 <t> x y)
	// cond:
	// result: (CMOVZ (SRL <t> (ZeroExt8to32 x) (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSCMOVZ)
		v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v3.AuxInt = 0
		v.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, config.fe.TypeBool())
		v4.AuxInt = 32
		v5 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
  7680  func rewriteValueMIPS_OpRsh8x16(v *Value, config *Config) bool {
  7681  	b := v.Block
  7682  	_ = b
  7683  	// match: (Rsh8x16 x y)
  7684  	// cond:
  7685  	// result: (SRA (SignExt16to32 x) ( CMOVZ <config.fe.TypeUInt32()> (ZeroExt16to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt16to32 y))))
  7686  	for {
  7687  		x := v.Args[0]
  7688  		y := v.Args[1]
  7689  		v.reset(OpMIPSSRA)
  7690  		v0 := b.NewValue0(v.Pos, OpSignExt16to32, config.fe.TypeInt32())
  7691  		v0.AddArg(x)
  7692  		v.AddArg(v0)
  7693  		v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, config.fe.TypeUInt32())
  7694  		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32())
  7695  		v2.AddArg(y)
  7696  		v1.AddArg(v2)
  7697  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
  7698  		v3.AuxInt = -1
  7699  		v1.AddArg(v3)
  7700  		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, config.fe.TypeBool())
  7701  		v4.AuxInt = 32
  7702  		v5 := b.NewValue0(v.Pos, OpZeroExt16to32, config.fe.TypeUInt32())
  7703  		v5.AddArg(y)
  7704  		v4.AddArg(v5)
  7705  		v1.AddArg(v4)
  7706  		v.AddArg(v1)
  7707  		return true
  7708  	}
  7709  }
  7710  func rewriteValueMIPS_OpRsh8x32(v *Value, config *Config) bool {
  7711  	b := v.Block
  7712  	_ = b
  7713  	// match: (Rsh8x32 x y)
  7714  	// cond:
  7715  	// result: (SRA (SignExt16to32 x) ( CMOVZ <config.fe.TypeUInt32()> y (MOVWconst [-1]) (SGTUconst [32] y)))
  7716  	for {
  7717  		x := v.Args[0]
  7718  		y := v.Args[1]
  7719  		v.reset(OpMIPSSRA)
  7720  		v0 := b.NewValue0(v.Pos, OpSignExt16to32, config.fe.TypeInt32())
  7721  		v0.AddArg(x)
  7722  		v.AddArg(v0)
  7723  		v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, config.fe.TypeUInt32())
  7724  		v1.AddArg(y)
  7725  		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
  7726  		v2.AuxInt = -1
  7727  		v1.AddArg(v2)
  7728  		v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, config.fe.TypeBool())
  7729  		v3.AuxInt = 32
  7730  		v3.AddArg(y)
  7731  		v1.AddArg(v3)
  7732  		v.AddArg(v1)
  7733  		return true
  7734  	}
  7735  }
// rewriteValueMIPS_OpRsh8x64 folds a signed 8-bit shift by a 64-bit
// constant.  The operand's upper 24 bits are undefined, so it is shifted
// left by 24 to put the 8-bit sign bit at bit 31, then arithmetically
// shifted right by c+24 (saturated to 31 when c >= 8).
func rewriteValueMIPS_OpRsh8x64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh8x64 x (Const64 [c]))
	// cond: uint32(c) < 8
	// result: (SRAconst (SLLconst <config.fe.TypeUInt32()> x [24]) [c+24])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(uint32(c) < 8) {
			break
		}
		v.reset(OpMIPSSRAconst)
		v.AuxInt = c + 24
		v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, config.fe.TypeUInt32())
		v0.AuxInt = 24
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Rsh8x64 x (Const64 [c]))
	// cond: uint32(c) >= 8
	// result: (SRAconst (SLLconst <config.fe.TypeUInt32()> x [24]) [31])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(uint32(c) >= 8) {
			break
		}
		v.reset(OpMIPSSRAconst)
		// Shift by 31 fills the word with the (relocated) sign bit.
		v.AuxInt = 31
		v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, config.fe.TypeUInt32())
		v0.AuxInt = 24
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}
  7783  func rewriteValueMIPS_OpRsh8x8(v *Value, config *Config) bool {
  7784  	b := v.Block
  7785  	_ = b
  7786  	// match: (Rsh8x8 x y)
  7787  	// cond:
  7788  	// result: (SRA (SignExt16to32 x) ( CMOVZ <config.fe.TypeUInt32()> (ZeroExt8to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt8to32 y))))
  7789  	for {
  7790  		x := v.Args[0]
  7791  		y := v.Args[1]
  7792  		v.reset(OpMIPSSRA)
  7793  		v0 := b.NewValue0(v.Pos, OpSignExt16to32, config.fe.TypeInt32())
  7794  		v0.AddArg(x)
  7795  		v.AddArg(v0)
  7796  		v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, config.fe.TypeUInt32())
  7797  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
  7798  		v2.AddArg(y)
  7799  		v1.AddArg(v2)
  7800  		v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
  7801  		v3.AuxInt = -1
  7802  		v1.AddArg(v3)
  7803  		v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, config.fe.TypeBool())
  7804  		v4.AuxInt = 32
  7805  		v5 := b.NewValue0(v.Pos, OpZeroExt8to32, config.fe.TypeUInt32())
  7806  		v5.AddArg(y)
  7807  		v4.AddArg(v5)
  7808  		v1.AddArg(v4)
  7809  		v.AddArg(v1)
  7810  		return true
  7811  	}
  7812  }
  7813  func rewriteValueMIPS_OpSelect0(v *Value, config *Config) bool {
  7814  	b := v.Block
  7815  	_ = b
  7816  	// match: (Select0 (Add32carry <t> x y))
  7817  	// cond:
  7818  	// result: (ADD <t.FieldType(0)> x y)
  7819  	for {
  7820  		v_0 := v.Args[0]
  7821  		if v_0.Op != OpAdd32carry {
  7822  			break
  7823  		}
  7824  		t := v_0.Type
  7825  		x := v_0.Args[0]
  7826  		y := v_0.Args[1]
  7827  		v.reset(OpMIPSADD)
  7828  		v.Type = t.FieldType(0)
  7829  		v.AddArg(x)
  7830  		v.AddArg(y)
  7831  		return true
  7832  	}
  7833  	// match: (Select0 (Sub32carry <t> x y))
  7834  	// cond:
  7835  	// result: (SUB <t.FieldType(0)> x y)
  7836  	for {
  7837  		v_0 := v.Args[0]
  7838  		if v_0.Op != OpSub32carry {
  7839  			break
  7840  		}
  7841  		t := v_0.Type
  7842  		x := v_0.Args[0]
  7843  		y := v_0.Args[1]
  7844  		v.reset(OpMIPSSUB)
  7845  		v.Type = t.FieldType(0)
  7846  		v.AddArg(x)
  7847  		v.AddArg(y)
  7848  		return true
  7849  	}
  7850  	// match: (Select0 (MULTU x (MOVWconst [c])))
  7851  	// cond: x.Op != OpMIPSMOVWconst
  7852  	// result: (Select0 (MULTU (MOVWconst [c]) x ))
  7853  	for {
  7854  		v_0 := v.Args[0]
  7855  		if v_0.Op != OpMIPSMULTU {
  7856  			break
  7857  		}
  7858  		x := v_0.Args[0]
  7859  		v_0_1 := v_0.Args[1]
  7860  		if v_0_1.Op != OpMIPSMOVWconst {
  7861  			break
  7862  		}
  7863  		c := v_0_1.AuxInt
  7864  		if !(x.Op != OpMIPSMOVWconst) {
  7865  			break
  7866  		}
  7867  		v.reset(OpSelect0)
  7868  		v0 := b.NewValue0(v.Pos, OpMIPSMULTU, MakeTuple(config.fe.TypeUInt32(), config.fe.TypeUInt32()))
  7869  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
  7870  		v1.AuxInt = c
  7871  		v0.AddArg(v1)
  7872  		v0.AddArg(x)
  7873  		v.AddArg(v0)
  7874  		return true
  7875  	}
  7876  	// match: (Select0 (MULTU (MOVWconst [0]) _ ))
  7877  	// cond:
  7878  	// result: (MOVWconst [0])
  7879  	for {
  7880  		v_0 := v.Args[0]
  7881  		if v_0.Op != OpMIPSMULTU {
  7882  			break
  7883  		}
  7884  		v_0_0 := v_0.Args[0]
  7885  		if v_0_0.Op != OpMIPSMOVWconst {
  7886  			break
  7887  		}
  7888  		if v_0_0.AuxInt != 0 {
  7889  			break
  7890  		}
  7891  		v.reset(OpMIPSMOVWconst)
  7892  		v.AuxInt = 0
  7893  		return true
  7894  	}
  7895  	// match: (Select0 (MULTU (MOVWconst [1]) _ ))
  7896  	// cond:
  7897  	// result: (MOVWconst [0])
  7898  	for {
  7899  		v_0 := v.Args[0]
  7900  		if v_0.Op != OpMIPSMULTU {
  7901  			break
  7902  		}
  7903  		v_0_0 := v_0.Args[0]
  7904  		if v_0_0.Op != OpMIPSMOVWconst {
  7905  			break
  7906  		}
  7907  		if v_0_0.AuxInt != 1 {
  7908  			break
  7909  		}
  7910  		v.reset(OpMIPSMOVWconst)
  7911  		v.AuxInt = 0
  7912  		return true
  7913  	}
  7914  	// match: (Select0 (MULTU (MOVWconst [-1]) x ))
  7915  	// cond:
  7916  	// result: (CMOVZ (ADDconst <x.Type> [-1] x) (MOVWconst [0]) x)
  7917  	for {
  7918  		v_0 := v.Args[0]
  7919  		if v_0.Op != OpMIPSMULTU {
  7920  			break
  7921  		}
  7922  		v_0_0 := v_0.Args[0]
  7923  		if v_0_0.Op != OpMIPSMOVWconst {
  7924  			break
  7925  		}
  7926  		if v_0_0.AuxInt != -1 {
  7927  			break
  7928  		}
  7929  		x := v_0.Args[1]
  7930  		v.reset(OpMIPSCMOVZ)
  7931  		v0 := b.NewValue0(v.Pos, OpMIPSADDconst, x.Type)
  7932  		v0.AuxInt = -1
  7933  		v0.AddArg(x)
  7934  		v.AddArg(v0)
  7935  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
  7936  		v1.AuxInt = 0
  7937  		v.AddArg(v1)
  7938  		v.AddArg(x)
  7939  		return true
  7940  	}
  7941  	// match: (Select0 (MULTU (MOVWconst [c]) x ))
  7942  	// cond: isPowerOfTwo(int64(uint32(c)))
  7943  	// result: (SRLconst [32-log2(int64(uint32(c)))] x)
  7944  	for {
  7945  		v_0 := v.Args[0]
  7946  		if v_0.Op != OpMIPSMULTU {
  7947  			break
  7948  		}
  7949  		v_0_0 := v_0.Args[0]
  7950  		if v_0_0.Op != OpMIPSMOVWconst {
  7951  			break
  7952  		}
  7953  		c := v_0_0.AuxInt
  7954  		x := v_0.Args[1]
  7955  		if !(isPowerOfTwo(int64(uint32(c)))) {
  7956  			break
  7957  		}
  7958  		v.reset(OpMIPSSRLconst)
  7959  		v.AuxInt = 32 - log2(int64(uint32(c)))
  7960  		v.AddArg(x)
  7961  		return true
  7962  	}
  7963  	// match: (Select0 (MULTU  (MOVWconst [c]) (MOVWconst [d])))
  7964  	// cond:
  7965  	// result: (MOVWconst [(c*d)>>32])
  7966  	for {
  7967  		v_0 := v.Args[0]
  7968  		if v_0.Op != OpMIPSMULTU {
  7969  			break
  7970  		}
  7971  		v_0_0 := v_0.Args[0]
  7972  		if v_0_0.Op != OpMIPSMOVWconst {
  7973  			break
  7974  		}
  7975  		c := v_0_0.AuxInt
  7976  		v_0_1 := v_0.Args[1]
  7977  		if v_0_1.Op != OpMIPSMOVWconst {
  7978  			break
  7979  		}
  7980  		d := v_0_1.AuxInt
  7981  		v.reset(OpMIPSMOVWconst)
  7982  		v.AuxInt = (c * d) >> 32
  7983  		return true
  7984  	}
  7985  	// match: (Select0 (DIV  (MOVWconst [c]) (MOVWconst [d])))
  7986  	// cond:
  7987  	// result: (MOVWconst [int64(int32(c)%int32(d))])
  7988  	for {
  7989  		v_0 := v.Args[0]
  7990  		if v_0.Op != OpMIPSDIV {
  7991  			break
  7992  		}
  7993  		v_0_0 := v_0.Args[0]
  7994  		if v_0_0.Op != OpMIPSMOVWconst {
  7995  			break
  7996  		}
  7997  		c := v_0_0.AuxInt
  7998  		v_0_1 := v_0.Args[1]
  7999  		if v_0_1.Op != OpMIPSMOVWconst {
  8000  			break
  8001  		}
  8002  		d := v_0_1.AuxInt
  8003  		v.reset(OpMIPSMOVWconst)
  8004  		v.AuxInt = int64(int32(c) % int32(d))
  8005  		return true
  8006  	}
  8007  	// match: (Select0 (DIVU (MOVWconst [c]) (MOVWconst [d])))
  8008  	// cond:
  8009  	// result: (MOVWconst [int64(int32(uint32(c)%uint32(d)))])
  8010  	for {
  8011  		v_0 := v.Args[0]
  8012  		if v_0.Op != OpMIPSDIVU {
  8013  			break
  8014  		}
  8015  		v_0_0 := v_0.Args[0]
  8016  		if v_0_0.Op != OpMIPSMOVWconst {
  8017  			break
  8018  		}
  8019  		c := v_0_0.AuxInt
  8020  		v_0_1 := v_0.Args[1]
  8021  		if v_0_1.Op != OpMIPSMOVWconst {
  8022  			break
  8023  		}
  8024  		d := v_0_1.AuxInt
  8025  		v.reset(OpMIPSMOVWconst)
  8026  		v.AuxInt = int64(int32(uint32(c) % uint32(d)))
  8027  		return true
  8028  	}
  8029  	return false
  8030  }
  8031  func rewriteValueMIPS_OpSelect1(v *Value, config *Config) bool {
  8032  	b := v.Block
  8033  	_ = b
  8034  	// match: (Select1 (Add32carry <t> x y))
  8035  	// cond:
  8036  	// result: (SGTU <config.fe.TypeBool()> x (ADD <t.FieldType(0)> x y))
  8037  	for {
  8038  		v_0 := v.Args[0]
  8039  		if v_0.Op != OpAdd32carry {
  8040  			break
  8041  		}
  8042  		t := v_0.Type
  8043  		x := v_0.Args[0]
  8044  		y := v_0.Args[1]
  8045  		v.reset(OpMIPSSGTU)
  8046  		v.Type = config.fe.TypeBool()
  8047  		v.AddArg(x)
  8048  		v0 := b.NewValue0(v.Pos, OpMIPSADD, t.FieldType(0))
  8049  		v0.AddArg(x)
  8050  		v0.AddArg(y)
  8051  		v.AddArg(v0)
  8052  		return true
  8053  	}
  8054  	// match: (Select1 (Sub32carry <t> x y))
  8055  	// cond:
  8056  	// result: (SGTU <config.fe.TypeBool()> (SUB <t.FieldType(0)> x y) x)
  8057  	for {
  8058  		v_0 := v.Args[0]
  8059  		if v_0.Op != OpSub32carry {
  8060  			break
  8061  		}
  8062  		t := v_0.Type
  8063  		x := v_0.Args[0]
  8064  		y := v_0.Args[1]
  8065  		v.reset(OpMIPSSGTU)
  8066  		v.Type = config.fe.TypeBool()
  8067  		v0 := b.NewValue0(v.Pos, OpMIPSSUB, t.FieldType(0))
  8068  		v0.AddArg(x)
  8069  		v0.AddArg(y)
  8070  		v.AddArg(v0)
  8071  		v.AddArg(x)
  8072  		return true
  8073  	}
  8074  	// match: (Select1 (MULTU x (MOVWconst [c])))
  8075  	// cond: x.Op != OpMIPSMOVWconst
  8076  	// result: (Select1 (MULTU (MOVWconst [c]) x ))
  8077  	for {
  8078  		v_0 := v.Args[0]
  8079  		if v_0.Op != OpMIPSMULTU {
  8080  			break
  8081  		}
  8082  		x := v_0.Args[0]
  8083  		v_0_1 := v_0.Args[1]
  8084  		if v_0_1.Op != OpMIPSMOVWconst {
  8085  			break
  8086  		}
  8087  		c := v_0_1.AuxInt
  8088  		if !(x.Op != OpMIPSMOVWconst) {
  8089  			break
  8090  		}
  8091  		v.reset(OpSelect1)
  8092  		v0 := b.NewValue0(v.Pos, OpMIPSMULTU, MakeTuple(config.fe.TypeUInt32(), config.fe.TypeUInt32()))
  8093  		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
  8094  		v1.AuxInt = c
  8095  		v0.AddArg(v1)
  8096  		v0.AddArg(x)
  8097  		v.AddArg(v0)
  8098  		return true
  8099  	}
  8100  	// match: (Select1 (MULTU (MOVWconst [0]) _ ))
  8101  	// cond:
  8102  	// result: (MOVWconst [0])
  8103  	for {
  8104  		v_0 := v.Args[0]
  8105  		if v_0.Op != OpMIPSMULTU {
  8106  			break
  8107  		}
  8108  		v_0_0 := v_0.Args[0]
  8109  		if v_0_0.Op != OpMIPSMOVWconst {
  8110  			break
  8111  		}
  8112  		if v_0_0.AuxInt != 0 {
  8113  			break
  8114  		}
  8115  		v.reset(OpMIPSMOVWconst)
  8116  		v.AuxInt = 0
  8117  		return true
  8118  	}
  8119  	// match: (Select1 (MULTU (MOVWconst [1]) x ))
  8120  	// cond:
  8121  	// result: x
  8122  	for {
  8123  		v_0 := v.Args[0]
  8124  		if v_0.Op != OpMIPSMULTU {
  8125  			break
  8126  		}
  8127  		v_0_0 := v_0.Args[0]
  8128  		if v_0_0.Op != OpMIPSMOVWconst {
  8129  			break
  8130  		}
  8131  		if v_0_0.AuxInt != 1 {
  8132  			break
  8133  		}
  8134  		x := v_0.Args[1]
  8135  		v.reset(OpCopy)
  8136  		v.Type = x.Type
  8137  		v.AddArg(x)
  8138  		return true
  8139  	}
  8140  	// match: (Select1 (MULTU (MOVWconst [-1]) x ))
  8141  	// cond:
  8142  	// result: (NEG <x.Type> x)
  8143  	for {
  8144  		v_0 := v.Args[0]
  8145  		if v_0.Op != OpMIPSMULTU {
  8146  			break
  8147  		}
  8148  		v_0_0 := v_0.Args[0]
  8149  		if v_0_0.Op != OpMIPSMOVWconst {
  8150  			break
  8151  		}
  8152  		if v_0_0.AuxInt != -1 {
  8153  			break
  8154  		}
  8155  		x := v_0.Args[1]
  8156  		v.reset(OpMIPSNEG)
  8157  		v.Type = x.Type
  8158  		v.AddArg(x)
  8159  		return true
  8160  	}
  8161  	// match: (Select1 (MULTU (MOVWconst [c]) x ))
  8162  	// cond: isPowerOfTwo(int64(uint32(c)))
  8163  	// result: (SLLconst [log2(int64(uint32(c)))] x)
  8164  	for {
  8165  		v_0 := v.Args[0]
  8166  		if v_0.Op != OpMIPSMULTU {
  8167  			break
  8168  		}
  8169  		v_0_0 := v_0.Args[0]
  8170  		if v_0_0.Op != OpMIPSMOVWconst {
  8171  			break
  8172  		}
  8173  		c := v_0_0.AuxInt
  8174  		x := v_0.Args[1]
  8175  		if !(isPowerOfTwo(int64(uint32(c)))) {
  8176  			break
  8177  		}
  8178  		v.reset(OpMIPSSLLconst)
  8179  		v.AuxInt = log2(int64(uint32(c)))
  8180  		v.AddArg(x)
  8181  		return true
  8182  	}
  8183  	// match: (Select1 (MULTU  (MOVWconst [c]) (MOVWconst [d])))
  8184  	// cond:
  8185  	// result: (MOVWconst [int64(int32(uint32(c)*uint32(d)))])
  8186  	for {
  8187  		v_0 := v.Args[0]
  8188  		if v_0.Op != OpMIPSMULTU {
  8189  			break
  8190  		}
  8191  		v_0_0 := v_0.Args[0]
  8192  		if v_0_0.Op != OpMIPSMOVWconst {
  8193  			break
  8194  		}
  8195  		c := v_0_0.AuxInt
  8196  		v_0_1 := v_0.Args[1]
  8197  		if v_0_1.Op != OpMIPSMOVWconst {
  8198  			break
  8199  		}
  8200  		d := v_0_1.AuxInt
  8201  		v.reset(OpMIPSMOVWconst)
  8202  		v.AuxInt = int64(int32(uint32(c) * uint32(d)))
  8203  		return true
  8204  	}
  8205  	// match: (Select1 (DIV  (MOVWconst [c]) (MOVWconst [d])))
  8206  	// cond:
  8207  	// result: (MOVWconst [int64(int32(c)/int32(d))])
  8208  	for {
  8209  		v_0 := v.Args[0]
  8210  		if v_0.Op != OpMIPSDIV {
  8211  			break
  8212  		}
  8213  		v_0_0 := v_0.Args[0]
  8214  		if v_0_0.Op != OpMIPSMOVWconst {
  8215  			break
  8216  		}
  8217  		c := v_0_0.AuxInt
  8218  		v_0_1 := v_0.Args[1]
  8219  		if v_0_1.Op != OpMIPSMOVWconst {
  8220  			break
  8221  		}
  8222  		d := v_0_1.AuxInt
  8223  		v.reset(OpMIPSMOVWconst)
  8224  		v.AuxInt = int64(int32(c) / int32(d))
  8225  		return true
  8226  	}
  8227  	// match: (Select1 (DIVU (MOVWconst [c]) (MOVWconst [d])))
  8228  	// cond:
  8229  	// result: (MOVWconst [int64(int32(uint32(c)/uint32(d)))])
  8230  	for {
  8231  		v_0 := v.Args[0]
  8232  		if v_0.Op != OpMIPSDIVU {
  8233  			break
  8234  		}
  8235  		v_0_0 := v_0.Args[0]
  8236  		if v_0_0.Op != OpMIPSMOVWconst {
  8237  			break
  8238  		}
  8239  		c := v_0_0.AuxInt
  8240  		v_0_1 := v_0.Args[1]
  8241  		if v_0_1.Op != OpMIPSMOVWconst {
  8242  			break
  8243  		}
  8244  		d := v_0_1.AuxInt
  8245  		v.reset(OpMIPSMOVWconst)
  8246  		v.AuxInt = int64(int32(uint32(c) / uint32(d)))
  8247  		return true
  8248  	}
  8249  	return false
  8250  }
// rewriteValueMIPS_OpSignExt16to32 lowers SignExt16to32 to MOVHreg.
func rewriteValueMIPS_OpSignExt16to32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (SignExt16to32 x)
	// cond:
	// result: (MOVHreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPSMOVHreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS_OpSignExt8to16 lowers SignExt8to16 to MOVBreg.
func rewriteValueMIPS_OpSignExt8to16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (SignExt8to16 x)
	// cond:
	// result: (MOVBreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPSMOVBreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS_OpSignExt8to32 lowers SignExt8to32 to MOVBreg.
func rewriteValueMIPS_OpSignExt8to32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (SignExt8to32 x)
	// cond:
	// result: (MOVBreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPSMOVBreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS_OpSignmask lowers Signmask: an arithmetic shift right
// by 31 yields 0 for non-negative x and -1 (all ones) for negative x.
func rewriteValueMIPS_OpSignmask(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Signmask x)
	// cond:
	// result: (SRAconst x [31])
	for {
		x := v.Args[0]
		v.reset(OpMIPSSRAconst)
		v.AuxInt = 31
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS_OpSlicemask lowers Slicemask: -x has its sign bit set
// iff x > 0, so SRAconst [31] of the negation gives all ones for x > 0
// and zero for x == 0.
func rewriteValueMIPS_OpSlicemask(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Slicemask <t> x)
	// cond:
	// result: (SRAconst (NEG <t> x) [31])
	for {
		t := v.Type
		x := v.Args[0]
		v.reset(OpMIPSSRAconst)
		v.AuxInt = 31
		v0 := b.NewValue0(v.Pos, OpMIPSNEG, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS_OpSqrt lowers Sqrt to the double-precision SQRTD.
func rewriteValueMIPS_OpSqrt(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sqrt x)
	// cond:
	// result: (SQRTD x)
	for {
		x := v.Args[0]
		v.reset(OpMIPSSQRTD)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS_OpStaticCall lowers StaticCall to CALLstatic,
// preserving the argument size (AuxInt), call target (Aux), and memory arg.
func rewriteValueMIPS_OpStaticCall(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (StaticCall [argwid] {target} mem)
	// cond:
	// result: (CALLstatic [argwid] {target} mem)
	for {
		argwid := v.AuxInt
		target := v.Aux
		mem := v.Args[0]
		v.reset(OpMIPSCALLstatic)
		v.AuxInt = argwid
		v.Aux = target
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS_OpStore lowers a generic Store by the size in AuxInt:
// 1 -> MOVBstore, 2 -> MOVHstore, 4 -> MOVWstore or MOVFstore (float32),
// 8 -> MOVDstore (float64).  An 8-byte non-float store also maps to
// MOVWstore here (32-bit target; 64-bit integers are decomposed earlier).
func rewriteValueMIPS_OpStore(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Store [1] ptr val mem)
	// cond:
	// result: (MOVBstore ptr val mem)
	for {
		if v.AuxInt != 1 {
			break
		}
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpMIPSMOVBstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store [2] ptr val mem)
	// cond:
	// result: (MOVHstore ptr val mem)
	for {
		if v.AuxInt != 2 {
			break
		}
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpMIPSMOVHstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store [4] ptr val mem)
	// cond: !is32BitFloat(val.Type)
	// result: (MOVWstore ptr val mem)
	for {
		if v.AuxInt != 4 {
			break
		}
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(!is32BitFloat(val.Type)) {
			break
		}
		v.reset(OpMIPSMOVWstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store [8] ptr val mem)
	// cond: !is64BitFloat(val.Type)
	// result: (MOVWstore ptr val mem)
	for {
		if v.AuxInt != 8 {
			break
		}
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(!is64BitFloat(val.Type)) {
			break
		}
		v.reset(OpMIPSMOVWstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store [4] ptr val mem)
	// cond: is32BitFloat(val.Type)
	// result: (MOVFstore ptr val mem)
	for {
		if v.AuxInt != 4 {
			break
		}
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32BitFloat(val.Type)) {
			break
		}
		v.reset(OpMIPSMOVFstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store [8] ptr val mem)
	// cond: is64BitFloat(val.Type)
	// result: (MOVDstore ptr val mem)
	for {
		if v.AuxInt != 8 {
			break
		}
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is64BitFloat(val.Type)) {
			break
		}
		v.reset(OpMIPSMOVDstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS_OpSub16 lowers Sub16 to the 32-bit SUB (subword results
// keep only their low bits meaningful).
func rewriteValueMIPS_OpSub16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sub16 x y)
	// cond:
	// result: (SUB x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSSUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS_OpSub32 lowers Sub32 to SUB.
func rewriteValueMIPS_OpSub32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sub32 x y)
	// cond:
	// result: (SUB x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSSUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS_OpSub32F lowers Sub32F to the single-precision SUBF.
func rewriteValueMIPS_OpSub32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sub32F x y)
	// cond:
	// result: (SUBF x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSSUBF)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS_OpSub32withcarry lowers Sub32withcarry as two chained
// subtractions: (x - y) - c, where c is the incoming borrow.
func rewriteValueMIPS_OpSub32withcarry(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sub32withcarry <t> x y c)
	// cond:
	// result: (SUB (SUB <t> x y) c)
	for {
		t := v.Type
		x := v.Args[0]
		y := v.Args[1]
		c := v.Args[2]
		v.reset(OpMIPSSUB)
		v0 := b.NewValue0(v.Pos, OpMIPSSUB, t)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		v.AddArg(c)
		return true
	}
}
// rewriteValueMIPS_OpSub64F lowers Sub64F to the double-precision SUBD.
func rewriteValueMIPS_OpSub64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sub64F x y)
	// cond:
	// result: (SUBD x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSSUBD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS_OpSub8 lowers Sub8 to the 32-bit SUB (subword results
// keep only their low bits meaningful).
func rewriteValueMIPS_OpSub8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sub8 x y)
	// cond:
	// result: (SUB x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSSUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS_OpSubPtr lowers pointer subtraction (SubPtr) to the
// MIPS SUB machine op (pointers are 32-bit words on this target).
// Unconditional; always returns true.
func rewriteValueMIPS_OpSubPtr(v *Value, config *Config) bool {
	b := v.Block
	_ = b // generator boilerplate; unused by this rule
	// match: (SubPtr x y)
	// cond:
	// result: (SUB x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSSUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS_OpTrunc16to8 replaces a 16-to-8-bit truncation with a
// plain Copy of its argument: no machine instruction is needed (presumably
// because sub-word values live in full-width registers — see MIPS.rules).
// Unconditional; always returns true.
func rewriteValueMIPS_OpTrunc16to8(v *Value, config *Config) bool {
	b := v.Block
	_ = b // generator boilerplate; unused by this rule
	// match: (Trunc16to8 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS_OpTrunc32to16 replaces a 32-to-16-bit truncation with a
// plain Copy of its argument: no machine instruction is needed (presumably
// because sub-word values live in full-width registers — see MIPS.rules).
// Unconditional; always returns true.
func rewriteValueMIPS_OpTrunc32to16(v *Value, config *Config) bool {
	b := v.Block
	_ = b // generator boilerplate; unused by this rule
	// match: (Trunc32to16 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS_OpTrunc32to8 replaces a 32-to-8-bit truncation with a
// plain Copy of its argument: no machine instruction is needed (presumably
// because sub-word values live in full-width registers — see MIPS.rules).
// Unconditional; always returns true.
func rewriteValueMIPS_OpTrunc32to8(v *Value, config *Config) bool {
	b := v.Block
	_ = b // generator boilerplate; unused by this rule
	// match: (Trunc32to8 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS_OpXor16 lowers the generic Xor16 op to the full-width
// MIPS XOR machine op. Unconditional; always returns true.
func rewriteValueMIPS_OpXor16(v *Value, config *Config) bool {
	b := v.Block
	_ = b // generator boilerplate; unused by this rule
	// match: (Xor16 x y)
	// cond:
	// result: (XOR x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSXOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS_OpXor32 lowers the generic Xor32 op to the MIPS XOR
// machine op. Unconditional; always returns true.
func rewriteValueMIPS_OpXor32(v *Value, config *Config) bool {
	b := v.Block
	_ = b // generator boilerplate; unused by this rule
	// match: (Xor32 x y)
	// cond:
	// result: (XOR x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSXOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS_OpXor8 lowers the generic Xor8 op to the full-width
// MIPS XOR machine op. Unconditional; always returns true.
func rewriteValueMIPS_OpXor8(v *Value, config *Config) bool {
	b := v.Block
	_ = b // generator boilerplate; unused by this rule
	// match: (Xor8 x y)
	// cond:
	// result: (XOR x y)
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSXOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS_OpZero lowers the generic Zero [s] op (zero a block of
// memory) for MIPS. The AuxInt s encodes both size and alignment via
// SizeAndAlign. Small fixed sizes are expanded inline into chains of
// MOVB/MOVH/MOVW stores of constant 0, choosing the widest store the
// recorded alignment allows; the rules are tried in order, so each later
// arm implicitly handles the less-aligned leftovers of the arms before it.
// Sizes above 16 bytes (or with alignment not a multiple of 4) fall back
// to the LoweredZero runtime loop. Returns false only if no rule matched.
func rewriteValueMIPS_OpZero(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Zero [s] _ mem)
	// cond: SizeAndAlign(s).Size() == 0
	// result: mem
	for {
		s := v.AuxInt
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size() == 0) {
			break
		}
		// Zero bytes to clear: the op degenerates to its memory argument.
		v.reset(OpCopy)
		v.Type = mem.Type
		v.AddArg(mem)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: SizeAndAlign(s).Size() == 1
	// result: (MOVBstore ptr (MOVWconst [0]) mem)
	for {
		s := v.AuxInt
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size() == 1) {
			break
		}
		v.reset(OpMIPSMOVBstore)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: SizeAndAlign(s).Size() == 2 && SizeAndAlign(s).Align()%2 == 0
	// result: (MOVHstore ptr (MOVWconst [0]) mem)
	for {
		s := v.AuxInt
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size() == 2 && SizeAndAlign(s).Align()%2 == 0) {
			break
		}
		v.reset(OpMIPSMOVHstore)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: SizeAndAlign(s).Size() == 2
	// result: (MOVBstore [1] ptr (MOVWconst [0]) 		(MOVBstore [0] ptr (MOVWconst [0]) mem))
	for {
		s := v.AuxInt
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size() == 2) {
			break
		}
		// Unaligned 2 bytes: two byte stores chained through memory.
		v.reset(OpMIPSMOVBstore)
		v.AuxInt = 1
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, TypeMem)
		v1.AuxInt = 0
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v2.AuxInt = 0
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%4 == 0
	// result: (MOVWstore ptr (MOVWconst [0]) mem)
	for {
		s := v.AuxInt
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%4 == 0) {
			break
		}
		v.reset(OpMIPSMOVWstore)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%2 == 0
	// result: (MOVHstore [2] ptr (MOVWconst [0]) 		(MOVHstore [0] ptr (MOVWconst [0]) mem))
	for {
		s := v.AuxInt
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size() == 4 && SizeAndAlign(s).Align()%2 == 0) {
			break
		}
		v.reset(OpMIPSMOVHstore)
		v.AuxInt = 2
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, TypeMem)
		v1.AuxInt = 0
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v2.AuxInt = 0
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: SizeAndAlign(s).Size() == 4
	// result: (MOVBstore [3] ptr (MOVWconst [0]) 		(MOVBstore [2] ptr (MOVWconst [0]) 			(MOVBstore [1] ptr (MOVWconst [0]) 				(MOVBstore [0] ptr (MOVWconst [0]) mem))))
	for {
		s := v.AuxInt
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size() == 4) {
			break
		}
		// Unaligned 4 bytes: four byte stores chained through memory.
		v.reset(OpMIPSMOVBstore)
		v.AuxInt = 3
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, TypeMem)
		v1.AuxInt = 2
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v2.AuxInt = 0
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPSMOVBstore, TypeMem)
		v3.AuxInt = 1
		v3.AddArg(ptr)
		v4 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v4.AuxInt = 0
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPSMOVBstore, TypeMem)
		v5.AuxInt = 0
		v5.AddArg(ptr)
		v6 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v6.AuxInt = 0
		v5.AddArg(v6)
		v5.AddArg(mem)
		v3.AddArg(v5)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: SizeAndAlign(s).Size() == 3
	// result: (MOVBstore [2] ptr (MOVWconst [0]) 		(MOVBstore [1] ptr (MOVWconst [0]) 			(MOVBstore [0] ptr (MOVWconst [0]) mem)))
	for {
		s := v.AuxInt
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size() == 3) {
			break
		}
		v.reset(OpMIPSMOVBstore)
		v.AuxInt = 2
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, TypeMem)
		v1.AuxInt = 1
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v2.AuxInt = 0
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPSMOVBstore, TypeMem)
		v3.AuxInt = 0
		v3.AddArg(ptr)
		v4 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v4.AuxInt = 0
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: SizeAndAlign(s).Size() == 6 && SizeAndAlign(s).Align()%2 == 0
	// result: (MOVHstore [4] ptr (MOVWconst [0]) 		(MOVHstore [2] ptr (MOVWconst [0]) 			(MOVHstore [0] ptr (MOVWconst [0]) mem)))
	for {
		s := v.AuxInt
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size() == 6 && SizeAndAlign(s).Align()%2 == 0) {
			break
		}
		v.reset(OpMIPSMOVHstore)
		v.AuxInt = 4
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, TypeMem)
		v1.AuxInt = 2
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v2.AuxInt = 0
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPSMOVHstore, TypeMem)
		v3.AuxInt = 0
		v3.AddArg(ptr)
		v4 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v4.AuxInt = 0
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%4 == 0
	// result: (MOVWstore [4] ptr (MOVWconst [0]) 			(MOVWstore [0] ptr (MOVWconst [0]) mem))
	for {
		s := v.AuxInt
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size() == 8 && SizeAndAlign(s).Align()%4 == 0) {
			break
		}
		v.reset(OpMIPSMOVWstore)
		v.AuxInt = 4
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, TypeMem)
		v1.AuxInt = 0
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v2.AuxInt = 0
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: SizeAndAlign(s).Size() == 12 && SizeAndAlign(s).Align()%4 == 0
	// result: (MOVWstore [8] ptr (MOVWconst [0]) 		(MOVWstore [4] ptr (MOVWconst [0]) 			(MOVWstore [0] ptr (MOVWconst [0]) mem)))
	for {
		s := v.AuxInt
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size() == 12 && SizeAndAlign(s).Align()%4 == 0) {
			break
		}
		v.reset(OpMIPSMOVWstore)
		v.AuxInt = 8
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, TypeMem)
		v1.AuxInt = 4
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v2.AuxInt = 0
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPSMOVWstore, TypeMem)
		v3.AuxInt = 0
		v3.AddArg(ptr)
		v4 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v4.AuxInt = 0
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: SizeAndAlign(s).Size() == 16 && SizeAndAlign(s).Align()%4 == 0
	// result: (MOVWstore [12] ptr (MOVWconst [0]) 		(MOVWstore [8] ptr (MOVWconst [0]) 			(MOVWstore [4] ptr (MOVWconst [0]) 				(MOVWstore [0] ptr (MOVWconst [0]) mem))))
	for {
		s := v.AuxInt
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size() == 16 && SizeAndAlign(s).Align()%4 == 0) {
			break
		}
		v.reset(OpMIPSMOVWstore)
		v.AuxInt = 12
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, TypeMem)
		v1.AuxInt = 8
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v2.AuxInt = 0
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPSMOVWstore, TypeMem)
		v3.AuxInt = 4
		v3.AddArg(ptr)
		v4 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v4.AuxInt = 0
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPSMOVWstore, TypeMem)
		v5.AuxInt = 0
		v5.AddArg(ptr)
		v6 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v6.AuxInt = 0
		v5.AddArg(v6)
		v5.AddArg(mem)
		v3.AddArg(v5)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: (SizeAndAlign(s).Size() > 16  || SizeAndAlign(s).Align()%4 != 0)
	// result: (LoweredZero [SizeAndAlign(s).Align()] 		ptr 		(ADDconst <ptr.Type> ptr [SizeAndAlign(s).Size()-moveSize(SizeAndAlign(s).Align(), config)]) 		mem)
	for {
		s := v.AuxInt
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(SizeAndAlign(s).Size() > 16 || SizeAndAlign(s).Align()%4 != 0) {
			break
		}
		// General fallback: LoweredZero loops from ptr up to an end
		// pointer (ptr + size - one move-unit), computed here via ADDconst.
		v.reset(OpMIPSLoweredZero)
		v.AuxInt = SizeAndAlign(s).Align()
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPSADDconst, ptr.Type)
		v0.AuxInt = SizeAndAlign(s).Size() - moveSize(SizeAndAlign(s).Align(), config)
		v0.AddArg(ptr)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS_OpZeroExt16to32 lowers a 16-to-32-bit zero extension to
// the MIPS MOVHUreg (move halfword unsigned) machine op. Unconditional;
// always returns true.
func rewriteValueMIPS_OpZeroExt16to32(v *Value, config *Config) bool {
	b := v.Block
	_ = b // generator boilerplate; unused by this rule
	// match: (ZeroExt16to32 x)
	// cond:
	// result: (MOVHUreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPSMOVHUreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS_OpZeroExt8to16 lowers an 8-to-16-bit zero extension to
// the MIPS MOVBUreg (move byte unsigned) machine op. Unconditional; always
// returns true.
func rewriteValueMIPS_OpZeroExt8to16(v *Value, config *Config) bool {
	b := v.Block
	_ = b // generator boilerplate; unused by this rule
	// match: (ZeroExt8to16 x)
	// cond:
	// result: (MOVBUreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPSMOVBUreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS_OpZeroExt8to32 lowers an 8-to-32-bit zero extension to
// the MIPS MOVBUreg (move byte unsigned) machine op. Unconditional; always
// returns true.
func rewriteValueMIPS_OpZeroExt8to32(v *Value, config *Config) bool {
	b := v.Block
	_ = b // generator boilerplate; unused by this rule
	// match: (ZeroExt8to32 x)
	// cond:
	// result: (MOVBUreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPSMOVBUreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS_OpZeromask lowers Zeromask to (NEG (SGTU x (MOVWconst [0]))):
// SGTU x 0 yields 1 exactly when x is nonzero (unsigned compare), and NEG
// turns that into an all-ones mask — so the result is presumably 0 when
// x == 0 and ^0 otherwise. Unconditional; always returns true.
func rewriteValueMIPS_OpZeromask(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Zeromask x)
	// cond:
	// result: (NEG (SGTU x (MOVWconst [0])))
	for {
		x := v.Args[0]
		v.reset(OpMIPSNEG)
		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, config.fe.TypeBool())
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, config.fe.TypeUInt32())
		v1.AuxInt = 0
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
  9068  func rewriteBlockMIPS(b *Block, config *Config) bool {
  9069  	switch b.Kind {
  9070  	case BlockMIPSEQ:
  9071  		// match: (EQ (FPFlagTrue cmp) yes no)
  9072  		// cond:
  9073  		// result: (FPF cmp yes no)
  9074  		for {
  9075  			v := b.Control
  9076  			if v.Op != OpMIPSFPFlagTrue {
  9077  				break
  9078  			}
  9079  			cmp := v.Args[0]
  9080  			yes := b.Succs[0]
  9081  			no := b.Succs[1]
  9082  			b.Kind = BlockMIPSFPF
  9083  			b.SetControl(cmp)
  9084  			_ = yes
  9085  			_ = no
  9086  			return true
  9087  		}
  9088  		// match: (EQ (FPFlagFalse cmp) yes no)
  9089  		// cond:
  9090  		// result: (FPT cmp yes no)
  9091  		for {
  9092  			v := b.Control
  9093  			if v.Op != OpMIPSFPFlagFalse {
  9094  				break
  9095  			}
  9096  			cmp := v.Args[0]
  9097  			yes := b.Succs[0]
  9098  			no := b.Succs[1]
  9099  			b.Kind = BlockMIPSFPT
  9100  			b.SetControl(cmp)
  9101  			_ = yes
  9102  			_ = no
  9103  			return true
  9104  		}
  9105  		// match: (EQ (XORconst [1] cmp:(SGT _ _)) yes no)
  9106  		// cond:
  9107  		// result: (NE cmp yes no)
  9108  		for {
  9109  			v := b.Control
  9110  			if v.Op != OpMIPSXORconst {
  9111  				break
  9112  			}
  9113  			if v.AuxInt != 1 {
  9114  				break
  9115  			}
  9116  			cmp := v.Args[0]
  9117  			if cmp.Op != OpMIPSSGT {
  9118  				break
  9119  			}
  9120  			yes := b.Succs[0]
  9121  			no := b.Succs[1]
  9122  			b.Kind = BlockMIPSNE
  9123  			b.SetControl(cmp)
  9124  			_ = yes
  9125  			_ = no
  9126  			return true
  9127  		}
  9128  		// match: (EQ (XORconst [1] cmp:(SGTU _ _)) yes no)
  9129  		// cond:
  9130  		// result: (NE cmp yes no)
  9131  		for {
  9132  			v := b.Control
  9133  			if v.Op != OpMIPSXORconst {
  9134  				break
  9135  			}
  9136  			if v.AuxInt != 1 {
  9137  				break
  9138  			}
  9139  			cmp := v.Args[0]
  9140  			if cmp.Op != OpMIPSSGTU {
  9141  				break
  9142  			}
  9143  			yes := b.Succs[0]
  9144  			no := b.Succs[1]
  9145  			b.Kind = BlockMIPSNE
  9146  			b.SetControl(cmp)
  9147  			_ = yes
  9148  			_ = no
  9149  			return true
  9150  		}
  9151  		// match: (EQ (XORconst [1] cmp:(SGTconst _)) yes no)
  9152  		// cond:
  9153  		// result: (NE cmp yes no)
  9154  		for {
  9155  			v := b.Control
  9156  			if v.Op != OpMIPSXORconst {
  9157  				break
  9158  			}
  9159  			if v.AuxInt != 1 {
  9160  				break
  9161  			}
  9162  			cmp := v.Args[0]
  9163  			if cmp.Op != OpMIPSSGTconst {
  9164  				break
  9165  			}
  9166  			yes := b.Succs[0]
  9167  			no := b.Succs[1]
  9168  			b.Kind = BlockMIPSNE
  9169  			b.SetControl(cmp)
  9170  			_ = yes
  9171  			_ = no
  9172  			return true
  9173  		}
  9174  		// match: (EQ (XORconst [1] cmp:(SGTUconst _)) yes no)
  9175  		// cond:
  9176  		// result: (NE cmp yes no)
  9177  		for {
  9178  			v := b.Control
  9179  			if v.Op != OpMIPSXORconst {
  9180  				break
  9181  			}
  9182  			if v.AuxInt != 1 {
  9183  				break
  9184  			}
  9185  			cmp := v.Args[0]
  9186  			if cmp.Op != OpMIPSSGTUconst {
  9187  				break
  9188  			}
  9189  			yes := b.Succs[0]
  9190  			no := b.Succs[1]
  9191  			b.Kind = BlockMIPSNE
  9192  			b.SetControl(cmp)
  9193  			_ = yes
  9194  			_ = no
  9195  			return true
  9196  		}
  9197  		// match: (EQ (XORconst [1] cmp:(SGTzero _)) yes no)
  9198  		// cond:
  9199  		// result: (NE cmp yes no)
  9200  		for {
  9201  			v := b.Control
  9202  			if v.Op != OpMIPSXORconst {
  9203  				break
  9204  			}
  9205  			if v.AuxInt != 1 {
  9206  				break
  9207  			}
  9208  			cmp := v.Args[0]
  9209  			if cmp.Op != OpMIPSSGTzero {
  9210  				break
  9211  			}
  9212  			yes := b.Succs[0]
  9213  			no := b.Succs[1]
  9214  			b.Kind = BlockMIPSNE
  9215  			b.SetControl(cmp)
  9216  			_ = yes
  9217  			_ = no
  9218  			return true
  9219  		}
  9220  		// match: (EQ (XORconst [1] cmp:(SGTUzero _)) yes no)
  9221  		// cond:
  9222  		// result: (NE cmp yes no)
  9223  		for {
  9224  			v := b.Control
  9225  			if v.Op != OpMIPSXORconst {
  9226  				break
  9227  			}
  9228  			if v.AuxInt != 1 {
  9229  				break
  9230  			}
  9231  			cmp := v.Args[0]
  9232  			if cmp.Op != OpMIPSSGTUzero {
  9233  				break
  9234  			}
  9235  			yes := b.Succs[0]
  9236  			no := b.Succs[1]
  9237  			b.Kind = BlockMIPSNE
  9238  			b.SetControl(cmp)
  9239  			_ = yes
  9240  			_ = no
  9241  			return true
  9242  		}
  9243  		// match: (EQ (SGTUconst [1] x) yes no)
  9244  		// cond:
  9245  		// result: (NE x yes no)
  9246  		for {
  9247  			v := b.Control
  9248  			if v.Op != OpMIPSSGTUconst {
  9249  				break
  9250  			}
  9251  			if v.AuxInt != 1 {
  9252  				break
  9253  			}
  9254  			x := v.Args[0]
  9255  			yes := b.Succs[0]
  9256  			no := b.Succs[1]
  9257  			b.Kind = BlockMIPSNE
  9258  			b.SetControl(x)
  9259  			_ = yes
  9260  			_ = no
  9261  			return true
  9262  		}
  9263  		// match: (EQ (SGTUzero x) yes no)
  9264  		// cond:
  9265  		// result: (EQ x yes no)
  9266  		for {
  9267  			v := b.Control
  9268  			if v.Op != OpMIPSSGTUzero {
  9269  				break
  9270  			}
  9271  			x := v.Args[0]
  9272  			yes := b.Succs[0]
  9273  			no := b.Succs[1]
  9274  			b.Kind = BlockMIPSEQ
  9275  			b.SetControl(x)
  9276  			_ = yes
  9277  			_ = no
  9278  			return true
  9279  		}
  9280  		// match: (EQ (SGTconst [0] x) yes no)
  9281  		// cond:
  9282  		// result: (GEZ x yes no)
  9283  		for {
  9284  			v := b.Control
  9285  			if v.Op != OpMIPSSGTconst {
  9286  				break
  9287  			}
  9288  			if v.AuxInt != 0 {
  9289  				break
  9290  			}
  9291  			x := v.Args[0]
  9292  			yes := b.Succs[0]
  9293  			no := b.Succs[1]
  9294  			b.Kind = BlockMIPSGEZ
  9295  			b.SetControl(x)
  9296  			_ = yes
  9297  			_ = no
  9298  			return true
  9299  		}
  9300  		// match: (EQ (SGTzero x) yes no)
  9301  		// cond:
  9302  		// result: (LEZ x yes no)
  9303  		for {
  9304  			v := b.Control
  9305  			if v.Op != OpMIPSSGTzero {
  9306  				break
  9307  			}
  9308  			x := v.Args[0]
  9309  			yes := b.Succs[0]
  9310  			no := b.Succs[1]
  9311  			b.Kind = BlockMIPSLEZ
  9312  			b.SetControl(x)
  9313  			_ = yes
  9314  			_ = no
  9315  			return true
  9316  		}
  9317  		// match: (EQ  (MOVWconst [0]) yes no)
  9318  		// cond:
  9319  		// result: (First nil yes no)
  9320  		for {
  9321  			v := b.Control
  9322  			if v.Op != OpMIPSMOVWconst {
  9323  				break
  9324  			}
  9325  			if v.AuxInt != 0 {
  9326  				break
  9327  			}
  9328  			yes := b.Succs[0]
  9329  			no := b.Succs[1]
  9330  			b.Kind = BlockFirst
  9331  			b.SetControl(nil)
  9332  			_ = yes
  9333  			_ = no
  9334  			return true
  9335  		}
  9336  		// match: (EQ  (MOVWconst [c]) yes no)
  9337  		// cond: c != 0
  9338  		// result: (First nil no yes)
  9339  		for {
  9340  			v := b.Control
  9341  			if v.Op != OpMIPSMOVWconst {
  9342  				break
  9343  			}
  9344  			c := v.AuxInt
  9345  			yes := b.Succs[0]
  9346  			no := b.Succs[1]
  9347  			if !(c != 0) {
  9348  				break
  9349  			}
  9350  			b.Kind = BlockFirst
  9351  			b.SetControl(nil)
  9352  			b.swapSuccessors()
  9353  			_ = no
  9354  			_ = yes
  9355  			return true
  9356  		}
  9357  	case BlockMIPSGEZ:
  9358  		// match: (GEZ (MOVWconst [c]) yes no)
  9359  		// cond: int32(c) >= 0
  9360  		// result: (First nil yes no)
  9361  		for {
  9362  			v := b.Control
  9363  			if v.Op != OpMIPSMOVWconst {
  9364  				break
  9365  			}
  9366  			c := v.AuxInt
  9367  			yes := b.Succs[0]
  9368  			no := b.Succs[1]
  9369  			if !(int32(c) >= 0) {
  9370  				break
  9371  			}
  9372  			b.Kind = BlockFirst
  9373  			b.SetControl(nil)
  9374  			_ = yes
  9375  			_ = no
  9376  			return true
  9377  		}
  9378  		// match: (GEZ (MOVWconst [c]) yes no)
  9379  		// cond: int32(c) <  0
  9380  		// result: (First nil no yes)
  9381  		for {
  9382  			v := b.Control
  9383  			if v.Op != OpMIPSMOVWconst {
  9384  				break
  9385  			}
  9386  			c := v.AuxInt
  9387  			yes := b.Succs[0]
  9388  			no := b.Succs[1]
  9389  			if !(int32(c) < 0) {
  9390  				break
  9391  			}
  9392  			b.Kind = BlockFirst
  9393  			b.SetControl(nil)
  9394  			b.swapSuccessors()
  9395  			_ = no
  9396  			_ = yes
  9397  			return true
  9398  		}
  9399  	case BlockMIPSGTZ:
  9400  		// match: (GTZ (MOVWconst [c]) yes no)
  9401  		// cond: int32(c) >  0
  9402  		// result: (First nil yes no)
  9403  		for {
  9404  			v := b.Control
  9405  			if v.Op != OpMIPSMOVWconst {
  9406  				break
  9407  			}
  9408  			c := v.AuxInt
  9409  			yes := b.Succs[0]
  9410  			no := b.Succs[1]
  9411  			if !(int32(c) > 0) {
  9412  				break
  9413  			}
  9414  			b.Kind = BlockFirst
  9415  			b.SetControl(nil)
  9416  			_ = yes
  9417  			_ = no
  9418  			return true
  9419  		}
  9420  		// match: (GTZ (MOVWconst [c]) yes no)
  9421  		// cond: int32(c) <= 0
  9422  		// result: (First nil no yes)
  9423  		for {
  9424  			v := b.Control
  9425  			if v.Op != OpMIPSMOVWconst {
  9426  				break
  9427  			}
  9428  			c := v.AuxInt
  9429  			yes := b.Succs[0]
  9430  			no := b.Succs[1]
  9431  			if !(int32(c) <= 0) {
  9432  				break
  9433  			}
  9434  			b.Kind = BlockFirst
  9435  			b.SetControl(nil)
  9436  			b.swapSuccessors()
  9437  			_ = no
  9438  			_ = yes
  9439  			return true
  9440  		}
  9441  	case BlockIf:
  9442  		// match: (If cond yes no)
  9443  		// cond:
  9444  		// result: (NE cond yes no)
  9445  		for {
  9446  			v := b.Control
  9447  			_ = v
  9448  			cond := b.Control
  9449  			yes := b.Succs[0]
  9450  			no := b.Succs[1]
  9451  			b.Kind = BlockMIPSNE
  9452  			b.SetControl(cond)
  9453  			_ = yes
  9454  			_ = no
  9455  			return true
  9456  		}
  9457  	case BlockMIPSLEZ:
  9458  		// match: (LEZ (MOVWconst [c]) yes no)
  9459  		// cond: int32(c) <= 0
  9460  		// result: (First nil yes no)
  9461  		for {
  9462  			v := b.Control
  9463  			if v.Op != OpMIPSMOVWconst {
  9464  				break
  9465  			}
  9466  			c := v.AuxInt
  9467  			yes := b.Succs[0]
  9468  			no := b.Succs[1]
  9469  			if !(int32(c) <= 0) {
  9470  				break
  9471  			}
  9472  			b.Kind = BlockFirst
  9473  			b.SetControl(nil)
  9474  			_ = yes
  9475  			_ = no
  9476  			return true
  9477  		}
  9478  		// match: (LEZ (MOVWconst [c]) yes no)
  9479  		// cond: int32(c) >  0
  9480  		// result: (First nil no yes)
  9481  		for {
  9482  			v := b.Control
  9483  			if v.Op != OpMIPSMOVWconst {
  9484  				break
  9485  			}
  9486  			c := v.AuxInt
  9487  			yes := b.Succs[0]
  9488  			no := b.Succs[1]
  9489  			if !(int32(c) > 0) {
  9490  				break
  9491  			}
  9492  			b.Kind = BlockFirst
  9493  			b.SetControl(nil)
  9494  			b.swapSuccessors()
  9495  			_ = no
  9496  			_ = yes
  9497  			return true
  9498  		}
  9499  	case BlockMIPSLTZ:
  9500  		// match: (LTZ (MOVWconst [c]) yes no)
  9501  		// cond: int32(c) <  0
  9502  		// result: (First nil yes no)
  9503  		for {
  9504  			v := b.Control
  9505  			if v.Op != OpMIPSMOVWconst {
  9506  				break
  9507  			}
  9508  			c := v.AuxInt
  9509  			yes := b.Succs[0]
  9510  			no := b.Succs[1]
  9511  			if !(int32(c) < 0) {
  9512  				break
  9513  			}
  9514  			b.Kind = BlockFirst
  9515  			b.SetControl(nil)
  9516  			_ = yes
  9517  			_ = no
  9518  			return true
  9519  		}
  9520  		// match: (LTZ (MOVWconst [c]) yes no)
  9521  		// cond: int32(c) >= 0
  9522  		// result: (First nil no yes)
  9523  		for {
  9524  			v := b.Control
  9525  			if v.Op != OpMIPSMOVWconst {
  9526  				break
  9527  			}
  9528  			c := v.AuxInt
  9529  			yes := b.Succs[0]
  9530  			no := b.Succs[1]
  9531  			if !(int32(c) >= 0) {
  9532  				break
  9533  			}
  9534  			b.Kind = BlockFirst
  9535  			b.SetControl(nil)
  9536  			b.swapSuccessors()
  9537  			_ = no
  9538  			_ = yes
  9539  			return true
  9540  		}
  9541  	case BlockMIPSNE:
  9542  		// match: (NE (FPFlagTrue cmp) yes no)
  9543  		// cond:
  9544  		// result: (FPT cmp yes no)
  9545  		for {
  9546  			v := b.Control
  9547  			if v.Op != OpMIPSFPFlagTrue {
  9548  				break
  9549  			}
  9550  			cmp := v.Args[0]
  9551  			yes := b.Succs[0]
  9552  			no := b.Succs[1]
  9553  			b.Kind = BlockMIPSFPT
  9554  			b.SetControl(cmp)
  9555  			_ = yes
  9556  			_ = no
  9557  			return true
  9558  		}
  9559  		// match: (NE (FPFlagFalse cmp) yes no)
  9560  		// cond:
  9561  		// result: (FPF cmp yes no)
  9562  		for {
  9563  			v := b.Control
  9564  			if v.Op != OpMIPSFPFlagFalse {
  9565  				break
  9566  			}
  9567  			cmp := v.Args[0]
  9568  			yes := b.Succs[0]
  9569  			no := b.Succs[1]
  9570  			b.Kind = BlockMIPSFPF
  9571  			b.SetControl(cmp)
  9572  			_ = yes
  9573  			_ = no
  9574  			return true
  9575  		}
  9576  		// match: (NE (XORconst [1] cmp:(SGT _ _)) yes no)
  9577  		// cond:
  9578  		// result: (EQ cmp yes no)
  9579  		for {
  9580  			v := b.Control
  9581  			if v.Op != OpMIPSXORconst {
  9582  				break
  9583  			}
  9584  			if v.AuxInt != 1 {
  9585  				break
  9586  			}
  9587  			cmp := v.Args[0]
  9588  			if cmp.Op != OpMIPSSGT {
  9589  				break
  9590  			}
  9591  			yes := b.Succs[0]
  9592  			no := b.Succs[1]
  9593  			b.Kind = BlockMIPSEQ
  9594  			b.SetControl(cmp)
  9595  			_ = yes
  9596  			_ = no
  9597  			return true
  9598  		}
  9599  		// match: (NE (XORconst [1] cmp:(SGTU _ _)) yes no)
  9600  		// cond:
  9601  		// result: (EQ cmp yes no)
  9602  		for {
  9603  			v := b.Control
  9604  			if v.Op != OpMIPSXORconst {
  9605  				break
  9606  			}
  9607  			if v.AuxInt != 1 {
  9608  				break
  9609  			}
  9610  			cmp := v.Args[0]
  9611  			if cmp.Op != OpMIPSSGTU {
  9612  				break
  9613  			}
  9614  			yes := b.Succs[0]
  9615  			no := b.Succs[1]
  9616  			b.Kind = BlockMIPSEQ
  9617  			b.SetControl(cmp)
  9618  			_ = yes
  9619  			_ = no
  9620  			return true
  9621  		}
  9622  		// match: (NE (XORconst [1] cmp:(SGTconst _)) yes no)
  9623  		// cond:
  9624  		// result: (EQ cmp yes no)
  9625  		for {
  9626  			v := b.Control
  9627  			if v.Op != OpMIPSXORconst {
  9628  				break
  9629  			}
  9630  			if v.AuxInt != 1 {
  9631  				break
  9632  			}
  9633  			cmp := v.Args[0]
  9634  			if cmp.Op != OpMIPSSGTconst {
  9635  				break
  9636  			}
  9637  			yes := b.Succs[0]
  9638  			no := b.Succs[1]
  9639  			b.Kind = BlockMIPSEQ
  9640  			b.SetControl(cmp)
  9641  			_ = yes
  9642  			_ = no
  9643  			return true
  9644  		}
  9645  		// match: (NE (XORconst [1] cmp:(SGTUconst _)) yes no)
  9646  		// cond:
  9647  		// result: (EQ cmp yes no)
  9648  		for {
  9649  			v := b.Control
  9650  			if v.Op != OpMIPSXORconst {
  9651  				break
  9652  			}
  9653  			if v.AuxInt != 1 {
  9654  				break
  9655  			}
  9656  			cmp := v.Args[0]
  9657  			if cmp.Op != OpMIPSSGTUconst {
  9658  				break
  9659  			}
  9660  			yes := b.Succs[0]
  9661  			no := b.Succs[1]
  9662  			b.Kind = BlockMIPSEQ
  9663  			b.SetControl(cmp)
  9664  			_ = yes
  9665  			_ = no
  9666  			return true
  9667  		}
  9668  		// match: (NE (XORconst [1] cmp:(SGTzero _)) yes no)
  9669  		// cond:
  9670  		// result: (EQ cmp yes no)
  9671  		for {
  9672  			v := b.Control
  9673  			if v.Op != OpMIPSXORconst {
  9674  				break
  9675  			}
  9676  			if v.AuxInt != 1 {
  9677  				break
  9678  			}
  9679  			cmp := v.Args[0]
  9680  			if cmp.Op != OpMIPSSGTzero {
  9681  				break
  9682  			}
  9683  			yes := b.Succs[0]
  9684  			no := b.Succs[1]
  9685  			b.Kind = BlockMIPSEQ
  9686  			b.SetControl(cmp)
  9687  			_ = yes
  9688  			_ = no
  9689  			return true
  9690  		}
  9691  		// match: (NE (XORconst [1] cmp:(SGTUzero _)) yes no)
  9692  		// cond:
  9693  		// result: (EQ cmp yes no)
  9694  		for {
  9695  			v := b.Control
  9696  			if v.Op != OpMIPSXORconst {
  9697  				break
  9698  			}
  9699  			if v.AuxInt != 1 {
  9700  				break
  9701  			}
  9702  			cmp := v.Args[0]
  9703  			if cmp.Op != OpMIPSSGTUzero {
  9704  				break
  9705  			}
  9706  			yes := b.Succs[0]
  9707  			no := b.Succs[1]
  9708  			b.Kind = BlockMIPSEQ
  9709  			b.SetControl(cmp)
  9710  			_ = yes
  9711  			_ = no
  9712  			return true
  9713  		}
  9714  		// match: (NE (SGTUconst [1] x) yes no)
  9715  		// cond:
  9716  		// result: (EQ x yes no)
  9717  		for {
  9718  			v := b.Control
  9719  			if v.Op != OpMIPSSGTUconst {
  9720  				break
  9721  			}
  9722  			if v.AuxInt != 1 {
  9723  				break
  9724  			}
  9725  			x := v.Args[0]
  9726  			yes := b.Succs[0]
  9727  			no := b.Succs[1]
  9728  			b.Kind = BlockMIPSEQ
  9729  			b.SetControl(x)
  9730  			_ = yes
  9731  			_ = no
  9732  			return true
  9733  		}
  9734  		// match: (NE (SGTUzero x) yes no)
  9735  		// cond:
  9736  		// result: (NE x yes no)
  9737  		for {
  9738  			v := b.Control
  9739  			if v.Op != OpMIPSSGTUzero {
  9740  				break
  9741  			}
  9742  			x := v.Args[0]
  9743  			yes := b.Succs[0]
  9744  			no := b.Succs[1]
  9745  			b.Kind = BlockMIPSNE
  9746  			b.SetControl(x)
  9747  			_ = yes
  9748  			_ = no
  9749  			return true
  9750  		}
  9751  		// match: (NE (SGTconst [0] x) yes no)
  9752  		// cond:
  9753  		// result: (LTZ x yes no)
  9754  		for {
  9755  			v := b.Control
  9756  			if v.Op != OpMIPSSGTconst {
  9757  				break
  9758  			}
  9759  			if v.AuxInt != 0 {
  9760  				break
  9761  			}
  9762  			x := v.Args[0]
  9763  			yes := b.Succs[0]
  9764  			no := b.Succs[1]
  9765  			b.Kind = BlockMIPSLTZ
  9766  			b.SetControl(x)
  9767  			_ = yes
  9768  			_ = no
  9769  			return true
  9770  		}
  9771  		// match: (NE (SGTzero x) yes no)
  9772  		// cond:
  9773  		// result: (GTZ x yes no)
  9774  		for {
  9775  			v := b.Control
  9776  			if v.Op != OpMIPSSGTzero {
  9777  				break
  9778  			}
  9779  			x := v.Args[0]
  9780  			yes := b.Succs[0]
  9781  			no := b.Succs[1]
  9782  			b.Kind = BlockMIPSGTZ
  9783  			b.SetControl(x)
  9784  			_ = yes
  9785  			_ = no
  9786  			return true
  9787  		}
  9788  		// match: (NE  (MOVWconst [0]) yes no)
  9789  		// cond:
  9790  		// result: (First nil no yes)
  9791  		for {
  9792  			v := b.Control
  9793  			if v.Op != OpMIPSMOVWconst {
  9794  				break
  9795  			}
  9796  			if v.AuxInt != 0 {
  9797  				break
  9798  			}
  9799  			yes := b.Succs[0]
  9800  			no := b.Succs[1]
  9801  			b.Kind = BlockFirst
  9802  			b.SetControl(nil)
  9803  			b.swapSuccessors()
  9804  			_ = no
  9805  			_ = yes
  9806  			return true
  9807  		}
  9808  		// match: (NE  (MOVWconst [c]) yes no)
  9809  		// cond: c != 0
  9810  		// result: (First nil yes no)
  9811  		for {
  9812  			v := b.Control
  9813  			if v.Op != OpMIPSMOVWconst {
  9814  				break
  9815  			}
  9816  			c := v.AuxInt
  9817  			yes := b.Succs[0]
  9818  			no := b.Succs[1]
  9819  			if !(c != 0) {
  9820  				break
  9821  			}
  9822  			b.Kind = BlockFirst
  9823  			b.SetControl(nil)
  9824  			_ = yes
  9825  			_ = no
  9826  			return true
  9827  		}
  9828  	}
  9829  	return false
  9830  }